# ============================================================================
# DeltaEpsilon-HackFMI2/FMICalendar-REST :: schedule/models.py (Python, MIT)
# ============================================================================
# -*- coding: utf-8 -*-
from django.db import models
from datetime import datetime


class Place(models.Model):
    """
    Holder object for basic info about the rooms
    in the university.
    """
    room_place = models.CharField(max_length=255)
    floor = models.IntegerField()

    def __unicode__(self):
        return self.room_place


class HierarchyUnit(models.Model):
    PROGRAM = 'PR'
    YEAR = 'YR'
    GROUP = 'GR'

    TYPES = (
        (PROGRAM, u"Специалност"),  # "Programme"
        (YEAR, u"Курс"),  # "Year"
        (GROUP, u"Група"),  # "Group"
    )

    type_value = models.CharField(max_length=255, choices=TYPES)
    value = models.CharField(max_length=255)
    parent = models.ForeignKey("schedule.HierarchyUnit", null=True, blank=True, default=None)

    def get_all_info_for_parents(self):
        if self.type_value == 'PR':
            return self.value
        if self.type_value == 'YR':
            return ', '.join([self.parent.value, self.value + u' курс'])
        else:
            return ', '.join([self.parent.parent.value, self.parent.value + u' курс', self.value + u' група'])

    def get_all_childs(self):
        return HierarchyUnit.objects.filter(parent=self)

    def __unicode__(self):
        return self.get_all_info_for_parents()


class Block(models.Model):
    """
    Group representing a set of optional subjects.
    Example: Core of Computer Science.
    """
    name = models.CharField(max_length=255)

    def __unicode__(self):
        return self.name


class Subject(models.Model):
    """
    Representation of all subjects.
    Example: Calculus 1.
    """
    MANDATORY = 'MN'
    OPTIONAL = 'OP'

    TYPES = (
        (MANDATORY, u"Задължителен"),  # "Mandatory"
        (OPTIONAL, u"Избираем"),  # "Elective"
    )

    type_value = models.CharField(max_length=255, choices=TYPES)
    name = models.CharField(max_length=255)
    block = models.ForeignKey(Block, null=True, blank=True, default=None)
    year = models.ForeignKey(HierarchyUnit, null=True, blank=True, default=None,
                             limit_choices_to={'type_value': HierarchyUnit.YEAR})

    def get_year_value(self):
        return ', '.join([self.year.parent.value, self.year.value + u' курс'])

    def __unicode__(self):
        return self.name


class Department(models.Model):
    """
    Group representing a set of lecturers
    grouped by field of teaching.
    """
    name = models.CharField(max_length=255)

    def __unicode__(self):
        return self.name


class Teacher(models.Model):
    name = models.CharField(max_length=255)
    title = models.CharField(max_length=255)
    email = models.CharField(max_length=255)
    full_name = models.CharField(max_length=255)
    position = models.CharField(max_length=255)
    # null=True has no effect on ManyToManyField, so it is dropped here.
    subjects = models.ManyToManyField(Subject, blank=True, default=None)
    department = models.ForeignKey(Department, null=True, blank=True, default=None)

    def __unicode__(self):
        return self.name


class Event(models.Model):
    WEEKLY = 'WKL'

    TYPES = (
        (WEEKLY, u'Седмично'),  # "Weekly"
    )

    type_value = models.CharField(max_length=255, null=True, blank=True, default=None)
    # Pass the callable itself, not datetime.now(): calling it in the field
    # definition would freeze the timestamp at module-import time.
    inserted = models.DateField(default=datetime.now)
    name = models.CharField(max_length=255)
    place = models.ForeignKey(Place, blank=True, default=None, null=True)
    date_start = models.DateTimeField()
    date_end = models.DateTimeField(default=datetime.now)
    repeatable = models.BooleanField()
    duration = models.IntegerField()  # fixed typo: was "duratation"
    subject = models.ForeignKey(Subject, blank=True, default=None, null=True)
    teacher = models.ForeignKey(Teacher, blank=True, default=None, null=True)

    def __unicode__(self):
        return self.name


class Student(models.Model):
    PROGRAM = (('BK', 'Бакалавър'), ('MG', 'Магистър'))  # "Bachelor" / "Master"

    name = models.CharField(max_length=255)
    program = models.CharField(max_length=255, choices=PROGRAM, blank=True, default=PROGRAM[0][0])
    fac_number = models.CharField(max_length=255)
    email = models.CharField(max_length=255)
    group = models.ForeignKey(HierarchyUnit, limit_choices_to={'type_value': HierarchyUnit.GROUP},
                              blank=True, default=None, null=True)
    events = models.ManyToManyField(Event, blank=True, default=None)

    def __unicode__(self):
        return self.name


class Comment(models.Model):
    from_user = models.ForeignKey(Student, blank=True, default=None, null=True)
    event = models.ForeignKey(Event, blank=True, default=None, null=True)
    start_date = models.DateField()
    end_date = models.DateField()
    dtstamp = models.DateField(default=datetime.now)
    desc = models.TextField()
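
# Editor's illustrative sketch (not part of the original file): how the
# HierarchyUnit parent chain above is meant to be used. Assumes a configured
# Django project with these models migrated; the values below are made up.
#
#     program = HierarchyUnit.objects.create(type_value=HierarchyUnit.PROGRAM, value=u"Информатика")
#     year = HierarchyUnit.objects.create(type_value=HierarchyUnit.YEAR, value=u"2", parent=program)
#     group = HierarchyUnit.objects.create(type_value=HierarchyUnit.GROUP, value=u"5", parent=year)
#     group.get_all_info_for_parents()  # -> u'Информатика, 2 курс, 5 група'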
# ============================================================================
# mihail-ivanov/cherrydo :: cherrydo/common.py (Python, MIT)
# ============================================================================
from cherrydo.utils import is_cherrydo_project


class CherryDoException(Exception):
    pass


class BaseGenerator(object):
    def __init__(self, name, params):
        self.name = name
        self.params = params

    def formatted_name(self):
        return self.name.replace('_', ' ').title().replace(' ', '')

    def validate(self):
        pass

    def default_context(self):
        return {}

    def create(self):
        return True


class CherryDoGenerator(BaseGenerator):
    def validate(self):
        if not is_cherrydo_project():
            raise CherryDoException('CherryDo project not found!')
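
# Editor's illustrative sketch (not in the original file): the base class is
# meant to be subclassed, but it is already usable for name formatting. The
# 'my_widget' name is made up.
#
#     gen = BaseGenerator('my_widget', params={})
#     gen.formatted_name()  # -> 'MyWidget'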
# ============================================================================
# rr-/dotfiles :: cfg/alacritty/__main__.py (Python, MIT)
# ============================================================================
from libdotfiles.packages import try_install
from libdotfiles.util import HOME_DIR, PKG_DIR, copy_file

try_install("alacritty")
copy_file(
    PKG_DIR / "alacritty.yml",
    HOME_DIR / ".config" / "alacritty" / "alacritty.yml",
)
# ============================================================================
# rjw57/rbc :: rbc/parser.py (Python, MIT)
# ============================================================================
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# CAVEAT UTILITOR
#
# This file was automatically generated by Grako.
#
#    https://pypi.python.org/pypi/grako/
#
# Any changes you make to it will be overwritten the next time
# the file is generated.

from __future__ import print_function, division, absolute_import, unicode_literals

from grako.parsing import graken, Parser
from grako.util import re, RE_FLAGS


__version__ = (2015, 12, 26, 22, 15, 59, 5)

__all__ = [
    'BParser',
    'BSemantics',
    'main'
]


class BParser(Parser):
    def __init__(self,
                 whitespace=None,
                 nameguard=None,
                 comments_re='/\\*((?:[^\\*]|\\*[^/]|\\n)*?)\\*+/',
                 eol_comments_re=None,
                 ignorecase=None,
                 left_recursion=False,
                 **kwargs):
        super(BParser, self).__init__(
            whitespace=whitespace,
            nameguard=nameguard,
            comments_re=comments_re,
            eol_comments_re=eol_comments_re,
            ignorecase=ignorecase,
            left_recursion=left_recursion,
            **kwargs
        )

    @graken()
    def _program_(self):
        def block1():
            self._definition_()
            self._cut()
        self._closure(block1)
        self.ast['@'] = self.last_node
        self._check_eof()

    @graken()
    def _definition_(self):
        with self._choice():
            with self._option():
                self._simpledef_()
            with self._option():
                self._vectordef_()
            with self._option():
                self._functiondef_()
            self._error('no available options')

    @graken()
    def _simpledef_(self):
        self._name_()
        self.ast['name'] = self.last_node
        with self._optional():
            self._ival_()
            self.ast['init'] = self.last_node
        self._token(';')

        self.ast._define(
            ['name', 'init'],
            []
        )

    @graken()
    def _vectordef_(self):
        self._name_()
        self.ast['name'] = self.last_node
        self._token('[')
        with self._optional():
            self._constantexpr_()
            self.ast['maxidx'] = self.last_node
        self._token(']')
        with self._optional():
            self._ivallist_()
            self.ast['ivals'] = self.last_node
        self._token(';')

        self.ast._define(
            ['name', 'maxidx', 'ivals'],
            []
        )

    @graken()
    def _ivallist_(self):
        self._ival_()
        self.ast.setlist('@', self.last_node)

        def block1():
            self._token(',')
            self._ival_()
            self.ast.setlist('@', self.last_node)
        self._closure(block1)

    @graken()
    def _ival_(self):
        with self._choice():
            with self._option():
                self._numericexpr_()
            with self._option():
                self._characterexpr_()
            with self._option():
                self._stringexpr_()
            self._error('no available options')

    @graken()
    def _functiondef_(self):
        self._name_()
        self.ast['name'] = self.last_node
        self._token('(')
        with self._optional():
            self._namelist_()
            self.ast['args'] = self.last_node
        self._token(')')
        self._cut()
        self._statement_()
        self.ast['body'] = self.last_node

        self.ast._define(
            ['name', 'args', 'body'],
            []
        )

    @graken()
    def _statement_(self):
        with self._choice():
            with self._option():
                self._labelstatement_()
            with self._option():
                self._gotostatement_()
            with self._option():
                self._switchstatement_()
            with self._option():
                self._casestatement_()
            with self._option():
                self._breakstatement_()
            with self._option():
                self._autostatement_()
            with self._option():
                self._extrnstatement_()
            with self._option():
                self._compoundstatement_()
            with self._option():
                self._ifstatement_()
            with self._option():
                self._whilestatement_()
            with self._option():
                self._returnstatement_()
            with self._option():
                self._exprstatement_()
            with self._option():
                self._nullstatement_()
            self._error('no available options')

    @graken()
    def _labelstatement_(self):
        with self._ifnot():
            with self._group():
                self._token('default')
        self._name_()
        self.ast['label'] = self.last_node
        self._token(':')
        self._statement_()
        self.ast['statement'] = self.last_node

        self.ast._define(
            ['label', 'statement'],
            []
        )

    @graken()
    def _gotostatement_(self):
        self._token('goto')
        self._cut()
        self._name_()
        self.ast['label'] = self.last_node
        self._token(';')

        self.ast._define(
            ['label'],
            []
        )

    @graken()
    def _switchstatement_(self):
        self._token('switch')
        self._cut()
        self._expr_()
        self.ast['rvalue'] = self.last_node
        self._cut()
        self._statement_()
        self.ast['body'] = self.last_node

        self.ast._define(
            ['rvalue', 'body'],
            []
        )

    @graken()
    def _casestatement_(self):
        with self._group():
            with self._choice():
                with self._option():
                    with self._group():
                        self._token('case')
                        self._constantexpr_()
                        self.ast['cond'] = self.last_node
                with self._option():
                    self._token('default')
                self._error('expecting one of: default')
        self._cut()
        self._token(':')
        self._statement_()
        self.ast['then'] = self.last_node

        self.ast._define(
            ['cond', 'then'],
            []
        )

    @graken()
    def _breakstatement_(self):
        self._token('break')
        self._token(';')

    @graken()
    def _autostatement_(self):
        self._token('auto')
        self._cut()
        self._autovar_()
        self.ast.setlist('@', self.last_node)

        def block1():
            self._token(',')
            self._autovar_()
            self.ast.setlist('@', self.last_node)
        self._closure(block1)
        self._token(';')

    @graken()
    def _autovar_(self):
        self._name_()
        self.ast['name'] = self.last_node
        with self._optional():
            self._token('[')
            self._constantexpr_()
            self.ast['maxidx'] = self.last_node
            self._token(']')

        self.ast._define(
            ['name', 'maxidx'],
            []
        )

    @graken()
    def _extrnstatement_(self):
        self._token('extrn')
        self._cut()
        self._namelist_()
        self.ast['@'] = self.last_node
        self._token(';')

    @graken()
    def _compoundstatement_(self):
        self._token('{')
        self._cut()

        def block1():
            self._statement_()
            self._cut()
        self._closure(block1)
        self.ast['@'] = self.last_node
        self._token('}')

    @graken()
    def _ifstatement_(self):
        self._token('if')
        self._cut()
        self._token('(')
        self._expr_()
        self.ast['cond'] = self.last_node
        self._token(')')
        self._statement_()
        self.ast['then'] = self.last_node
        with self._optional():
            self._token('else')
            self._statement_()
            self.ast['otherwise'] = self.last_node

        self.ast._define(
            ['cond', 'then', 'otherwise'],
            []
        )

    @graken()
    def _whilestatement_(self):
        self._token('while')
        self._cut()
        self._token('(')
        self._expr_()
        self.ast['cond'] = self.last_node
        self._token(')')
        self._statement_()
        self.ast['body'] = self.last_node

        self.ast._define(
            ['cond', 'body'],
            []
        )

    @graken()
    def _returnstatement_(self):
        self._token('return')
        self._cut()
        with self._optional():
            self._token('(')
            self._expr_()
            self.ast['return_value'] = self.last_node
            self._token(')')
        self._token(';')

        self.ast._define(
            ['return_value'],
            []
        )

    @graken()
    def _exprstatement_(self):
        self._expr_()
        self.ast['@'] = self.last_node
        self._token(';')

    @graken()
    def _nullstatement_(self):
        self._token(';')

    @graken()
    def _expr_(self):
        self._assignexpr_()

    @graken()
    def _assignexpr_(self):
        self._condexpr_()
        self.ast['lhs'] = self.last_node
        with self._optional():
            self._assignop_()
            self.ast['op'] = self.last_node
            self._assignexpr_()
            self.ast['rhs'] = self.last_node

        self.ast._define(
            ['lhs', 'op', 'rhs'],
            []
        )

    @graken()
    def _assignop_(self):
        self._pattern(r'=([+\-/\*%&^|]|[=!]=|>[=>]?|<[=<]?)?')

    @graken()
    def _condexpr_(self):
        self._orexpr_()
        self.ast['cond'] = self.last_node
        with self._optional():
            self._token('?')
            self._condexpr_()
            self.ast['then'] = self.last_node
            self._token(':')
            self._condexpr_()
            self.ast['otherwise'] = self.last_node

        self.ast._define(
            ['cond', 'then', 'otherwise'],
            []
        )

    @graken()
    def _orexpr_(self):
        self._xorexpr_()
        self.ast['lhs'] = self.last_node

        def block2():
            self._ortail_()
        self._closure(block2)
        self.ast['tail'] = self.last_node

        self.ast._define(
            ['lhs', 'tail'],
            []
        )

    @graken()
    def _ortail_(self):
        self._token('|')
        self.ast['op'] = self.last_node
        self._xorexpr_()
        self.ast['rhs'] = self.last_node

        self.ast._define(
            ['op', 'rhs'],
            []
        )

    @graken()
    def _xorexpr_(self):
        self._andexpr_()
        self.ast['lhs'] = self.last_node

        def block2():
            self._xortail_()
        self._closure(block2)
        self.ast['tail'] = self.last_node

        self.ast._define(
            ['lhs', 'tail'],
            []
        )

    @graken()
    def _xortail_(self):
        self._token('^')
        self.ast['op'] = self.last_node
        self._andexpr_()
        self.ast['rhs'] = self.last_node

        self.ast._define(
            ['op', 'rhs'],
            []
        )

    @graken()
    def _andexpr_(self):
        self._eqexpr_()
        self.ast['lhs'] = self.last_node

        def block2():
            self._andtail_()
        self._closure(block2)
        self.ast['tail'] = self.last_node

        self.ast._define(
            ['lhs', 'tail'],
            []
        )

    @graken()
    def _andtail_(self):
        self._token('&')
        self.ast['op'] = self.last_node
        self._eqexpr_()
        self.ast['rhs'] = self.last_node

        self.ast._define(
            ['op', 'rhs'],
            []
        )

    @graken()
    def _eqexpr_(self):
        self._relexpr_()
        self.ast['lhs'] = self.last_node

        def block2():
            self._eqtail_()
        self._closure(block2)
        self.ast['tail'] = self.last_node

        self.ast._define(
            ['lhs', 'tail'],
            []
        )

    @graken()
    def _eqtail_(self):
        self._eqop_()
        self.ast['op'] = self.last_node
        self._relexpr_()
        self.ast['rhs'] = self.last_node

        self.ast._define(
            ['op', 'rhs'],
            []
        )

    @graken()
    def _eqop_(self):
        self._pattern(r'[!=]=')

    @graken()
    def _relexpr_(self):
        self._shiftexpr_()
        self.ast['lhs'] = self.last_node

        def block2():
            self._reltail_()
        self._closure(block2)
        self.ast['tail'] = self.last_node

        self.ast._define(
            ['lhs', 'tail'],
            []
        )

    @graken()
    def _reltail_(self):
        self._relop_()
        self.ast['op'] = self.last_node
        self._shiftexpr_()
        self.ast['rhs'] = self.last_node

        self.ast._define(
            ['op', 'rhs'],
            []
        )

    @graken()
    def _relop_(self):
        self._pattern(r'[<>]={0,1}')

    @graken()
    def _shiftexpr_(self):
        self._addexpr_()
        self.ast['lhs'] = self.last_node

        def block2():
            self._shifttail_()
        self._closure(block2)
        self.ast['tail'] = self.last_node

        self.ast._define(
            ['lhs', 'tail'],
            []
        )

    @graken()
    def _shifttail_(self):
        self._shiftop_()
        self.ast['op'] = self.last_node
        self._addexpr_()
        self.ast['rhs'] = self.last_node

        self.ast._define(
            ['op', 'rhs'],
            []
        )

    @graken()
    def _shiftop_(self):
        self._pattern(r'<<|>>')

    @graken()
    def _addexpr_(self):
        self._multexpr_()
        self.ast['lhs'] = self.last_node

        def block2():
            self._addtail_()
        self._closure(block2)
        self.ast['tail'] = self.last_node

        self.ast._define(
            ['lhs', 'tail'],
            []
        )

    @graken()
    def _addtail_(self):
        self._addop_()
        self.ast['op'] = self.last_node
        self._multexpr_()
        self.ast['rhs'] = self.last_node

        self.ast._define(
            ['op', 'rhs'],
            []
        )

    @graken()
    def _addop_(self):
        self._pattern(r'[+-]')

    @graken()
    def _multexpr_(self):
        self._unaryexpr_()
        self.ast['lhs'] = self.last_node

        def block2():
            self._multtail_()
        self._closure(block2)
        self.ast['tail'] = self.last_node

        self.ast._define(
            ['lhs', 'tail'],
            []
        )

    @graken()
    def _multtail_(self):
        self._multop_()
        self.ast['op'] = self.last_node
        self._unaryexpr_()
        self.ast['rhs'] = self.last_node

        self.ast._define(
            ['op', 'rhs'],
            []
        )

    @graken()
    def _multop_(self):
        self._pattern(r'[/%\*]')

    @graken()
    def _unaryexpr_(self):
        def block1():
            self._leftunaryop_()
        self._closure(block1)
        self.ast['leftops'] = self.last_node
        self._primaryexpr_()
        self.ast['rhs'] = self.last_node

        def block4():
            self._rightunaryop_()
        self._closure(block4)
        self.ast['rightops'] = self.last_node

        self.ast._define(
            ['leftops', 'rhs', 'rightops'],
            []
        )

    @graken()
    def _leftunaryop_(self):
        self._pattern(r'[\*&!\~]|--?|\+\+')

    @graken()
    def _rightunaryop_(self):
        with self._choice():
            with self._option():
                self._token('++')
            with self._option():
                self._token('--')
            self._error('expecting one of: ++ --')

    @graken()
    def _primaryexpr_(self):
        self._primaryexprhead_()
        self.ast['head'] = self.last_node

        def block2():
            self._primaryexprtail_()
        self._closure(block2)
        self.ast['tail'] = self.last_node

        self.ast._define(
            ['head', 'tail'],
            []
        )

    @graken()
    def _primaryexprhead_(self):
        with self._choice():
            with self._option():
                self._token('(')
                self._expr_()
                self.ast['@'] = self.last_node
                self._token(')')
            with self._option():
                self._builtinexpr_()
            with self._option():
                self._variableexpr_()
            with self._option():
                self._constantexpr_()
            with self._option():
                self._stringexpr_()
            self._error('no available options')

    @graken()
    def _primaryexprtail_(self):
        with self._choice():
            with self._option():
                self._token('(')
                with self._optional():
                    self._exprlist_()
                    self.ast['args'] = self.last_node
                self._token(')')
            with self._option():
                self._token('[')
                self._expr_()
                self.ast['index'] = self.last_node
                self._token(']')
            self._error('expecting one of: (')

        self.ast._define(
            ['args', 'index'],
            []
        )

    @graken()
    def _variableexpr_(self):
        with self._ifnot():
            self._builtinexpr_()
        self._name_()

    @graken()
    def _constantexpr_(self):
        with self._choice():
            with self._option():
                self._numericexpr_()
            with self._option():
                self._characterexpr_()
            self._error('no available options')

    @graken()
    def _builtinexpr_(self):
        self._token('__bytes_per_word')

    @graken()
    def _numericexpr_(self):
        def block0():
            self._NUMERIC_()
        self._positive_closure(block0)

    @graken()
    def _characterexpr_(self):
        self._token("'")

        def block1():
            self._CHARACTERCONSTCHAR_()
        self._closure(block1)
        self.ast['@'] = self.last_node
        self._token("'")

    @graken()
    def _stringexpr_(self):
        self._token('"')

        def block1():
            self._STRINGCONSTCHAR_()
        self._closure(block1)
        self.ast['@'] = self.last_node
        self._token('"')

    @graken()
    def _name_(self):
        self._ALPHA_()
        self.ast['head'] = self.last_node

        def block2():
            with self._choice():
                with self._option():
                    self._ALPHA_()
                with self._option():
                    self._NUMERIC_()
                self._error('no available options')
        self._closure(block2)
        self.ast['tail'] = self.last_node

        self.ast._define(
            ['head', 'tail'],
            []
        )

    @graken()
    def _ALPHA_(self):
        self._pattern(r'[A-Za-z_\.\b]')

    @graken()
    def _NUMERIC_(self):
        self._pattern(r'[0-9]')

    @graken()
    def _CHARACTERCONSTCHAR_(self):
        self._pattern(r"([^'\*])|(\*.)")

    @graken()
    def _STRINGCONSTCHAR_(self):
        self._pattern(r'([^"\*])|(\*.)')

    @graken()
    def _exprlist_(self):
        self._expr_()
        self.ast.setlist('@', self.last_node)

        def block1():
            self._token(',')
            self._expr_()
            self.ast.setlist('@', self.last_node)
        self._closure(block1)

    @graken()
    def _namelist_(self):
        self._name_()
        self.ast.setlist('@', self.last_node)

        def block1():
            self._token(',')
            self._name_()
            self.ast.setlist('@', self.last_node)
        self._closure(block1)


class BSemantics(object):
    def program(self, ast):
        return ast

    def definition(self, ast):
        return ast

    def simpledef(self, ast):
        return ast

    def vectordef(self, ast):
        return ast

    def ivallist(self, ast):
        return ast

    def ival(self, ast):
        return ast

    def functiondef(self, ast):
        return ast

    def statement(self, ast):
        return ast

    def labelstatement(self, ast):
        return ast

    def gotostatement(self, ast):
        return ast

    def switchstatement(self, ast):
        return ast

    def casestatement(self, ast):
        return ast

    def breakstatement(self, ast):
        return ast

    def autostatement(self, ast):
        return ast

    def autovar(self, ast):
        return ast

    def extrnstatement(self, ast):
        return ast

    def compoundstatement(self, ast):
        return ast

    def ifstatement(self, ast):
        return ast

    def whilestatement(self, ast):
        return ast

    def returnstatement(self, ast):
        return ast

    def exprstatement(self, ast):
        return ast

    def nullstatement(self, ast):
        return ast

    def expr(self, ast):
        return ast

    def assignexpr(self, ast):
        return ast

    def assignop(self, ast):
        return ast

    def condexpr(self, ast):
        return ast

    def orexpr(self, ast):
        return ast

    def ortail(self, ast):
        return ast

    def xorexpr(self, ast):
        return ast

    def xortail(self, ast):
        return ast

    def andexpr(self, ast):
        return ast

    def andtail(self, ast):
        return ast

    def eqexpr(self, ast):
        return ast

    def eqtail(self, ast):
        return ast

    def eqop(self, ast):
        return ast

    def relexpr(self, ast):
        return ast

    def reltail(self, ast):
        return ast

    def relop(self, ast):
        return ast

    def shiftexpr(self, ast):
        return ast

    def shifttail(self, ast):
        return ast

    def shiftop(self, ast):
        return ast

    def addexpr(self, ast):
        return ast

    def addtail(self, ast):
        return ast

    def addop(self, ast):
        return ast

    def multexpr(self, ast):
        return ast

    def multtail(self, ast):
        return ast

    def multop(self, ast):
        return ast

    def unaryexpr(self, ast):
        return ast

    def leftunaryop(self, ast):
        return ast

    def rightunaryop(self, ast):
        return ast

    def primaryexpr(self, ast):
        return ast

    def primaryexprhead(self, ast):
        return ast

    def primaryexprtail(self, ast):
        return ast

    def variableexpr(self, ast):
        return ast

    def constantexpr(self, ast):
        return ast

    def builtinexpr(self, ast):
        return ast

    def numericexpr(self, ast):
        return ast

    def characterexpr(self, ast):
        return ast

    def stringexpr(self, ast):
        return ast

    def name(self, ast):
        return ast

    def ALPHA(self, ast):
        return ast

    def NUMERIC(self, ast):
        return ast

    def CHARACTERCONSTCHAR(self, ast):
        return ast

    def STRINGCONSTCHAR(self, ast):
        return ast

    def exprlist(self, ast):
        return ast

    def namelist(self, ast):
        return ast


def main(filename, startrule, trace=False, whitespace=None, nameguard=None):
    import json
    with open(filename) as f:
        text = f.read()
    parser = BParser(parseinfo=False)
    ast = parser.parse(
        text,
        startrule,
        filename=filename,
        trace=trace,
        whitespace=whitespace,
        nameguard=nameguard)
    print('AST:')
    print(ast)
    print()
    print('JSON:')
    print(json.dumps(ast, indent=2))
    print()


if __name__ == '__main__':
    import argparse
    import string
    import sys

    class ListRules(argparse.Action):
        def __call__(self, parser, namespace, values, option_string):
            print('Rules:')
            for r in BParser.rule_list():
                print(r)
            print()
            sys.exit(0)

    parser = argparse.ArgumentParser(description="Simple parser for B.")
    parser.add_argument('-l', '--list', action=ListRules, nargs=0,
                        help="list all rules and exit")
    parser.add_argument('-n', '--no-nameguard', action='store_true',
                        dest='no_nameguard',
                        help="disable the 'nameguard' feature")
    parser.add_argument('-t', '--trace', action='store_true',
                        help="output trace information")
    parser.add_argument('-w', '--whitespace', type=str, default=string.whitespace,
                        help="whitespace specification")
    parser.add_argument('file', metavar="FILE", help="the input file to parse")
    parser.add_argument('startrule', metavar="STARTRULE",
                        help="the start rule for parsing")
    args = parser.parse_args()

    main(
        args.file,
        args.startrule,
        trace=args.trace,
        whitespace=args.whitespace,
        nameguard=not args.no_nameguard
    )
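
# Editor's illustrative sketch (not part of the generated file): programmatic
# use of BParser, mirroring the call shape in main(). The B source string and
# start rule below are assumptions for illustration.
#
#     parser = BParser(parseinfo=False)
#     ast = parser.parse('main() { extrn putchar; putchar(65); }', 'program')
#     print(ast)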
# ============================================================================
# ryneeverett/cartridge_braintree :: cartridge_braintree/forms.py (Python, MIT)
# ============================================================================
#!/usr/bin/env python
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.utils.html import format_html
from django.forms.util import flatatt
from django.utils.encoding import force_text

from mezzanine.conf import settings

from cartridge.shop.forms import OrderForm
from cartridge.shop import checkout
from cartridge.shop.utils import make_choices

from cartridge_braintree.countries import get_country_names_list


class DataEncryptedTextInput(forms.TextInput):
    def render(self, name, value, attrs=None):
        # See django.forms.widgets.py,
        # class Input, method render()
        if value is None:
            value = ''
        if attrs is None:
            attrs = {}
        attrs['name'] = name
        attrs['autocomplete'] = 'off'
        attrs['data-encrypted-name'] = name
        final_attrs = self.build_attrs(attrs, type=self.input_type)
        # Never add the value to the HTML rendering, this field
        # will be encrypted and should remain blank if the form is
        # re-loaded!
        final_attrs['value'] = ''
        return format_html('<input{0} />', flatatt(final_attrs))


class DataEncryptedPasswordInput(DataEncryptedTextInput):
    input_type = 'password'


class BraintreeOrderForm(OrderForm):
    """
    The following changes are made to the cartridge order form:

    - Shipping and Billing country fields are rendered using
      a Select widget. This ensures the country selected can be
      converted to a valid code for Braintree's payment processing.
    - Credit Card number and CCV fields are rendered using the
      DataEncryptedTextInput and DataEncryptedPasswordInput widgets
      so that the HTML form inputs match what is required for braintree.js

    See https://www.braintreepayments.com/docs/python/guide/getting_paid
    """
    def __init__(self, request, step, data=None, initial=None, errors=None):
        OrderForm.__init__(self, request, step, data, initial, errors)
        is_first_step = step == checkout.CHECKOUT_STEP_FIRST
        is_last_step = step == checkout.CHECKOUT_STEP_LAST
        is_payment_step = step == checkout.CHECKOUT_STEP_PAYMENT
        # Get list of country names
        countries = make_choices(get_country_names_list())
        if settings.SHOP_CHECKOUT_STEPS_SPLIT:
            if is_first_step:
                # Change country widgets to a Select widget
                self.fields["billing_detail_country"].widget = forms.Select(choices=countries)
                self.fields["billing_detail_country"].initial = settings.SHOP_DEFAULT_COUNTRY
                self.fields["shipping_detail_country"].widget = forms.Select(choices=countries)
                self.fields["shipping_detail_country"].initial = settings.SHOP_DEFAULT_COUNTRY
            if is_payment_step:
                # Make card number and cvv fields use the data encrypted widget
                self.fields["card_number"].widget = DataEncryptedTextInput()
                self.fields["card_ccv"].widget = DataEncryptedPasswordInput()
        else:
            # Change country widgets to a Select widget
            self.fields["billing_detail_country"].widget = forms.Select(choices=countries)
            self.fields["billing_detail_country"].initial = settings.SHOP_DEFAULT_COUNTRY
            self.fields["shipping_detail_country"].widget = forms.Select(choices=countries)
            self.fields["shipping_detail_country"].initial = settings.SHOP_DEFAULT_COUNTRY
            if settings.SHOP_PAYMENT_STEP_ENABLED:
                # Make card number and cvv fields use the data encrypted widget
                self.fields["card_number"].widget = DataEncryptedTextInput()
                self.fields["card_ccv"].widget = DataEncryptedPasswordInput()
# ============================================================================
# masschallenge/impact-api :: web/impact/impact/tests/test_judging_round.py (Python, MIT)
# ============================================================================
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.

from impact.tests.api_test_case import APITestCase
from impact.tests.factories import JudgingRoundFactory


class TestJudgingRound(APITestCase):
    def test_str(self):
        judging_round = JudgingRoundFactory()
        judging_round_string = str(judging_round)
        assert judging_round.name in judging_round_string
        assert str(judging_round.program) in judging_round_string
# ============================================================================
# ssarangi/spiderjit :: src/ir/irbuilder.py (Python, MIT)
# ============================================================================
__author__ = 'sarangis'

from src.ir.function import *
from src.ir.module import *
from src.ir.instructions import *

BINARY_OPERATORS = {
    '+': lambda x, y: x + y,
    '-': lambda x, y: x - y,
    '*': lambda x, y: x * y,
    '**': lambda x, y: x ** y,
    '/': lambda x, y: x / y,
    '//': lambda x, y: x // y,
    '<<': lambda x, y: x << y,
    '>>': lambda x, y: x >> y,
    '%': lambda x, y: x % type(x)(y),
    '&': lambda x, y: x & y,
    '|': lambda x, y: x | y,
    '^': lambda x, y: x ^ y,
}


class IRBuilder:
    """ The main builder to be used for creating instructions.
    It has to be used to insert / create / modify instructions;
    all the other classes create instructions through it.
    """
    def __init__(self, current_module=None, context=None):
        self.__module = current_module
        self.__insertion_point = None
        self.__insertion_point_idx = 0
        self.__orphaned_instructions = []
        self.__context = context
        self.__current_bb = None

    @property
    def module(self):
        return self.__module

    @module.setter
    def module(self, mod):
        self.__module = mod

    @property
    def context(self):
        return self.__context

    @context.setter
    def context(self, ctx):
        self.__context = ctx

    def get_current_bb(self):
        assert self.__current_bb is not None
        return self.__current_bb

    def insert_after(self, ip):
        if isinstance(ip, BasicBlock):
            self.__insertion_point = ip
            self.__insertion_point_idx = 0
            self.__current_bb = ip
        elif isinstance(ip, Instruction):
            self.__insertion_point = ip
            self.__insertion_point_idx = ip.parent.find_instruction_idx(ip)
            if self.__insertion_point_idx is None:
                raise InvalidInstructionException("Could not find instruction in its parent basic block")
            else:
                self.__insertion_point_idx += 1
        else:
            raise InvalidTypeException("Expected either Basic Block or Instruction")

    def insert_before(self, ip):
        if isinstance(ip, BasicBlock):
            self.__insertion_point = ip
            self.__insertion_point_idx = -1
            self.__current_bb = ip
        elif isinstance(ip, Instruction):
            self.__insertion_point = ip
            self.__insertion_point_idx = ip.parent.find_instruction_idx(ip)
            if self.__insertion_point_idx is None:
                raise InvalidInstructionException("Could not find instruction in its parent basic block")
            elif self.__insertion_point_idx == 0:
                self.__insertion_point_idx = 0
            else:
                self.__insertion_point_idx -= 1
        else:
            raise InvalidTypeException("Expected either Basic Block or Instruction")

    def __add_instruction(self, inst):
        if self.__insertion_point_idx == -1:
            # This is an orphaned instruction
            self.__orphaned_instructions.append(inst)
        elif isinstance(self.__insertion_point, BasicBlock):
            self.__insertion_point.instructions.append(inst)
            self.__insertion_point = inst
        elif isinstance(self.__insertion_point, Instruction):
            bb = self.__insertion_point.parent
            bb.instructions.insert(self.__insertion_point_idx + 1, inst)
            self.__insertion_point_idx += 1
            self.__insertion_point = inst
        else:
            raise Exception("Could not add instruction")

    def const_fold_binary_op(self, lhs, rhs, op):
        return None
        # if isinstance(lhs, Number) and isinstance(rhs, Number):
        #     lhs = lhs.number
        #     rhs = rhs.number
        #     result = BINARY_OPERATORS[op](lhs, rhs)
        #     return Number(result)
        # else:
        #     return None

    def create_function(self, name, args):
        f = Function(name, args)
        self.__module.functions[name] = f
        return f

    def set_entry_point(self, function):
        self.__module.entry_point = function

    def create_global(self, name, initializer):
        g = Global(name, initializer)
        self.__module.add_global(g)

    def create_basic_block(self, name, parent):
        bb = BasicBlock(name, parent)
        return bb

    def create_return(self, value=None, name=None):
        ret_inst = ReturnInstruction(value)
        self.__add_instruction(ret_inst)

    def create_branch(self, bb, name=None):
        if not isinstance(bb, BasicBlock):
            raise InvalidTypeException("Expected a Basic Block")
        branch_inst = BranchInstruction(bb, self.__current_bb, name)
        self.__add_instruction(branch_inst)
        return branch_inst

    def create_cond_branch(self, cmp_inst, value, bb_true, bb_false, name=None):
        cond_branch = ConditionalBranchInstruction(cmp_inst, value, bb_true, bb_false, self.__current_bb, name)
        self.__add_instruction(cond_branch)
        return cond_branch

    def create_call(self, func, args, name=None):
        call_inst = CallInstruction(func, args, self.__current_bb, name)
        self.__add_instruction(call_inst)
        return call_inst

    def create_add(self, lhs, rhs, name=None):
        folded_inst = self.const_fold_binary_op(lhs, rhs, '+')
        if folded_inst is not None:
            return folded_inst

        add_inst = AddInstruction(lhs, rhs, self.__current_bb, name)
        self.__add_instruction(add_inst)
        return add_inst

    def create_sub(self, lhs, rhs, name=None):
        folded_inst = self.const_fold_binary_op(lhs, rhs, '-')
        if folded_inst is not None:
            return folded_inst

        sub_inst = SubInstruction(lhs, rhs, self.__current_bb, name)
        self.__add_instruction(sub_inst)
        return sub_inst

    def create_mul(self, lhs, rhs, name=None):
        folded_inst = self.const_fold_binary_op(lhs, rhs, '*')
        if folded_inst is not None:
            return folded_inst

        mul_inst = MulInstruction(lhs, rhs, self.__current_bb, name)
        self.__add_instruction(mul_inst)
        return mul_inst

    def create_div(self, lhs, rhs, name=None):
        folded_inst = self.const_fold_binary_op(lhs, rhs, '/')
        if folded_inst is not None:
            return folded_inst

        div_inst = DivInstruction(lhs, rhs, self.__current_bb, name)
        self.__add_instruction(div_inst)
        return div_inst

    def create_icmp(self, lhs, rhs, comparator, name=None):
        # Pass the comparator through; the original hard-coded
        # CompareTypes.SLE and silently ignored this argument.
        icmp_inst = ICmpInstruction(comparator, lhs, rhs, self.__current_bb, name)
        self.__add_instruction(icmp_inst)
        return icmp_inst

    def create_select(self, cond, val_true, val_false, name=None):
        select_inst = SelectInstruction(cond, val_true, val_false, self.__current_bb, name)
        self.__add_instruction(select_inst)
        return select_inst

    def create_alloca(self, numEls=None, name=None):
        alloca_inst = AllocaInstruction(numEls, self.__current_bb, name)
        self.__add_instruction(alloca_inst)
        return alloca_inst

    def create_load(self, alloca):
        load_inst = LoadInstruction(alloca, parent=self.__current_bb)
        self.__add_instruction(load_inst)
        return load_inst

    def create_store(self, alloca, value):
        store_inst = StoreInstruction(alloca, value, parent=self.__current_bb)
        self.__add_instruction(store_inst)
        return store_inst

    def create_shl(self, op1, op2, name=None):
        folded_inst = self.const_fold_binary_op(op1, op2, '<<')
        if folded_inst is not None:
            return folded_inst

        shl_inst = ShiftLeftInstruction(op1, op2, self.__current_bb, name)
        self.__add_instruction(shl_inst)
        return shl_inst

    def create_lshr(self, op1, op2, name=None):
        folded_inst = self.const_fold_binary_op(op1, op2, '>>')
        if folded_inst is not None:
            return folded_inst

        lshr_inst = LogicalShiftRightInstruction(op1, op2, self.__current_bb, name)
        self.__add_instruction(lshr_inst)
        return lshr_inst

    def create_ashr(self, op1, op2, name=None):
        ashr_inst = ArithmeticShiftRightInstruction(op1, op2, self.__current_bb, name)
        self.__add_instruction(ashr_inst)
        return ashr_inst

    def create_and(self, op1, op2, name=None):
        folded_inst = self.const_fold_binary_op(op1, op2, '&')
        if folded_inst is not None:
            return folded_inst

        and_inst = AndInstruction(op1, op2, self.__current_bb, name)
        self.__add_instruction(and_inst)
        return and_inst

    def create_or(self, op1, op2, name=None):
        folded_inst = self.const_fold_binary_op(op1, op2, '|')
        if folded_inst is not None:
            return folded_inst

        or_inst = OrInstruction(op1, op2, self.__current_bb, name)
        self.__add_instruction(or_inst)
        return or_inst

    def create_xor(self, op1, op2, name=None):
        folded_inst = self.const_fold_binary_op(op1, op2, '^')
        if folded_inst is not None:
            return folded_inst

        xor_inst = XorInstruction(op1, op2, self.__current_bb, name)
        self.__add_instruction(xor_inst)
        return xor_inst

    def create_number(self, number):
        number = Number(number)
        return number

    def create_string(self, string):
        string_obj = String(string)
        return string_obj

    # def create_vector(self, baseTy, numElts, name=None):
    #     vecTy = VectorType(baseTy, numElts)
    #     alloca = self.create_alloca(vecTy, 1, None, name)
    #     vec = self.create_load(alloca)
    #     return vec
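
# Editor's illustrative sketch (not in the original file): driving the builder
# API above. Module, Function and BasicBlock come from the star-imports at the
# top of this file; their exact constructor signatures are assumed here.
#
#     module = Module('demo')
#     builder = IRBuilder(module)
#     fn = builder.create_function('add2', ['a', 'b'])
#     entry = builder.create_basic_block('entry', fn)
#     builder.insert_after(entry)
#     total = builder.create_add(builder.create_number(1), builder.create_number(2))
#     builder.create_return(total)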
# ============================================================================
# MatthieuDartiailh/pyvisa :: visa.py (Python, MIT)
# ============================================================================
# -*- coding: utf-8 -*-
"""
    pyvisa.visa
    ~~~~~~~~~~~

    Module to provide an import shortcut for the most common VISA operations.

    This file is part of PyVISA.

    :copyright: 2014 by PyVISA Authors, see AUTHORS for more details.
    :license: MIT, see COPYING for more details.
"""

from __future__ import division, unicode_literals, print_function, absolute_import

from pyvisa import logger, __version__, log_to_screen, constants
from pyvisa.highlevel import ResourceManager
from pyvisa.errors import (Error, VisaIOError, VisaIOWarning, VisaTypeError,
                           UnknownHandler, OSNotSupported, InvalidBinaryFormat,
                           InvalidSession, LibraryError)

# This is needed to register all resources.
from pyvisa.resources import Resource

if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='PyVISA command-line utilities')
    parser.add_argument('--backend', '-b', dest='backend', action='store', default=None,
                        help='backend to be used (default: ni)')

    subparsers = parser.add_subparsers(title='command', dest='command')

    info_parser = subparsers.add_parser('info', help='print information to diagnose PyVISA')

    console_parser = subparsers.add_parser('shell', help='start the PyVISA console')

    args = parser.parse_args()
    if args.command == 'info':
        from pyvisa import util
        util.get_debug_info()
    elif args.command == 'shell':
        from pyvisa import shell
        shell.main('@' + args.backend if args.backend else '')
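
# Editor's illustrative sketch (not part of the original file): the shortcut
# import this module provides is typically used like this. Requires a VISA
# backend and an attached instrument; the resource address is an example.
#
#     import visa
#     rm = visa.ResourceManager()
#     print(rm.list_resources())
#     inst = rm.open_resource('GPIB0::14::INSTR')
#     print(inst.query('*IDN?'))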
# ============================================================================
# pratikmshah/practice :: py-data-analysis/ftpDownloader-test.py (Python, MIT)
# ============================================================================
# -*- coding: utf-8 -*-
"""
Created on Sat May 21 16:43:47 2016

@author: Pratik
"""

from ftplib import FTP
import os


# log in to an FTP site and download a file (the default params point at the
# course's demo server)
def ftpDownloader(filename, host="ftp.pyclass.com", user="[email protected]", passwd="student123"):
    ftp = FTP(host)  # connect to the FTP host
    ftp.login(user, passwd)  # login with username and password
    ftp.cwd('Data')  # change remote directory to Data
    os.chdir("/Users/Pratik/Documents/Pratik/Work/practice/py-data-analysis")  # local working dir
    print(ftp.nlst())  # print list of all files in the remote dir
    with open(filename, 'wb') as file:  # open local file for binary writing
        ftp.retrbinary('RETR %s' % filename, file.write)  # stream the remote file's contents into it
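
# Editor's illustrative sketch (not in the original file): calling the helper
# above with its baked-in defaults. Needs network access to the
# ftp.pyclass.com demo host, which may no longer exist; the remote filename
# below is hypothetical.
#
#     ftpDownloader('pr5.txt')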
# ============================================================================
# caw13/adventofcode :: python/day_three_part2.py (Python, MIT)
# ============================================================================
class PresentDeliverer:
    # Class-level dict, deliberately shared by all deliverers so that houses
    # visited by either Santa or RoboSanta are only counted once.
    present_locations = {}

    def __init__(self, name):
        self.name = name
        self.x = 0
        self.y = 0
        self.present_locations[self.get_key()] = 1

    def get_key(self):
        return str(self.x) + "-" + str(self.y)

    def status(self):
        print(self.name + " x: " + str(self.x) + " y: " + str(self.y))

    def move(self, instruction):
        if instruction == ">":
            self.x += 1
        elif instruction == "<":
            self.x -= 1
        elif instruction == "^":
            self.y += 1
        else:
            self.y -= 1
        self.present_locations[self.get_key()] = 1

    def unique_houses(self):
        print("Unique houses: " + str(len(self.present_locations.keys())))


filename = "..\inputs\day_three_input.txt"
f = open(filename)
input_line = f.readline()

santa = PresentDeliverer("Santa")
robo = PresentDeliverer("RoboSanta")

# Santa takes the odd-numbered instructions, RoboSanta the even-numbered ones.
instruction_count = 0
for c in input_line:
    instruction_count += 1
    if (instruction_count % 2):
        santa.move(c)
    else:
        robo.move(c)

santa.unique_houses()
# ============================================================================
# shrekshao/Polyhedron3D :: assets/models/test/txt2json_parser.py (Python, MIT)
# ============================================================================
import json
from sets import Set
from sys import maxint
import math


# tmp hacky functions for vec3
def norm2(a):
    return dot(a, a)


def dot(a, b):
    return a[0] * b[0] + a[1] * b[1] + a[2] * b[2]


def area(a, b, c):
    u = [b[0] - a[0], b[1] - a[1], b[2] - a[2]]
    v = [c[0] - a[0], c[1] - a[1], c[2] - a[2]]
    dot_uv = dot(u, v)
    cross2 = norm2(u) * norm2(v) - dot_uv * dot_uv
    return math.sqrt(cross2) * 0.5
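
# Editor's note (not in the original file): quick sanity check of the helpers
# above on a unit right triangle in the z=0 plane.
assert dot([1, 0, 0], [0, 1, 0]) == 0
assert area([0, 0, 0], [1, 0, 0], [0, 1, 0]) == 0.5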

class DiagramJson:
    def __init__(self):
        self.json = {
            'form': {
                'vertices': {},
                'vertices_2_force_faces': {},  # face array
                'vertices_2_force_cells': {},
                'vertices_external': None,  # converted from set: vid: 1
                'edges': {}
            },
            'force': {
                'vertices': {},
                'edges': {},
                'faces_e': {},
                'faces_v': {},
                'cells': {}
            },
            'strength_scaler': {
                'min': maxint,
                'max': 0
            },
            'force_face_2_strength': {}
        }


class Txt2JsonParser:
    def __init__(self):
        self.diagramJson = DiagramJson()

        # # tmp data structures used only when parsing
        # self.form_edge_2_vertex = {}

        # inverse index, for calculating edge width i.e. area of faces (strength)
        self.force_face_2_form_edge = {}
        # self.form_vertex_external_count = {}  # vid: count - 0, 1, 2

    def readFormVertex(self, filename):
        f = open(filename)
        v = self.diagramJson.json['form']['vertices']
        v2fa = self.diagramJson.json['form']['vertices_2_force_faces']
        for line in f:
            vertex = line.strip().split('\t')
            # print vertex
            v[vertex[0]] = map(float, vertex[1:])
            # create array for form_vertices to force_face array (cells)
            v2fa[vertex[0]] = []
        # print self.diagramJson.json
        f.close()

    def readFormEdge(self, filename_edge_vertex, filename_edge_to_force_face, filename_edge_ex):
        f_edge_vertex = open(filename_edge_vertex)
        edges = self.diagramJson.json['form']['edges']
        for line in f_edge_vertex:
            edge = line.strip().split('\t')
            e = edges[edge[0]] = {}
            e['vertex'] = edge[1:]
            # e['external'] = False
            # print edge[0], e['vertex']
        # print edges
        f_edge_vertex.close()

        v2fa = self.diagramJson.json['form']['vertices_2_force_faces']

        f_edge_to_force_face = open(filename_edge_to_force_face)
        for line in f_edge_to_force_face:
            edge = line.strip().split('\t')
            f = edge[1] if edge[1] != "Null" else None
            edges[edge[0]]['force_face'] = f
            edge_vertex = edges[edge[0]]['vertex']
            for v in edge_vertex:
                v2fa[v].append(f)
            # force_face_2_form_edge (tmp structure) for computing strength
            if f is not None:
                self.force_face_2_form_edge[f] = edge[0]
        f_edge_to_force_face.close()

        vertex_ex_set = Set()

        f_edge_ex = open(filename_edge_ex)
        for line in f_edge_ex:
            edge = line.strip().split('\t')
            for e in edge:
                edges[e]['external'] = True
                vertex_ex_set.add(edges[e]['vertex'][0])
                vertex_ex_set.add(edges[e]['vertex'][1])
        f_edge_ex.close()

        self.diagramJson.json['form']['vertices_external'] = dict.fromkeys(vertex_ex_set, 1)

        # label external force edge
        for e in edges:
            is_ex_vertex_0 = edges[e]['vertex'][0] in vertex_ex_set
            is_ex_vertex_1 = edges[e]['vertex'][1] in vertex_ex_set
            if is_ex_vertex_0 != is_ex_vertex_1:
                # print edges[e]['vertex'][0], ':', is_ex_vertex_0, ' , ', edges[e]['vertex'][1], ':', is_ex_vertex_1
                # force vector: from v0 to v1
                edges[e]['ex_force'] = True

        # print edges
        # print self.diagramJson.json

    def readForceVertex(self, filename):
        f = open(filename)
        v = self.diagramJson.json['force']['vertices']
        for line in f:
            vertex = line.strip().split('\t')
            # print vertex
            v[vertex[0]] = map(float, vertex[1:])
        # print self.diagramJson.json
        f.close()

    def readForceEdge(self, filename_edge_vertex):
        f_edge_vertex = open(filename_edge_vertex)
        edges = self.diagramJson.json['force']['edges']
        for line in f_edge_vertex:
            edge = line.strip().split('\t')
            edges[edge[0]] = edge[1:]
        # print edges
        f_edge_vertex.close()
        # print self.diagramJson.json

    def readForceFaceEdge(self, filename_face_edge):
        f_face_edge = open(filename_face_edge)
        edges = self.diagramJson.json['force']['edges']
        faces_e = self.diagramJson.json['force']['faces_e']
        # faces_v = self.diagramJson.json['force']['faces_v']
        for line in f_face_edge:
            face = line.strip().split('\t')
            faces_e[face[0]] = face[1:]
            # # convert face edge to face vertex
            # cur_face_vertex = Set()
            # for e in face[1:]:
            #     # extend vertex array
            #     # cur_face_vertex.extend(edges[e])
            #     for v in edges[e]:
            #         cur_face_vertex.add(v)
            # faces_v[face[0]] = list(cur_face_vertex)
            # print faces_v[face[0]]
        f_face_edge.close()
        # print self.diagramJson.json

    def readForceFaceVertex(self, filename_face_vertex):
        f_face_vertex = open(filename_face_vertex)
        # fan shape order
        faces_v = self.diagramJson.json['force']['faces_v']
        strengthScaler = self.diagramJson.json['strength_scaler']
        force_face_2_strength = self.diagramJson.json['force_face_2_strength']
        v = self.diagramJson.json['force']['vertices']
        e = self.diagramJson.json['form']['edges']
        for line in f_face_vertex:
            face = line.strip().split('\t')
            faces_v[face[0]] = face[1:]

            strength = 0
            if len(face) == 4:
                # tri
                strength = area(v[face[1]], v[face[2]], v[face[3]])
            elif len(face) == 5:
                # quad
                strength = area(v[face[1]], v[face[2]], v[face[3]]) + area(v[face[1]], v[face[3]], v[face[4]])
            else:
                print 'Error: face ', face[0], ' is not tri or quad!!'

            # if face[0] == '17f' or face[0] == '19f':
            #     print face[0], face[1:], map(lambda vid: v[vid], face[1:]), area(v[face[1]], v[face[2]], v[face[3]]), strength

            # e[self.force_face_2_form_edge[face[0]]]['strength'] = strength
            force_face_2_strength[face[0]] = strength

            curEdge = e[self.force_face_2_form_edge[face[0]]]
            if 'external' not in curEdge and 'ex_force' not in curEdge:
                strengthScaler['max'] = max(strength, strengthScaler['max'])
                strengthScaler['min'] = min(strength, strengthScaler['min'])
        f_face_vertex.close()


if __name__ == "__main__":
    # foldername = "example_01"
    # foldername = "example_02"
    # foldername = "example_03"
    foldername = "example_04"

    parser = Txt2JsonParser()
    parser.readFormVertex(foldername + "/form_v.txt")
    parser.readFormEdge(foldername + "/form_e_v.txt",
                        foldername + "/form_e_to_force_f.txt",
                        foldername + "/form_e_ex.txt")
    parser.readForceVertex(foldername + "/force_v.txt")
    parser.readForceEdge(foldername + "/force_e_v.txt")
    # parser.readForceFaceEdge(foldername + "/force_f_e.txt")
    parser.readForceFaceVertex(foldername + "/force_f_v.txt")

    with open(foldername + '/diagram.json', 'w') as out:
        json.dump(parser.diagramJson.json, out)
# ============================================================================
# tschaume/pymatgen :: pymatgen/analysis/chemenv/coordination_environments/tests/test_coordination_geometries.py (Python, MIT)
# ============================================================================
#!/usr/bin/env python

__author__ = 'waroquiers'

import unittest
import numpy as np

from pymatgen.util.testing import PymatgenTest
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import ExplicitPermutationsAlgorithm
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import SeparationPlane
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import AllCoordinationGeometries
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import CoordinationGeometry

allcg = AllCoordinationGeometries()


class FakeSite:
    def __init__(self, coords):
        self.coords = coords


class CoordinationGeometriesTest(PymatgenTest):

    def test_algorithms(self):
        expl_algo = ExplicitPermutationsAlgorithm(permutations=[[0, 1, 2], [1, 2, 3]])
        expl_algo2 = ExplicitPermutationsAlgorithm.from_dict(expl_algo.as_dict)
        self.assertEqual(expl_algo.permutations, expl_algo2.permutations)

        sepplane_algos_oct = allcg['O:6'].algorithms
        self.assertEqual(len(sepplane_algos_oct[0].safe_separation_permutations()), 24)
        self.assertEqual(len(sepplane_algos_oct[1].safe_separation_permutations()), 36)

        sepplane_algos_oct_0 = SeparationPlane.from_dict(sepplane_algos_oct[0].as_dict)
        self.assertEqual(sepplane_algos_oct[0].plane_points, sepplane_algos_oct_0.plane_points)
        self.assertEqual(sepplane_algos_oct[0].mirror_plane, sepplane_algos_oct_0.mirror_plane)
        self.assertEqual(sepplane_algos_oct[0].ordered_plane, sepplane_algos_oct_0.ordered_plane)
        self.assertEqual(sepplane_algos_oct[0].point_groups, sepplane_algos_oct_0.point_groups)
        self.assertEqual(sepplane_algos_oct[0].ordered_point_groups, sepplane_algos_oct_0.ordered_point_groups)
        self.assertTrue(all([np.array_equal(perm, sepplane_algos_oct_0.explicit_optimized_permutations[iperm])
                             for iperm, perm in enumerate(sepplane_algos_oct[0].explicit_optimized_permutations)]))

        self.assertEqual(sepplane_algos_oct[0].__str__(),
                         'Separation plane algorithm with the following reference separation :\n'
                         '[[4]] | [[0, 2, 1, 3]] | [[5]]')

    def test_hints(self):
        hints = CoordinationGeometry.NeighborsSetsHints(hints_type='single_cap',
                                                        options={'cap_index': 2, 'csm_max': 8})
        myhints = hints.hints({'csm': 12.0})
        self.assertEqual(myhints, [])

        hints2 = CoordinationGeometry.NeighborsSetsHints.from_dict(hints.as_dict())
        self.assertEqual(hints.hints_type, hints2.hints_type)
        self.assertEqual(hints.options, hints2.options)

    def test_coordination_geometry(self):
        cg_oct = allcg['O:6']
        cg_oct2 = CoordinationGeometry.from_dict(cg_oct.as_dict())

        self.assertArrayAlmostEqual(cg_oct.central_site, cg_oct2.central_site)
        self.assertArrayAlmostEqual(cg_oct.points, cg_oct2.points)
        self.assertEqual(cg_oct.__str__(), 'Coordination geometry type : Octahedron (IUPAC: OC-6 || IUCr: [6o])\n'
                                           '\n'
                                           '  - coordination number : 6\n'
                                           '  - list of points :\n'
                                           '    - [0.0, 0.0, 1.0]\n'
                                           '    - [0.0, 0.0, -1.0]\n'
                                           '    - [1.0, 0.0, 0.0]\n'
                                           '    - [-1.0, 0.0, 0.0]\n'
                                           '    - [0.0, 1.0, 0.0]\n'
                                           '    - [0.0, -1.0, 0.0]\n'
                                           '------------------------------------------------------------\n')

        self.assertEqual(cg_oct.__len__(), 6)
        self.assertEqual(cg_oct.ce_symbol, cg_oct.mp_symbol)
        self.assertTrue(cg_oct.is_implemented())
        self.assertEqual(cg_oct.get_name(), 'Octahedron')
        self.assertEqual(cg_oct.IUPAC_symbol, 'OC-6')
        self.assertEqual(cg_oct.IUPAC_symbol_str, 'OC-6')
        self.assertEqual(cg_oct.IUCr_symbol, '[6o]')
        self.assertEqual(cg_oct.IUCr_symbol_str, '[6o]')

        cg_oct.permutations_safe_override = True
        self.assertEqual(cg_oct.number_of_permutations, 720.0)
        self.assertEqual(cg_oct.ref_permutation([0, 3, 2, 4, 5, 1]), (0, 3, 1, 5, 2, 4))

        sites = [FakeSite(coords=pp) for pp in cg_oct.points]

        faces = [[[0.0, 0.0, 1.0], [1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
                 [[0.0, 0.0, 1.0], [1.0, 0.0, 0.0], [0.0, 0.0, -1.0]],
                 [[0.0, 0.0, 1.0], [0.0, 1.0, 0.0], [0.0, -1.0, 0.0]],
                 [[0.0, 0.0, 1.0], [0.0, 1.0, 0.0], [0.0, 0.0, -1.0]],
                 [[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
                 [[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 0.0, -1.0]],
                 [[-1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, -1.0, 0.0]],
                 [[-1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, -1.0]]]
        self.assertArrayAlmostEqual(cg_oct.faces(sites=sites, permutation=[0, 3, 2, 4, 5, 1]), faces)

        faces = [[[0.0, 0.0, 1.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
                 [[0.0, 0.0, 1.0], [1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
                 [[0.0, 0.0, 1.0], [-1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
                 [[0.0, 0.0, 1.0], [-1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
                 [[0.0, 0.0, -1.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
                 [[0.0, 0.0, -1.0], [1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
                 [[0.0, 0.0, -1.0], [-1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
                 [[0.0, 0.0, -1.0], [-1.0, 0.0, 0.0], [0.0, -1.0, 0.0]]]
        self.assertArrayAlmostEqual(cg_oct.faces(sites=sites), faces)

        edges = [[[0.0, 0.0, 1.0], [1.0, 0.0, 0.0]],
                 [[0.0, 0.0, 1.0], [0.0, 1.0, 0.0]],
                 [[0.0, 0.0, 1.0], [0.0, -1.0, 0.0]],
                 [[0.0, 0.0, 1.0], [0.0, 0.0, -1.0]],
                 [[-1.0, 0.0, 0.0], [1.0, 0.0, 0.0]],
                 [[-1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
                 [[-1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
                 [[-1.0, 0.0, 0.0], [0.0, 0.0, -1.0]],
                 [[1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
                 [[1.0, 0.0, 0.0], [0.0, 0.0, -1.0]],
                 [[0.0, 1.0, 0.0], [0.0, -1.0, 0.0]],
                 [[0.0, 1.0, 0.0], [0.0, 0.0, -1.0]]]
        self.assertArrayAlmostEqual(cg_oct.edges(sites=sites, permutation=[0, 3, 2, 4, 5, 1]), edges)

        edges = [[[0.0, 0.0, 1.0], [1.0, 0.0, 0.0]],
                 [[0.0, 0.0, 1.0], [-1.0, 0.0, 0.0]],
                 [[0.0, 0.0, 1.0], [0.0, 1.0, 0.0]],
                 [[0.0, 0.0, 1.0], [0.0, -1.0, 0.0]],
                 [[0.0, 0.0, -1.0], [1.0, 0.0, 0.0]],
                 [[0.0, 0.0, -1.0], [-1.0, 0.0, 0.0]],
                 [[0.0, 0.0, -1.0], [0.0, 1.0, 0.0]],
                 [[0.0, 0.0, -1.0], [0.0, -1.0, 0.0]],
                 [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
                 [[1.0, 0.0, 0.0], [0.0, -1.0, 0.0]],
                 [[-1.0, 0.0, 0.0], [0.0, 1.0, 0.0]],
                 [[-1.0, 0.0, 0.0], [0.0, -1.0, 0.0]]]
        self.assertArrayAlmostEqual(cg_oct.edges(sites=sites), edges)

        self.assertArrayAlmostEqual(cg_oct.solid_angles(),
                                    [2.0943951, 2.0943951, 2.0943951, 2.0943951, 2.0943951, 2.0943951])

        pmeshes = cg_oct.get_pmeshes(sites=sites)
        self.assertEqual(pmeshes[0]['pmesh_string'],
                         '14\n       0.00000000      0.00000000      1.00000000\n'
                         '       0.00000000      0.00000000     -1.00000000\n'
                         '       1.00000000      0.00000000      0.00000000\n'
                         '      -1.00000000      0.00000000      0.00000000\n'
                         '       0.00000000      1.00000000      0.00000000\n'
                         '       0.00000000     -1.00000000      0.00000000\n'
                         '       0.33333333      0.33333333      0.33333333\n'
                         '       0.33333333     -0.33333333      0.33333333\n'
                         '      -0.33333333      0.33333333      0.33333333\n'
                         '      -0.33333333     -0.33333333      0.33333333\n'
                         '       0.33333333      0.33333333     -0.33333333\n'
                         '       0.33333333     -0.33333333     -0.33333333\n'
                         '      -0.33333333      0.33333333     -0.33333333\n'
                         '      -0.33333333     -0.33333333     -0.33333333\n'
                         '8\n4\n0\n2\n4\n0\n4\n0\n2\n5\n0\n4\n0\n3\n4\n0\n'
                         '4\n0\n3\n5\n0\n4\n1\n2\n4\n1\n4\n1\n2\n5\n1\n4\n'
                         '1\n3\n4\n1\n4\n1\n3\n5\n1\n')

        allcg_str = allcg.__str__()
        self.assertTrue('\n#=======================================================#\n'
                        '# List of coordination geometries currently implemented #\n'
                        '#=======================================================#\n'
                        '\nCoordination geometry type : Single neighbor (IUCr: [1l])\n\n'
                        '  - coordination number : 1\n'
                        '  - list of points :\n'
                        '    - [0.0, 0.0, 1.0]\n'
                        '------------------------------------------------------------\n\n' in allcg_str)
        self.assertTrue('Coordination geometry type : Trigonal plane (IUPAC: TP-3 || IUCr: [3l])\n\n'
                        '  - coordination number : 3\n'
                        '  - list of points :\n' in allcg_str)

        all_symbols = [u'S:1', u'L:2', u'A:2', u'TL:3', u'TY:3', u'TS:3', u'T:4', u'S:4', u'SY:4', u'SS:4',
                       u'PP:5', u'S:5', u'T:5', u'O:6', u'T:6', u'PP:6', u'PB:7', u'ST:7', u'ET:7', u'FO:7',
                       u'C:8', u'SA:8', u'SBT:8', u'TBT:8', u'DD:8', u'DDPN:8', u'HB:8', u'BO_1:8', u'BO_2:8',
                       u'BO_3:8', u'TC:9', u'TT_1:9', u'TT_2:9', u'TT_3:9', u'HD:9', u'TI:9', u'SMA:9', u'SS:9',
                       u'TO_1:9', u'TO_2:9', u'TO_3:9', u'PP:10', u'PA:10', u'SBSA:10', u'MI:10', u'S:10',
                       u'H:10', u'BS_1:10', u'BS_2:10', u'TBSA:10', u'PCPA:11', u'H:11', u'SH:11', u'CO:11',
                       u'DI:11', u'I:12', u'PBP:12', u'TT:12', u'C:12', u'AC:12', u'SC:12', u'S:12', u'HP:12',
                       u'HA:12', u'SH:13', u'DD:20', u'UNKNOWN', u'UNCLEAR']

        self.assertEqual(len(allcg.get_geometries()), 68)
        self.assertEqual(len(allcg.get_geometries(coordination=3)), 3)
        self.assertEqual(sorted(allcg.get_geometries(returned='mp_symbol')), sorted(all_symbols))
        self.assertEqual(sorted(allcg.get_geometries(returned='mp_symbol', coordination=3)),
                         ['TL:3', 'TS:3', 'TY:3'])

        self.assertEqual(allcg.get_symbol_name_mapping(coordination=3),
                         {u'TY:3': u'Triangular non-coplanar', u'TL:3': u'Trigonal plane', u'TS:3': u'T-shaped'})
        self.assertEqual(allcg.get_symbol_cn_mapping(coordination=3),
                         {u'TY:3': 3, u'TL:3': 3, u'TS:3': 3})
        self.assertEqual(sorted(allcg.get_implemented_geometries(coordination=4, returned='mp_symbol')),
                         [u'S:4', u'SS:4', u'SY:4', u'T:4'])
        self.assertEqual(sorted(allcg.get_not_implemented_geometries(returned='mp_symbol')),
                         [u'CO:11', u'DD:20', u'H:10', u'S:10', u'S:12', u'UNCLEAR', u'UNKNOWN'])

        self.assertEqual(allcg.get_geometry_from_name('Octahedron').mp_symbol, cg_oct.mp_symbol)
        with self.assertRaises(LookupError) as cm:
            allcg.get_geometry_from_name('Octahedran')
        self.assertEqual(str(cm.exception), 'No coordination geometry found with name "Octahedran"')

        self.assertEqual(allcg.get_geometry_from_IUPAC_symbol('OC-6').mp_symbol, cg_oct.mp_symbol)
        with self.assertRaises(LookupError) as cm:
            allcg.get_geometry_from_IUPAC_symbol('OC-7')
        self.assertEqual(str(cm.exception), 'No coordination geometry found with IUPAC symbol "OC-7"')

        self.assertEqual(allcg.get_geometry_from_IUCr_symbol('[6o]').mp_symbol, cg_oct.mp_symbol)
        with self.assertRaises(LookupError) as cm:
            allcg.get_geometry_from_IUCr_symbol('[6oct]')
        self.assertEqual(str(cm.exception), 'No coordination geometry found with IUCr symbol "[6oct]"')

        with self.assertRaises(LookupError) as cm:
            allcg.get_geometry_from_mp_symbol('O:7')
        self.assertEqual(str(cm.exception), 'No coordination geometry found with mp_symbol "O:7"')

        self.assertEqual(allcg.pretty_print(maxcn=4),
                         '+-------------------------+\n| Coordination geometries |\n+-------------------------+\n'
                         '\n==>> CN = 1 <<==\n - S:1 : Single neighbor\n\n'
                         '==>> CN = 2 <<==\n'
                         ' - L:2 : Linear\n - A:2 : Angular\n\n'
                         '==>> CN = 3 <<==\n'
                         ' - TL:3 : Trigonal plane\n - TY:3 : Triangular non-coplanar\n - TS:3 : T-shaped\n\n'
                         '==>> CN = 4 <<==\n - T:4 : Tetrahedron\n - S:4 : Square plane\n'
                         ' - SY:4 : Square non-coplanar\n - SS:4 : See-saw\n\n')
        self.assertEqual(allcg.pretty_print(maxcn=2, type='all_geometries_latex'),
                         '\\subsection*{Coordination 1}\n\n\\begin{itemize}\n'
                         '\\item S:1 $\\rightarrow$ Single neighbor (IUPAC : None - IUCr : $[$1l$]$)\n'
                         '\\end{itemize}\n\n\\subsection*{Coordination 2}\n\n\\begin{itemize}\n'
                         '\\item L:2 $\\rightarrow$ Linear (IUPAC : L-2 - IUCr : $[$2l$]$)\n'
                         '\\item A:2 $\\rightarrow$ Angular (IUPAC : A-2 - IUCr : $[$2n$]$)\n'
                         '\\end{itemize}\n\n')
        self.assertEqual(allcg.pretty_print(maxcn=2, type='all_geometries_latex_images'),
                         '\\section*{Coordination 1}\n\n\\subsubsection*{S:1 : Single neighbor}\n\n'
                         'IUPAC : None\n\nIUCr : [1l]\n\n\\begin{center}\n'
                         '\\includegraphics[scale=0.15]{images/S_1.png}\n'
                         '\\end{center}\n\n\\section*{Coordination 2}\n\n'
                         '\\subsubsection*{L:2 : Linear}\n\nIUPAC : L-2\n\n'
                         'IUCr : [2l]\n\n\\begin{center}\n\\includegraphics[scale=0.15]{images/L_2.png}\n'
                         '\\end{center}\n\n\\subsubsection*{A:2 : Angular}\n\nIUPAC : A-2\n\nIUCr : [2n]\n\n'
                         '\\begin{center}\n\\includegraphics[scale=0.15]{images/A_2.png}\n\\end{center}\n\n')

        self.assertDictEqual(allcg.minpoints, {6: 2, 7: 2, 8: 2, 9: 2, 10: 2, 11: 2, 12: 2, 13: 3})
        self.assertDictEqual(allcg.maxpoints, {6: 5, 7: 5, 8: 6, 9: 7, 10: 6, 11: 5, 12: 8, 13: 6})
        self.assertDictEqual(allcg.maxpoints_inplane, {6: 5, 7: 5, 8: 6, 9: 7, 10: 6, 11: 5, 12: 8, 13: 6})
        self.assertDictEqual(allcg.separations_cg,
                             {6: {(0, 3, 3): [u'O:6', u'T:6'],
                                  (1, 4, 1): [u'O:6'],
                                  (0, 5, 1): [u'PP:6'],
                                  (2, 2, 2): [u'PP:6'],
                                  (0, 4, 2): [u'T:6']},
                              7: {(1, 3, 3): [u'ET:7', u'FO:7'],
                                  (2, 3, 2): [u'PB:7', u'ST:7', u'ET:7'],
                                  (1, 4, 2): [u'ST:7', u'FO:7'],
                                  (1, 5, 1): [u'PB:7']},
                              8: {(1, 6, 1): [u'HB:8'],
                                  (0, 4, 4): [u'C:8', u'SA:8', u'SBT:8'],
                                  (1, 4, 3): [u'SA:8', u'SBT:8', u'BO_2:8', u'BO_3:8'],
                                  (2, 4, 2): [u'C:8', u'TBT:8', u'DD:8', u'DDPN:8', u'HB:8',
                                              u'BO_1:8', u'BO_1:8', u'BO_2:8', u'BO_2:8',
                                              u'BO_3:8', u'BO_3:8']},
                              9: {(3, 3, 3): [u'TT_1:9', u'TT_1:9', u'TT_2:9', u'SMA:9',
                                              u'SMA:9', u'TO_1:9', u'TO_3:9'],
                                  (0, 6, 3): [u'TC:9'],
                                  (2, 4, 3): [u'TC:9', u'TT_2:9', u'TT_3:9', u'TI:9',
                                              u'SS:9', u'TO_1:9', u'TO_1:9', u'TO_2:9',
                                              u'TO_3:9'],
                                  (1, 3, 5): [u'TI:9'],
                                  (1, 4, 4): [u'TT_1:9', u'SMA:9', u'SS:9'],
                                  (2, 3, 4): [u'TC:9'],
                                  (2, 5, 2): [u'TT_3:9', u'SS:9', u'TO_2:9'],
                                  (1, 7, 1): [u'HD:9']},
                              10: {(0, 5, 5): [u'PP:10', u'PA:10'],
                                   (3, 4, 3): [u'PA:10', u'SBSA:10', u'MI:10',
                                               u'BS_2:10', u'TBSA:10'],
                                   (2, 6, 2): [u'BS_1:10'],
                                   (2, 4, 4): [u'PP:10', u'MI:10', u'BS_2:10'],
                                   (3, 3, 4): [u'SBSA:10'],
                                   (1, 4, 5): [u'BS_2:10'],
                                   (0, 4, 6): [u'BS_1:10', u'TBSA:10']},
                              11: {(4, 3, 4): [u'PCPA:11'],
                                   (3, 4, 4): [u'DI:11'],
                                   (1, 5, 5): [u'PCPA:11', u'DI:11'],
                                   (3, 5, 3): [u'H:11']},
                              12: {(3, 3, 6): [u'TT:12'],
                                   (2, 4, 6): [u'TT:12'],
                                   (0, 6, 6): [u'HP:12', u'HA:12'],
                                   (3, 6, 3): [u'C:12', u'AC:12'],
                                   (4, 4, 4): [u'I:12', u'PBP:12', u'C:12', u'HP:12'],
                                   (0, 8, 4): [u'SC:12']},
                              13: {(0, 6, 7): [u'SH:13']}})


if __name__ == "__main__":
    unittest.main()
import logging
import os
import subprocess
import traceback
from zipfile import ZipFile
from os import listdir
from os.path import isfile, join
'''
A utility python module containing a set of methods necessary for this kbase
module.
'''
LEVELS = {'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
'critical': logging.CRITICAL}
def create_logger(log_dir, name):
"""Create a logger
    args: log_dir (str): directory where the log file is written
          name (str): name of logger
returns: logger (obj): logging.Logger instance
"""
logger = logging.getLogger(name)
fmt = logging.Formatter('%(asctime)s - %(process)d - %(name)s - '
' %(levelname)s -%(message)s')
hdl = logging.FileHandler(os.path.join(log_dir, name + '.log'))
hdl.setFormatter(fmt)
logger.addHandler(hdl)
return logger
def if_obj_exists(logger, ws_client, ws_id, o_type, obj_l):
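    """Return a list of (object_name, 'wsid/objid/version') tuples for the
    names in obj_l that already exist in workspace ws_id, or None if none
    of them exist yet."""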
obj_list = ws_client.list_objects({"workspaces": [ws_id], "type": o_type, 'showHidden': 1})
obj_names = [i[1] for i in obj_list]
existing_names = [i for i in obj_l if i in obj_names]
obj_ids = None
if len(existing_names) != 0:
e_queries = [{'name': j, 'workspace': ws_id} for j in existing_names]
e_infos = ws_client.get_object_info_new({"objects": e_queries})
obj_ids = [(str(k[1]), (str(k[6]) + '/' + str(k[0]) + '/' + str(k[4]))) for k in e_infos]
return obj_ids
def log(message, level=logging.INFO, logger=None):
if logger is None:
if level == logging.DEBUG:
print('\nDEBUG: ' + message + '\n')
elif level == logging.INFO:
print('\nINFO: ' + message + '\n')
elif level == logging.WARNING:
print('\nWARNING: ' + message + '\n')
elif level == logging.ERROR:
print('\nERROR: ' + message + '\n')
elif level == logging.CRITICAL:
print('\nCRITICAL: ' + message + '\n')
else:
logger.log(level, '\n' + message + '\n')
def zip_files(logger, src_path, output_fn):
"""
Compress all index files (not directory) into an output zip file on disk.
"""
files = [f for f in listdir(src_path) if isfile(join(src_path, f))]
with ZipFile(output_fn, 'w', allowZip64=True) as izip:
for f in files:
izip.write(join(src_path, f), f)
def unzip_files(logger, src_fn, dst_path):
"""
    Extract all files from the source zip archive into the destination directory on disk.
"""
with ZipFile(src_fn, 'r') as ozip:
ozip.extractall(dst_path)
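# Example (illustrative paths): zip_files(None, '/tmp/index_dir', '/tmp/index.zip')
# archives every regular file in /tmp/index_dir; unzip_files(None, '/tmp/index.zip',
# '/tmp/out') reverses the operation.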
def whereis(program):
"""
    returns path of program if it exists in your ``$PATH`` variable or
    ``None`` otherwise
"""
for path in os.environ.get('PATH', '').split(':'):
if os.path.exists(os.path.join(path, program)) and not os.path.isdir(
os.path.join(path, program)):
return os.path.join(path, program)
return None
def runProgram(logger=None,
progName=None,
argStr=None,
script_dir=None,
working_dir=None):
"""
Convenience func to handle calling and monitoring output of external programs.
:param progName: name of system program command
:param argStr: string containing command line options for ``progName``
:returns: subprocess.communicate object
"""
# Ensure program is callable.
    if script_dir is not None:
        progPath = os.path.join(script_dir, progName)
        if not os.path.exists(progPath):
            progPath = None
    else:
        progPath = whereis(progName)
if not progPath:
raise RuntimeError(
None,
'{0} command not found in your PATH environmental variable. {1}'.format(
progName,
os.environ.get(
'PATH',
'')))
# Construct shell command
cmdStr = "%s %s" % (progPath, argStr)
print "Executing : " + cmdStr
if logger is not None:
logger.info("Executing : " + cmdStr)
# if working_dir is None:
logger.info("Executing: " + cmdStr + " on cwd")
else:
logger.info("Executing: " + cmdStr + " on " + working_dir)
# Set up process obj
process = subprocess.Popen(cmdStr,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=working_dir)
# Get results
result, stderr = process.communicate()
# print result
# print stderr
# keep this until your code is stable for easier debugging
if logger is not None and result is not None and len(result) > 0:
logger.info(result)
else:
        print(result)
if logger is not None and stderr is not None and len(stderr) > 0:
logger.info(stderr)
else:
        print(stderr)
# Check returncode for success/failure
    if process.returncode != 0:
        raise RuntimeError(
            'Command execution failed. Return Code : {0} , result {1} , progName {2}'.format(
                process.returncode, result, progName))
# Return result
return {"result": result, "stderr": stderr}
def check_sys_stat(logger):
check_disk_space(logger)
check_memory_usage(logger)
check_cpu_usage(logger)
def check_disk_space(logger):
runProgram(logger=logger, progName="df", argStr="-h")
def check_memory_usage(logger):
runProgram(logger=logger, progName="vmstat", argStr="-s")
def check_cpu_usage(logger):
runProgram(logger=logger, progName="mpstat", argStr="-P ALL")
| arfathpasha/kb_cufflinks | lib/kb_cufflinks/core/script_utils.py | Python | mit | 5,654 |
import py
from rpython.flowspace.model import SpaceOperation, Constant, Variable
from rpython.rtyper.lltypesystem import lltype, llmemory, rffi
from rpython.translator.unsimplify import varoftype
from rpython.rlib import jit
from rpython.jit.codewriter import support, call
from rpython.jit.codewriter.call import CallControl
from rpython.jit.codewriter.effectinfo import EffectInfo
class FakePolicy:
def look_inside_graph(self, graph):
return True
def test_graphs_from_direct_call():
cc = CallControl()
F = lltype.FuncType([], lltype.Signed)
f = lltype.functionptr(F, 'f', graph='fgraph')
v = varoftype(lltype.Signed)
op = SpaceOperation('direct_call', [Constant(f, lltype.Ptr(F))], v)
#
lst = cc.graphs_from(op, {}.__contains__)
assert lst is None # residual call
#
lst = cc.graphs_from(op, {'fgraph': True}.__contains__)
assert lst == ['fgraph'] # normal call
def test_graphs_from_indirect_call():
cc = CallControl()
F = lltype.FuncType([], lltype.Signed)
v = varoftype(lltype.Signed)
graphlst = ['f1graph', 'f2graph']
op = SpaceOperation('indirect_call', [varoftype(lltype.Ptr(F)),
Constant(graphlst, lltype.Void)], v)
#
lst = cc.graphs_from(op, {'f1graph': True, 'f2graph': True}.__contains__)
assert lst == ['f1graph', 'f2graph'] # normal indirect call
#
lst = cc.graphs_from(op, {'f1graph': True}.__contains__)
assert lst == ['f1graph'] # indirect call, look only inside some graphs
#
lst = cc.graphs_from(op, {}.__contains__)
assert lst is None # indirect call, don't look inside any graph
def test_graphs_from_no_target():
cc = CallControl()
F = lltype.FuncType([], lltype.Signed)
v = varoftype(lltype.Signed)
op = SpaceOperation('indirect_call', [varoftype(lltype.Ptr(F)),
Constant(None, lltype.Void)], v)
lst = cc.graphs_from(op, {}.__contains__)
assert lst is None
# ____________________________________________________________
class FakeJitDriverSD:
def __init__(self, portal_graph):
self.portal_graph = portal_graph
self.portal_runner_ptr = "???"
def test_find_all_graphs():
def g(x):
return x + 2
def f(x):
return g(x) + 1
rtyper = support.annotate(f, [7])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
funcs = set([graph.func for graph in res])
assert funcs == set([f, g])
def test_find_all_graphs_without_g():
def g(x):
return x + 2
def f(x):
return g(x) + 1
rtyper = support.annotate(f, [7])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(jitdrivers_sd=[jitdriver_sd])
class CustomFakePolicy:
def look_inside_graph(self, graph):
assert graph.name == 'g'
return False
res = cc.find_all_graphs(CustomFakePolicy())
funcs = [graph.func for graph in res]
assert funcs == [f]
# ____________________________________________________________
def test_guess_call_kind_and_calls_from_graphs():
class portal_runner_obj:
graph = object()
class FakeJitDriverSD:
portal_runner_ptr = portal_runner_obj
g = object()
g1 = object()
cc = CallControl(jitdrivers_sd=[FakeJitDriverSD()])
cc.candidate_graphs = [g, g1]
op = SpaceOperation('direct_call', [Constant(portal_runner_obj)],
Variable())
assert cc.guess_call_kind(op) == 'recursive'
class fakeresidual:
_obj = object()
op = SpaceOperation('direct_call', [Constant(fakeresidual)],
Variable())
assert cc.guess_call_kind(op) == 'residual'
class funcptr:
class _obj:
class graph:
class func:
oopspec = "spec"
op = SpaceOperation('direct_call', [Constant(funcptr)],
Variable())
assert cc.guess_call_kind(op) == 'builtin'
class funcptr:
class _obj:
graph = g
op = SpaceOperation('direct_call', [Constant(funcptr)],
Variable())
res = cc.graphs_from(op)
assert res == [g]
assert cc.guess_call_kind(op) == 'regular'
class funcptr:
class _obj:
graph = object()
op = SpaceOperation('direct_call', [Constant(funcptr)],
Variable())
res = cc.graphs_from(op)
assert res is None
assert cc.guess_call_kind(op) == 'residual'
h = object()
op = SpaceOperation('indirect_call', [Variable(),
Constant([g, g1, h])],
Variable())
res = cc.graphs_from(op)
assert res == [g, g1]
assert cc.guess_call_kind(op) == 'regular'
op = SpaceOperation('indirect_call', [Variable(),
Constant([h])],
Variable())
res = cc.graphs_from(op)
assert res is None
assert cc.guess_call_kind(op) == 'residual'
# ____________________________________________________________
def test_get_jitcode(monkeypatch):
from rpython.jit.codewriter.test.test_flatten import FakeCPU
class FakeRTyper:
class annotator:
translator = None
def getfunctionptr(graph):
F = lltype.FuncType([], lltype.Signed)
return lltype.functionptr(F, 'bar')
monkeypatch.setattr(call, 'getfunctionptr', getfunctionptr)
cc = CallControl(FakeCPU(FakeRTyper()))
class somegraph:
name = "foo"
jitcode = cc.get_jitcode(somegraph)
assert jitcode is cc.get_jitcode(somegraph) # caching
assert jitcode.name == "foo"
pending = list(cc.enum_pending_graphs())
assert pending == [(somegraph, jitcode)]
# ____________________________________________________________
def test_jit_force_virtualizable_effectinfo():
py.test.skip("XXX add a test for CallControl.getcalldescr() -> EF_xxx")
def test_releases_gil_analyzer():
from rpython.jit.backend.llgraph.runner import LLGraphCPU
T = rffi.CArrayPtr(rffi.TIME_T)
external = rffi.llexternal("time", [T], rffi.TIME_T, releasegil=True)
@jit.dont_look_inside
def f():
return external(lltype.nullptr(T.TO))
rtyper = support.annotate(f, [])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(LLGraphCPU(rtyper), jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
[f_graph] = [x for x in res if x.func is f]
[block, _] = list(f_graph.iterblocks())
[op] = block.operations
call_descr = cc.getcalldescr(op)
assert call_descr.extrainfo.has_random_effects()
assert call_descr.extrainfo.is_call_release_gil() is False
def test_call_release_gil():
from rpython.jit.backend.llgraph.runner import LLGraphCPU
T = rffi.CArrayPtr(rffi.TIME_T)
external = rffi.llexternal("time", [T], rffi.TIME_T, releasegil=True,
save_err=rffi.RFFI_SAVE_ERRNO)
# no jit.dont_look_inside in this test
def f():
return external(lltype.nullptr(T.TO))
rtyper = support.annotate(f, [])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(LLGraphCPU(rtyper), jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
[llext_graph] = [x for x in res if x.func is external]
[block, _] = list(llext_graph.iterblocks())
[op] = block.operations
tgt_tuple = op.args[0].value._obj.graph.func._call_aroundstate_target_
assert type(tgt_tuple) is tuple and len(tgt_tuple) == 2
call_target, saveerr = tgt_tuple
assert saveerr == rffi.RFFI_SAVE_ERRNO
call_target = llmemory.cast_ptr_to_adr(call_target)
call_descr = cc.getcalldescr(op)
assert call_descr.extrainfo.has_random_effects()
assert call_descr.extrainfo.is_call_release_gil() is True
assert call_descr.extrainfo.call_release_gil_target == (
call_target, rffi.RFFI_SAVE_ERRNO)
def test_random_effects_on_stacklet_switch():
from rpython.jit.backend.llgraph.runner import LLGraphCPU
from rpython.translator.platform import CompilationError
try:
from rpython.rlib._rffi_stacklet import switch, handle
except CompilationError as e:
if "Unsupported platform!" in e.out:
py.test.skip("Unsupported platform!")
else:
raise e
@jit.dont_look_inside
def f():
switch(rffi.cast(handle, 0))
rtyper = support.annotate(f, [])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(LLGraphCPU(rtyper), jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
[f_graph] = [x for x in res if x.func is f]
[block, _] = list(f_graph.iterblocks())
op = block.operations[-1]
call_descr = cc.getcalldescr(op)
assert call_descr.extrainfo.has_random_effects()
def test_no_random_effects_for_rotateLeft():
from rpython.jit.backend.llgraph.runner import LLGraphCPU
from rpython.rlib.rarithmetic import r_uint
if r_uint.BITS == 32:
py.test.skip("64-bit only")
from rpython.rlib.rmd5 import _rotateLeft
def f(n, m):
return _rotateLeft(r_uint(n), m)
rtyper = support.annotate(f, [7, 9])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(LLGraphCPU(rtyper), jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
[f_graph] = [x for x in res if x.func is f]
[block, _] = list(f_graph.iterblocks())
op = block.operations[-1]
call_descr = cc.getcalldescr(op)
assert not call_descr.extrainfo.has_random_effects()
assert call_descr.extrainfo.check_is_elidable()
def test_elidable_kinds():
from rpython.jit.backend.llgraph.runner import LLGraphCPU
@jit.elidable
def f1(n, m):
return n + m
@jit.elidable
def f2(n, m):
return [n, m] # may raise MemoryError
@jit.elidable
def f3(n, m):
if n > m:
raise ValueError
return n + m
def f(n, m):
a = f1(n, m)
b = f2(n, m)
c = f3(n, m)
return a + len(b) + c
rtyper = support.annotate(f, [7, 9])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(LLGraphCPU(rtyper), jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
[f_graph] = [x for x in res if x.func is f]
for index, expected in [
(0, EffectInfo.EF_ELIDABLE_CANNOT_RAISE),
(1, EffectInfo.EF_ELIDABLE_OR_MEMORYERROR),
(2, EffectInfo.EF_ELIDABLE_CAN_RAISE)]:
call_op = f_graph.startblock.operations[index]
assert call_op.opname == 'direct_call'
call_descr = cc.getcalldescr(call_op)
assert call_descr.extrainfo.extraeffect == expected
def test_raise_elidable_no_result():
from rpython.jit.backend.llgraph.runner import LLGraphCPU
l = []
@jit.elidable
def f1(n, m):
l.append(n)
def f(n, m):
f1(n, m)
return n + m
rtyper = support.annotate(f, [7, 9])
jitdriver_sd = FakeJitDriverSD(rtyper.annotator.translator.graphs[0])
cc = CallControl(LLGraphCPU(rtyper), jitdrivers_sd=[jitdriver_sd])
res = cc.find_all_graphs(FakePolicy())
[f_graph] = [x for x in res if x.func is f]
call_op = f_graph.startblock.operations[0]
assert call_op.opname == 'direct_call'
with py.test.raises(Exception):
call_descr = cc.getcalldescr(call_op)
| jptomo/rpython-lang-scheme | rpython/jit/codewriter/test/test_call.py | Python | mit | 11,707 |
# imports
## core
import importlib
import logging
import os
import pprint
import sys
import StringIO
## 3rd party
import cherrypy
import requests
## local
def full_path(*extra):
return os.path.join(os.path.dirname(__file__), *extra)
sys.path.insert(0, full_path())
import db
logging.basicConfig()
sorry = 'This is only for US Citizens. Sorry and thank you for your time.'
class Root(object):
@cherrypy.expose
def index(self, tag):
redirect_url = db.urls[tag]
ip = cherrypy.request.headers['Remote-Addr']
request_url = 'http://ipinfo.io/{0}/country'.format(ip)
r = requests.get(request_url)
country = r.text.strip()
if country == 'US':
raise cherrypy.HTTPRedirect(redirect_url)
else:
return sorry
| metaperl/metaperl-proxy | myapp.py | Python | mit | 797 |
from battle_tested.beta.input_type_combos import input_type_combos
| CodyKochmann/battle_tested | battle_tested/beta/fuzz_planner.py | Python | mit | 68 |
import os
import sys
import numpy as np
import matplotlib.image as mpimg
from ..core.data import Data
from ..util import tryremove
URL = 'http://synthia-dataset.cvc.uab.cat/SYNTHIA_SEQS/'
SEQS = [  # SUMMER and WINTER variants of sequences 1, 2, 4, 5 and 6
'SYNTHIA-SEQS-01-SUMMER',
'SYNTHIA-SEQS-01-WINTER',
'SYNTHIA-SEQS-02-SUMMER',
'SYNTHIA-SEQS-02-WINTER',
'SYNTHIA-SEQS-04-SUMMER',
'SYNTHIA-SEQS-04-WINTER',
'SYNTHIA-SEQS-05-SUMMER',
'SYNTHIA-SEQS-05-WINTER',
'SYNTHIA-SEQS-06-SUMMER',
'SYNTHIA-SEQS-06-WINTER'
]
DEV_SEQS = ['SYNTHIA-SEQS-01-SUMMER']
class SynthiaData(Data):
dirs = ['synthia']
def __init__(self, data_dir, stat_log_dir=None,
development=True, fast_dir=None):
super().__init__(data_dir, stat_log_dir,
development=development,
fast_dir=fast_dir)
def _fetch_if_missing(self):
self._maybe_get_synthia()
def get_raw_dirs(self):
root_dir = os.path.join(self.current_dir, 'synthia')
dirs = []
seqs = os.listdir(root_dir)
for seq in seqs:
seq_dir = os.path.join(root_dir, seq, seq, 'RGB', 'Stereo_Left')
views = os.listdir(seq_dir)
for view in views:
view_dir = os.path.join(seq_dir, view)
dirs.extend([view_dir])
return dirs
def _maybe_get_synthia(self):
seqs = DEV_SEQS if self.development else SEQS
for seq in seqs:
root_dir = os.path.join(self.data_dir, 'synthia')
url = URL + seq + '.rar'
url_dir = os.path.join(root_dir, seq)
if not os.path.isdir(url_dir):
self._download_and_extract(url, url_dir, 'rar')
# Remove unused directories
tryremove(os.path.join(url_dir, seq, 'GT'))
tryremove(os.path.join(url_dir, seq, 'Depth'))
tryremove(os.path.join(url_dir, seq, 'CameraParams'))
tryremove(os.path.join(url_dir, 'RGB', 'Stereo_Right'))
| simonmeister/UnFlow | src/e2eflow/synthia/data.py | Python | mit | 2,040 |
import sys
from genStubs import *
stub = Stubs( "systemMessages", sys.argv[1], sys.argv[2] )
stub.include( "nanopb/IMessage.h" )
stub.include( "systemMessages/AGLMsg.pb.h" )
stub.include( "systemMessages/AGLOffsetMsg.pb.h" )
stub.include( "systemMessages/AGLRawMsg.pb.h" )
stub.include( "systemMessages/AbortLaunchMsg.pb.h" )
stub.include( "systemMessages/AccelGyroDataMsg.pb.h" )
stub.include( "systemMessages/AccelGyroDataRaw.pb.h" )
stub.include( "systemMessages/ActiveControlSourceNotification.pb.h" )
stub.include( "systemMessages/ActiveManeuverSourceNotification.pb.h" )
stub.include( "systemMessages/ActuatorConstants.pb.h" )
stub.include( "systemMessages/ActuatorPictureMsg.pb.h" )
stub.include( "systemMessages/ActuatorPortCalibration.pb.h" )
stub.include( "systemMessages/ActuatorPortConfigMsg.pb.h" )
stub.include( "systemMessages/ActuatorPowerBusMsg.pb.h" )
stub.include( "systemMessages/ActuatorTakePicture.pb.h" )
stub.include( "systemMessages/AeroTerminateMsg.pb.h" )
stub.include( "systemMessages/AirmailDebugLogSettingsMsg.pb.h" )
stub.include( "systemMessages/AirmailPoolStatsMsg.pb.h" )
stub.include( "systemMessages/AirspeedCalibrationDataMsg.pb.h" )
stub.include( "systemMessages/AltMSLCorrection.pb.h" )
stub.include( "systemMessages/AnnounceMsg.pb.h" )
stub.include( "systemMessages/AttCtrlConfig.pb.h" )
stub.include( "systemMessages/AuxControlMix.pb.h" )
stub.include( "systemMessages/AwxHeaderMsg.pb.h" )
stub.include( "systemMessages/BoardStatus.pb.h" )
stub.include( "systemMessages/ClientRequest.pb.h" )
stub.include( "systemMessages/ConnectionStatus.pb.h" )
stub.include( "systemMessages/ContingencyEventMap.pb.h" )
stub.include( "systemMessages/ContingencyEventStatus.pb.h" )
stub.include( "systemMessages/ControlLog.pb.h" )
stub.include( "systemMessages/ControlLogRateConfig.pb.h" )
stub.include( "systemMessages/ControlRequest.pb.h" )
stub.include( "systemMessages/DateOfLastConfigurationMsg.pb.h" )
stub.include( "systemMessages/DeviceManagerMsgs.pb.h" )
stub.include( "systemMessages/EffectorCmdsMsg.pb.h" )
stub.include( "systemMessages/EffectorStatusMsg.pb.h" )
stub.include( "systemMessages/EffectorSurfaceMap.pb.h" )
stub.include( "systemMessages/EthernetStatusMsg.pb.h" )
stub.include( "systemMessages/Example.pb.h" )
stub.include( "systemMessages/FileTransferMsg.pb.h" )
stub.include( "systemMessages/FlightStatus.pb.h" )
stub.include( "systemMessages/GCSConnectivityStatus.pb.h" )
stub.include( "systemMessages/GCSJobInfoMsg.pb.h" )
stub.include( "systemMessages/GPSData.pb.h" )
stub.include( "systemMessages/GPSRestartMsg.pb.h" )
stub.include( "systemMessages/GPSStatus.pb.h" )
stub.include( "systemMessages/Geofence.pb.h" )
stub.include( "systemMessages/GuidanceConfig.pb.h" )
stub.include( "systemMessages/HealthEventMsg.pb.h" )
stub.include( "systemMessages/HobbsMeter.pb.h" )
stub.include( "systemMessages/IMUOrientationConfig.pb.h" )
stub.include( "systemMessages/INSAccelData.pb.h" )
stub.include( "systemMessages/INSAncillaryData.pb.h" )
stub.include( "systemMessages/INSAttitudeData.pb.h" )
stub.include( "systemMessages/INSConfigMsg.pb.h" )
stub.include( "systemMessages/INSCorrectionData.pb.h" )
stub.include( "systemMessages/INSCorrectionRequest.pb.h" )
stub.include( "systemMessages/INSEnums.pb.h" )
stub.include( "systemMessages/INSErrorData.pb.h" )
stub.include( "systemMessages/INSLog.pb.h" )
stub.include( "systemMessages/INSMessageComponents.pb.h" )
stub.include( "systemMessages/INSPosVelData.pb.h" )
stub.include( "systemMessages/INSStatusData.pb.h" )
stub.include( "systemMessages/KillMode.pb.h" )
stub.include( "systemMessages/LaneSplitter.pb.h" )
stub.include( "systemMessages/LaneSplitterStatsMsg.pb.h" )
stub.include( "systemMessages/LogInformationEntry.pb.h" )
stub.include( "systemMessages/LogManagement.pb.h" )
stub.include( "systemMessages/LogRequestMsg.pb.h" )
stub.include( "systemMessages/MPUCalConfig.pb.h" )
stub.include( "systemMessages/MRAirframeConfig.pb.h" )
stub.include( "systemMessages/MagCalibrationParameters.pb.h" )
stub.include( "systemMessages/MagData.pb.h" )
stub.include( "systemMessages/MagDataRaw.pb.h" )
stub.include( "systemMessages/MagOrientationConfigMsg.pb.h" )
stub.include( "systemMessages/Maneuver.pb.h" )
stub.include( "systemMessages/ManeuverExecutionStatus.pb.h" )
stub.include( "systemMessages/ManeuverPauseResumeMsg.pb.h" )
stub.include( "systemMessages/MapRcInputToFlightChannelMsg.pb.h" )
stub.include( "systemMessages/Menagerie.pb.h" )
stub.include( "systemMessages/MfgParamsMsg.pb.h" )
stub.include( "systemMessages/Mission.pb.h" )
stub.include( "systemMessages/MissionExec.pb.h" )
stub.include( "systemMessages/MissionList.pb.h" )
stub.include( "systemMessages/MissionStatus.pb.h" )
stub.include( "systemMessages/ModemConfig.pb.h" )
stub.include( "systemMessages/ModemGetRadioType.pb.h" )
stub.include( "systemMessages/ModemLinkStatus.pb.h" )
stub.include( "systemMessages/ModemPower.pb.h" )
stub.include( "systemMessages/NakMsg.pb.h" )
stub.include( "systemMessages/OperatorModuleConfig.pb.h" )
stub.include( "systemMessages/PWMRateMsg.pb.h" )
stub.include( "systemMessages/PayloadPower.pb.h" )
stub.include( "systemMessages/PosVelCtrlConfig.pb.h" )
stub.include( "systemMessages/PowerManagerConfig.pb.h" )
stub.include( "systemMessages/PowerStatus.pb.h" )
stub.include( "systemMessages/PressureData.pb.h" )
stub.include( "systemMessages/PrimaryControlMix.pb.h" )
stub.include( "systemMessages/PrimitiveDataTypes.pb.h" )
stub.include( "systemMessages/RcChannels.pb.h" )
stub.include( "systemMessages/RcInputCalibrationMsg.pb.h" )
stub.include( "systemMessages/RcInputMsg.pb.h" )
stub.include( "systemMessages/RebootRequestMsg.pb.h" )
stub.include( "systemMessages/RgbLed.pb.h" )
stub.include( "systemMessages/STM32OTPParams.pb.h" )
stub.include( "systemMessages/SaveConfigConstants.pb.h" )
stub.include( "systemMessages/ServerResponse.pb.h" )
stub.include( "systemMessages/Shape2D.pb.h" )
stub.include( "systemMessages/SimConfigurationRequest.pb.h" )
stub.include( "systemMessages/SimControlRequest.pb.h" )
stub.include( "systemMessages/StateMachineEnums.pb.h" )
stub.include( "systemMessages/SystemEnums.pb.h" )
stub.include( "systemMessages/SystemMode.pb.h" )
stub.include( "systemMessages/SystemPowerStatus.pb.h" )
stub.include( "systemMessages/TelemetryWatchdogConfig.pb.h" )
stub.include( "systemMessages/TemperatureData.pb.h" )
stub.include( "systemMessages/TestMessage.pb.h" )
stub.include( "systemMessages/ThreadStatsMsg.pb.h" )
stub.include( "systemMessages/TimeStamp.pb.h" )
stub.include( "systemMessages/VehicleDescriptionMessage.pb.h" )
stub.include( "systemMessages/VersionInfoEntry.pb.h" )
stub.newline()
stub.addLine( "typedef int16_t msgSize_t;" )
stub.newline()
stub.stubSysMsg( "CAGLMsg" )
stub.stubSysMsg( "CAGLOffsetMsg" )
stub.stubSysMsg( "CAGLRawMsg" )
stub.stubSysMsg( "CAccelGyroDataMsg" )
stub.stubSysMsg( "CAccelGyroDataRaw" )
stub.stubSysMsg( "CActiveControlSourceNotification" )
stub.stubSysMsg( "CActiveManeuverSourceNotification" )
stub.stubSysMsg( "CActuatorPictureMsg" )
stub.stubSysMsg( "CActuatorPortCalibration" )
stub.stubSysMsg( "CActuatorPortConfigMsg" )
stub.stubSysMsg( "CActuatorPowerBusMsg" )
stub.stubSysMsg( "CActuatorTakePictureMsg" )
stub.stubSysMsg( "CAirmailDebugLogSettingsMsg" )
stub.stubSysMsg( "CAirmailPoolStatsMsg" )
stub.stubSysMsg( "CAirspeedCalibrationDataMsg" )
stub.stubSysMsg( "CAltMSLCorrection" )
stub.stubSysMsg( "CAnnounceMsg" )
stub.stubSysMsg( "CAttCtrlConfig" )
stub.stubSysMsg( "CAuxControlMix" )
stub.stubSysMsg( "CAwxHeaderMsg" )
stub.stubSysMsg( "CBoardStatus" )
stub.stubSysMsg( "CClientRequest" )
stub.stubSysMsg( "CRegisterAsPeriodicPublisherMsg" )
stub.stubSysMsg( "CUnregisterAsPublisherMsg" )
stub.stubSysMsg( "CSubscribePeriodicMsg" )
stub.stubSysMsg( "CUnsubscribeTopicMsg" )
stub.stubSysMsg( "CRegisterAsCallerMsg" )
stub.stubSysMsg( "CUnregisterAsCallerMsg" )
stub.stubSysMsg( "CRegisterAsProviderMsg" )
stub.stubSysMsg( "CUnregisterAsProviderMsg" )
stub.stubSysMsg( "CCallServiceMsg" )
stub.stubSysMsg( "CPublishTopicMsg" )
stub.stubSysMsg( "CClientServiceResponseMsg" )
stub.stubSysMsg( "CConnectionStatus" )
stub.stubSysMsg( "CContingencyEventMap" )
stub.stubSysMsg( "CContingencyEventStatus" )
stub.stubSysMsg( "CControlLog" )
stub.stubSysMsg( "CControlLogRateConfig" )
stub.stubSysMsg( "CControlRequest" )
stub.stubSysMsg( "CDateOfLastConfigurationMsg" )
stub.stubSysMsg( "CSignatureIdMsg" )
stub.stubSysMsg( "CServiceInfoMsg" )
stub.stubSysMsg( "CProviderIdMsg" )
stub.stubSysMsg( "CSignatureHashMsg" )
stub.stubSysMsg( "CSignatureHashAndProviderMsg" )
stub.stubSysMsg( "CQueryResultMsg" )
stub.stubSysMsg( "CUniqueIdMsg" )
stub.stubSysMsg( "CNodeInfoMsg" )
stub.stubSysMsg( "CNodeIdAckMsg" )
stub.stubSysMsg( "CNodeIdMsg" )
stub.stubSysMsg( "CNodeIdListMsg" )
stub.stubSysMsg( "CNodeInfoFilterMsg" )
stub.stubSysMsg( "CEffectorCmdsMsg" )
stub.stubSysMsg( "CEffectorStatusMsg" )
stub.stubSysMsg( "CEffectorSurfaceMap" )
stub.stubSysMsg( "CEthernetPortStatusMsg" )
stub.stubSysMsg( "CEthernetStatusMsg" )
stub.stubSysMsg( "CListFilesRequest" )
stub.stubSysMsg( "CFileInfo" )
stub.stubSysMsg( "CListFilesResponse" )
stub.stubSysMsg( "CFileTransferMsg" )
stub.stubSysMsg( "CFlightStatus" )
stub.stubSysMsg( "CGCSConnectivityStatus" )
stub.stubSysMsg( "CGCSJobInfoMsg" )
stub.stubSysMsg( "CGPSData" )
stub.stubSysMsg( "CGPSRestartMsg" )
stub.stubSysMsg( "CGPSStatus" )
stub.stubSysMsg( "CGeofence" )
stub.stubSysMsg( "CGuidanceConfig" )
stub.stubSysMsg( "CHealthEventMsg" )
stub.stubSysMsg( "CHobbsMeterMsg" )
stub.stubSysMsg( "CIMUOrientationConfig" )
stub.stubSysMsg( "CINSAncillaryData" )
stub.stubSysMsg( "CINSAttitudeData" )
stub.stubSysMsg( "CINSConfigMsg" )
stub.stubSysMsg( "CINSCorrectionData" )
stub.stubSysMsg( "CINSCorrectionRequest" )
stub.stubSysMsg( "CINSErrorData" )
stub.stubSysMsg( "CINSLog" )
stub.stubSysMsg( "CVectorXYZ" )
stub.stubSysMsg( "CVectorNED" )
stub.stubSysMsg( "CDCM" )
stub.stubSysMsg( "CINSPosVelData" )
stub.stubSysMsg( "CINSStatusData" )
stub.stubSysMsg( "CKillCh" )
stub.stubSysMsg( "CKillModeMsg" )
stub.stubSysMsg( "CLaneSplitterStatsMsg" )
stub.stubSysMsg( "CLogEntryProvider" )
stub.stubSysMsg( "CLogMgmtCmd" )
stub.stubSysMsg( "CLogMgmtResponse" )
stub.stubSysMsg( "CLogRequestMsg" )
stub.stubSysMsg( "CMPUCalConfig" )
stub.stubSysMsg( "CMRAirframeConfig" )
stub.stubSysMsg( "CMagCalibrationParameters" )
stub.stubSysMsg( "CMagData" )
stub.stubSysMsg( "CMagDataRaw" )
stub.stubSysMsg( "CMagOrientationConfigMsg" )
stub.stubSysMsg( "CManeuver" )
stub.stubSysMsg( "CManeuverExecutionStatus" )
stub.stubSysMsg( "CManeuverPauseResumeMsg" )
stub.stubSysMsg( "CMapRcInputToFlightChannelMsg" )
stub.stubSysMsg( "CpointType" )
stub.stubSysMsg( "CMR_FLT_trackToPt" )
stub.stubSysMsg( "CMR_FLT_holdAtPt" )
stub.stubSysMsg( "CMR_FLT_manAttitude" )
stub.stubSysMsg( "CMR_FLT_manVelocity" )
stub.stubSysMsg( "CMR_TKO_liftoffMSL" )
stub.stubSysMsg( "CMR_LND_descendMSL" )
stub.stubSysMsg( "CMR_FLT_stopAndHold" )
stub.stubSysMsg( "CMR_LND_stopAndDescend" )
stub.stubSysMsg( "CMR_LND_attitudeOnly" )
stub.stubSysMsg( "CMR_FLT_minAltGoto" )
stub.stubSysMsg( "CMR_FLT_photoSurvey" )
stub.stubSysMsg( "CMR_FLT_surveyPoint" )
stub.stubSysMsg( "CLND_terminate" )
stub.stubSysMsg( "CFW_FLT_manAttitude" )
stub.stubSysMsg( "CFW_FLT_manFull" )
stub.stubSysMsg( "CFW_FLT_circle" )
stub.stubSysMsg( "CFW_FLT_slantTrackTo" )
stub.stubSysMsg( "CFW_FLT_directTo" )
stub.stubSysMsg( "CFW_TKO_launch" )
stub.stubSysMsg( "CFW_LND_touchdown" )
stub.stubSysMsg( "CFW_LND_glidingCircle" )
stub.stubSysMsg( "CFW_LND_attitudeOnly" )
stub.stubSysMsg( "CFW_FLT_photoSurvey" )
stub.stubSysMsg( "CMfgParamsMsg" )
stub.stubSysMsg( "CMission" )
stub.stubSysMsg( "CMissionExec" )
stub.stubSysMsg( "CMissionList" )
stub.stubSysMsg( "CMissionStatus" )
stub.stubSysMsg( "CRadioConfigMsg" )
stub.stubSysMsg( "CRadioConfigOOBMsg" )
stub.stubSysMsg( "CRadioTypeMsg" )
stub.stubSysMsg( "CradioLinkStatusMsg" )
stub.stubSysMsg( "CradioPowerMsg" )
stub.stubSysMsg( "CNakMsg" )
stub.stubSysMsg( "COperatorModuleConfig" )
stub.stubSysMsg( "CPWMRateMsg" )
stub.stubSysMsg( "CPayloadPowerMsg" )
stub.stubSysMsg( "CPosVelCtrlConfig" )
stub.stubSysMsg( "CPowerManagerConfig" )
stub.stubSysMsg( "CCircuitState" )
stub.stubSysMsg( "CPowerStatusMsg" )
stub.stubSysMsg( "CPressureData" )
stub.stubSysMsg( "CPrimaryControlMix" )
stub.stubSysMsg( "CBoolMsg" )
stub.stubSysMsg( "CSint32Msg" )
stub.stubSysMsg( "CUint32Msg" )
stub.stubSysMsg( "CFloatMsg" )
stub.stubSysMsg( "CRcInputCalibrationMsg" )
stub.stubSysMsg( "CRcInputMsg" )
stub.stubSysMsg( "CRebootRequestMsg" )
stub.stubSysMsg( "CRgbLedMsg" )
stub.stubSysMsg( "CSaveConfigMsg" )
stub.stubSysMsg( "CServerResponse" )
stub.stubSysMsg( "CTopicDataMsg" )
stub.stubSysMsg( "CServiceCallResultMsg" )
stub.stubSysMsg( "CServiceCallRequestMsg" )
stub.stubSysMsg( "CServiceCallRegistrationAck" )
stub.stubSysMsg( "CAcknowledgementMsg" )
stub.stubSysMsg( "CintPointType" )
stub.stubSysMsg( "CCircleType" )
stub.stubSysMsg( "CPolygonType" )
stub.stubSysMsg( "CShape2D" )
stub.stubSysMsg( "CSimConfigurationRequest" )
stub.stubSysMsg( "CSimControlRequestMsg" )
stub.stubSysMsg( "CSystemMode" )
stub.stubSysMsg( "CSystemPowerStatusMsg" )
stub.stubSysMsg( "CTelemetryWatchdogConfig" )
stub.stubSysMsg( "CTemperatureData" )
stub.stubSysMsg( "CTestMessage" )
stub.stubSysMsg( "CThreadStatsMsg" )
stub.stubSysMsg( "CTimeStamp" )
stub.stubSysMsg( "CVehicleDescriptionMessage" )
stub.stubSysMsg( "CVersionEntry" )
stub.stubSysMsg( "CVersionMsg" )
| denniswjackson/embedded-tools | apollo/bin/stubFactory/stub_systemMessages.py | Python | mit | 13,464 |
import scipy.misc, numpy as np, os, sys
def save_img(out_path, img):
img = np.clip(img, 0, 255).astype(np.uint8)
scipy.misc.imsave(out_path, img)
def scale_img(style_path, style_scale):
    scale = float(style_scale)
    o0, o1, o2 = scipy.misc.imread(style_path, mode='RGB').shape
    new_shape = (int(o0 * scale), int(o1 * scale), o2)
    style_target = get_img(style_path, img_size=new_shape)
return style_target
def get_img(src, img_size=False):
img = scipy.misc.imread(src, mode='RGB') # misc.imresize(, (256, 256, 3))
if not (len(img.shape) == 3 and img.shape[2] == 3):
img = np.dstack((img,img,img))
if img_size != False:
img = scipy.misc.imresize(img, img_size)
return img
def exists(p, msg):
assert os.path.exists(p), msg
def list_files(in_path):
files = []
for (dirpath, dirnames, filenames) in os.walk(in_path):
files.extend(filenames)
break
return files
| gmittal/prisma | server/src/utils.py | Python | mit | 975 |
from datetime import datetime
import hashlib
from extractor import Ways
from date import way_date
class Helpers:
    '''Static helper utilities used across this module.'''
@staticmethod
def make_id(website, timestamp):
        '''Build a deterministic md5 hex id from website + timestamp.'''
m=hashlib.md5()
m.update(''.join([website, timestamp]).encode())
return m.hexdigest()
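# Illustrative example (values are made up): Helpers.make_id('example.com',
# '20150101000000') returns a stable 32-character md5 hex digest, so the same
# website/timestamp pair always maps to the same snapshot id.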
class WayDefault:
    '''Wraps a single archived snapshot dict and exposes its parsed fields.'''
@classmethod
def set_parser(cls, ParserObj):
'''
'''
cls.parser=ParserObj
def __init__(self, snap_dict):
'''
'''
self._raw=snap_dict
self.timestamp=snap_dict['timestamp']
self._data=self.parser.parse(self._raw['page'])
self._data.update({
'website':snap_dict['website'],
'timestamp':way_date(self.timestamp),
})
self.id=Helpers.make_id(snap_dict['website'],self.timestamp)
self.report=snap_dict['report']
@property
def extracted(self):
'''
'''
return {k:v for k,v in self._data.items() if k != 'page'}
@property
def snapshot(self):
'''
'''
return self._data['page']
@property
def data(self):
'''
'''
return self._data
WayDefault.set_parser(Ways)
| VulcanoAhab/waybackeess | response.py | Python | mit | 1,236 |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^(?P<quiz_slug>[-A-Za-z0-9_]+)/$', views.quiz, name='quiz'),
url(r'^(?P<quiz_slug>[-A-Za-z0-9_]+)/(?P<question_slug>[-A-Za-z0-9_]+)/$', views.question, name='question')
]
| super1337/Super1337-CTF | questionnaire/urls.py | Python | mit | 299 |
import sys # this allows you to read the user input from keyboard also called "stdin"
import classOne # This imports all the classOne functions
import classTwo # This imports all the classTwo functions
import classThree # This imports all the classThree functions
import classFour # This imports all the classFour functions
TIMEOUT=10 # this is the amount of time you will wait for an answer in Seconds. 10 means 10 seconds
MAX_CLASS=5
QUIZ_INSTRUCTIONS = """
Get ready for the quiz. You will have 10 questions out of which you
will need 8 right to win the prize. You will have """ + str(TIMEOUT) + """ seconds
to answer each question. Press Enter to start."""
def getUsersClass():
''' This function will get the user's class. It will compare the class with MAX_CLASS and
will return False if it is more than the MAX_CLASS. Class also has to be a natural number '''
print("Please tell me which Class you are in? ")
try:
usersClass = int(sys.stdin.readline().strip())
if (usersClass < 1 or usersClass > MAX_CLASS) :
print("No Quiz available for Class " + str(usersClass))
return False
else :
return usersClass
    except ValueError:
        print("Please enter a whole number.")
        return False
if __name__ == '__main__':
while(True) :
usersClass = getUsersClass()
if (usersClass != False) :
break
print(QUIZ_INSTRUCTIONS)
sys.stdin.readline()
if (usersClass == 1) :
classOne.classOneQuiz()
elif (usersClass == 2) :
classTwo.classTwoQuiz()
elif(usersClass == 3):
classThree.classThreeQuiz()
elif(usersClass == 4):
classFour.classFourQuiz()
| nischal2002/m-quiz-2016 | quiz.py | Python | mit | 1,704 |
import numpy
from srxraylib.plot.gol import plot_image, plot
import sys
from comsyl.scripts.CompactAFReader import CompactAFReader
def plot_stack(mystack,what="intensity",title0="X",title1="Y",title2="Z"):
from silx.gui.plot.StackView import StackViewMainWindow
from silx.gui import qt
app = qt.QApplication(sys.argv[1:])
sv = StackViewMainWindow()
sv.setColormap("jet", autoscale=True)
if what == "intensity":
sv.setStack(numpy.absolute(mystack))
elif what == "real":
sv.setStack(numpy.real(mystack))
elif what == "imaginary":
sv.setStack(numpy.imag(mystack))
elif what == "phase":
sv.setStack(numpy.angle(mystack))
elif what == "phase_deg":
sv.setStack(numpy.angle(mystack,deg=True))
else:
raise Exception("Undefined label "+what)
sv.setLabels([title0,title1,title2])
sv.show()
app.exec_()
def load_stack(filename):
# filename = "/users/srio/OASYS_VE/comsyl_srio/calculations/new_u18_2m_1h_s2.5"
reader = CompactAFReader(filename)
print("File %s:" % filename)
print("contains")
print("%i modes" % reader.number_modes())
print("on the grid")
print("x: from %e to %e" % (reader.x_coordinates().min(), reader.x_coordinates().max()))
print("y: from %e to %e" % (reader.y_coordinates().min(), reader.y_coordinates().max()))
print("calculated at %f eV" % reader.photon_energy())
print("with total intensity in (maybe improper) normalization: %e" % reader.total_intensity().real.sum())
print("Occupation and max abs value of the mode")
x = reader.x_coordinates()
y = reader.y_coordinates()
eigenvalues = numpy.zeros(reader.number_modes())
mystack = numpy.zeros((reader.number_modes(),y.size,x.size),dtype=complex)
for i_mode in range(reader.number_modes()):
eigenvalues[i_mode] = reader.occupation(i_mode)
mode = reader.mode(i_mode)
mystack[i_mode,:,:] = mode.T
return x,y,mystack, eigenvalues
if __name__ == "__main__":
h,v,mystack, occupation = load_stack("/users/srio/OASYS_VE/comsyl_srio/calculations/new_u18_2m_1h_s2.5")
plot_stack(mystack,what="intensity", title0="Mode index",
title1="V from %3.2f to %3.2f um"%(1e3*v.min(),1e3*v.max()),
title2="H from %3.2f to %3.2f um"%(1e3*h.min(),1e3*h.max()))
    plot(numpy.arange(occupation.size),occupation)
| srio/oasys-comsyl | orangecontrib/comsyl/scripts/load_results_from_file.py | Python | mit | 2411 |
from django.conf.urls import url,include
from django.contrib import admin
from cn_device import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^send/(?P<id_ras>[0-9]+)/$',views.payam,name='send condition'),
url(r'^give/(?P<id_ras>[0-9]+)/(?P<bl>[0-1])/$', views.give_req, name='give condition'),
]
| msadegh97/IoT_first-project | server/server/urls.py | Python | mit | 326 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0008_auto_20150819_0050'),
]
operations = [
migrations.AlterUniqueTogether(
name='test',
unique_together=set([('owner', 'name')]),
),
]
| swarmer/tester | core/migrations/0009_auto_20150821_0243.py | Python | mit | 376 |
"""
Run latency & thruput tests on various server configurations.
"""
import glob
import os.path
import shutil
import time
from openmdao.main.mp_util import read_server_config
from openmdao.main.objserverfactory import connect, start_server
from openmdao.util.fileutil import onerror
MESSAGE_DATA = []
def init_messages():
""" Initialize message data for various sizes. """
for i in range(21):
MESSAGE_DATA.append(' ' * (1 << i))
def run_test(name, server):
""" Run latency & bandwidth test on `server`. """
for i in range(10):
server.echo(MESSAGE_DATA[0]) # 'prime' the connection.
results = []
reps = 1000
for msg in MESSAGE_DATA:
start = time.time()
for i in range(reps):
server.echo(msg)
et = time.time() - start
size = len(msg)
latency = et / reps
thruput = len(msg) / (et / reps)
print '%d msgs of %d bytes, latency %g, thruput %g' \
% (reps, size, latency, thruput)
results.append((size, latency, thruput))
if et > 2 and reps >= 20:
reps /= int((et / 2) + 0.5)
return results
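# Note: run_test() shrinks the repetition count whenever a message size takes
# more than ~2 seconds to echo, so larger payloads are sampled with fewer calls.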
def main():
""" Run latency & thruput tests on various server configurations. """
init_messages()
latency_results = {}
thruput_results = {}
# For each configuration...
count = 0
for authkey in ('PublicKey', 'UnEncrypted'):
for ip_port in (-1, 0):
for hops in (1, 2):
# Start factory in unique directory.
count += 1
name = 'Echo_%d' % count
if os.path.exists(name):
shutil.rmtree(name, onerror=onerror)
os.mkdir(name)
os.chdir(name)
try:
server_proc, server_cfg = \
start_server(authkey=authkey, port=ip_port)
cfg = read_server_config(server_cfg)
finally:
os.chdir('..')
# Connect to factory.
address = cfg['address']
port = cfg['port']
key = cfg['key']
print
print '%s, %s %d, hops: %d' % (authkey, address, port, hops)
factory = connect(address, port, authkey=authkey, pubkey=key)
if hops == 1:
server = factory
else:
# Create a server.
server = factory.create('')
# Run test.
results = run_test(name, server)
# Shutdown.
if server is not factory:
factory.release(server)
factory.cleanup()
server_proc.terminate(timeout=10)
# Add results.
for size, latency, thruput in results:
if size not in latency_results:
latency_results[size] = []
latency_results[size].append(latency)
if size not in thruput_results:
thruput_results[size] = []
thruput_results[size].append(thruput)
# Write out results in X, Y1, Y2, ... format.
header = 'Bytes,En-S-1,En-S-2,En-P-1,En-P-2,Un-S-1,Un-S-2,Un-P-1,Un-P-2\n'
with open('latency.csv', 'w') as out:
out.write(header)
for size in sorted(latency_results.keys()):
out.write('%d' % size)
for value in latency_results[size]:
out.write(', %g' % value)
out.write('\n')
with open('thruput.csv', 'w') as out:
out.write(header)
for size in sorted(thruput_results.keys()):
out.write('%d' % size)
for value in thruput_results[size]:
out.write(', %g' % value)
out.write('\n')
for path in glob.glob('Echo_*'):
shutil.rmtree(path, onerror=onerror)
if __name__ == '__main__':
main()
| DailyActie/Surrogate-Model | 01-codes/OpenMDAO-Framework-dev/openmdao.main/src/openmdao/main/test/netperf.py | Python | mit | 3,996 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import gdb
import pwndbg.abi
import pwndbg.color.chain as C
import pwndbg.color.memory as M
import pwndbg.color.theme as theme
import pwndbg.enhance
import pwndbg.memory
import pwndbg.symbol
import pwndbg.typeinfo
import pwndbg.vmmap
LIMIT = pwndbg.config.Parameter('dereference-limit', 5, 'max number of pointers to dereference in a chain')
def get(address, limit=LIMIT, offset=0, hard_stop=None, hard_end=0, include_start=True):
"""
Recursively dereferences an address. For bare metal, it will stop when the address is not in any of vmmap pages to avoid redundant dereference.
Arguments:
address(int): the first address to begin dereferencing
limit(int): number of valid pointers
offset(int): offset into the address to get the next pointer
hard_stop(int): address to stop at
hard_end: value to append when hard_stop is reached
include_start(bool): whether to include starting address or not
Returns:
A list representing pointers of each ```address``` and reference
"""
limit = int(limit)
result = [address] if include_start else []
for i in range(limit):
# Don't follow cycles, except to stop at the second occurrence.
if result.count(address) >= 2:
break
if hard_stop is not None and address == hard_stop:
result.append(hard_end)
break
try:
address = address + offset
# Avoid redundant dereferences in bare metal mode by checking
# if address is in any of vmmap pages
if not pwndbg.abi.linux and not pwndbg.vmmap.find(address):
break
address = int(pwndbg.memory.poi(pwndbg.typeinfo.ppvoid, address))
address &= pwndbg.arch.ptrmask
result.append(address)
except gdb.MemoryError:
break
return result
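# Illustrative example (only meaningful inside a gdb session with pwndbg
# loaded; the addresses below are made up):
#   get(0x7fffffffe000, limit=2)  ->  [0x7fffffffe000, 0x7fffffffe1a0, 0x41414141]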
config_arrow_left = theme.Parameter('chain-arrow-left', '◂—', 'left arrow of chain formatting')
config_arrow_right = theme.Parameter('chain-arrow-right', '—▸', 'right arrow of chain formatting')
config_contiguous = theme.Parameter('chain-contiguous-marker', '...', 'contiguous marker of chain formatting')
def format(value, limit=LIMIT, code=True, offset=0, hard_stop=None, hard_end=0):
"""
Recursively dereferences an address into string representation, or convert the list representation
of address dereferences into string representation.
Arguments:
value(int|list): Either the starting address to be sent to get, or the result of get (a list)
limit(int): Number of valid pointers
code(bool): Hint that indicates the value may be an instruction
offset(int): Offset into the address to get the next pointer
hard_stop(int): Value to stop on
hard_end: Value to append when hard_stop is reached: null, value of hard stop, a string.
Returns:
A string representing pointers of each address and reference
Strings format: 0x0804a10 —▸ 0x08061000 ◂— 0x41414141
"""
limit = int(limit)
# Allow results from get function to be passed to format
if isinstance(value, list):
chain = value
else:
chain = get(value, limit, offset, hard_stop, hard_end)
arrow_left = C.arrow(' %s ' % config_arrow_left)
arrow_right = C.arrow(' %s ' % config_arrow_right)
# Colorize the chain
rest = []
for link in chain:
symbol = pwndbg.symbol.get(link) or None
if symbol:
symbol = '%#x (%s)' % (link, symbol)
rest.append(M.get(link, symbol))
# If the dereference limit is zero, skip any enhancements.
if limit == 0:
return rest[0]
# Otherwise replace last element with the enhanced information.
rest = rest[:-1]
# Enhance the last entry
# If there are no pointers (e.g. eax = 0x41414141), then enhance
# the only element there is.
if len(chain) == 1:
enhanced = pwndbg.enhance.enhance(chain[-1], code=code)
# Otherwise, the last element in the chain is the non-pointer value.
# We want to enhance the last pointer value. If an offset was used
# chain failed at that offset, so display that offset.
elif len(chain) < limit + 1:
enhanced = pwndbg.enhance.enhance(chain[-2] + offset, code=code)
else:
enhanced = C.contiguous('%s' % config_contiguous)
if len(chain) == 1:
return enhanced
return arrow_right.join(rest) + arrow_left + enhanced
| anthraxx/pwndbg | pwndbg/chain.py | Python | mit | 4,555 |
# encoding: utf-8
__author__ = "Nils Tobias Schmidt"
__email__ = "schmidt89 at informatik.uni-marburg.de"
from androlyze.error.WrapperException import WrapperException
############################################################
#---Helper functions
############################################################
def _create_delete_error_msg(content, destination):
return "Could not delete %s from %s" % (content, destination)
def _create_store_error_msg(content, destination):
return "Could not store result for %s to %s" % (content, destination)
def _create_load_error_msg(content, source):
return "Could not load %s from %s" % (content, source)
############################################################
#---General storage exceptions
############################################################
class StorageException(WrapperException):
''' Base exception for data storage '''
pass
############################################################
#---Database storage exceptions
############################################################
DB_STORE = "database"
class DatabaseException(StorageException):
pass
class EDatabaseException(DatabaseException):
''' Extended DatabaseException that has the database as parameter as well as content '''
def __init__(self, db, content, caused_by = None, **kwargs):
'''
Parameters
----------
db : object
content : object
The object that couldn't be loaded/stored.
caused_by: Exception, optional (default is None)
the exception that caused this one to raise
'''
DatabaseException.__init__(self, caused_by = caused_by, **kwargs)
self.db = db
self.content = content
class DatabaseDeleteException(EDatabaseException):
def _msg(self):
return _create_delete_error_msg(self.content, self.db)
class DatabaseStoreException(EDatabaseException):
def _msg(self):
return _create_store_error_msg(self.content, self.db)
class DatabaseLoadException(EDatabaseException):
def _msg(self):
return _create_load_error_msg(self.content, self.db)
class DatabaseOpenError(DatabaseException):
def __init__(self, db_name, **kwargs):
super(DatabaseOpenError, self).__init__(**kwargs)
self.db_name = db_name
def _msg(self):
return 'Could not open database: "%s"' % self.db_name
############################################################
#---S3 storage exceptions
############################################################
DB_STORE = "database"
class S3StorageException(StorageException):
pass
class ES3StorageException(S3StorageException):
''' Extended DatabaseException that has the database as parameter as well as content '''
def __init__(self, db, content, caused_by = None, **kwargs):
'''
Parameters
----------
db : object
content : object
The object that couldn't be loaded/stored.
caused_by: Exception, optional (default is None)
the exception that caused this one to raise
'''
S3StorageException.__init__(self, caused_by = caused_by, **kwargs)
self.db = db
self.content = content
class S3StorageDeleteException(ES3StorageException):
def _msg(self):
return _create_delete_error_msg(self.content, self.db)
class S3StorageStoreException(ES3StorageException):
def _msg(self):
return _create_store_error_msg(self.content, self.db)
class S3StorageLoadException(ES3StorageException):
def _msg(self):
return _create_load_error_msg(self.content, self.db)
class S3StorageOpenError(ES3StorageException):
def __init__(self, db_name, **kwargs):
super(ES3StorageException, self).__init__(**kwargs)
self.db_name = db_name
def _msg(self):
return 'Could not open bucket: "%s"' % self.db_name
############################################################
#---File system storage exceptions
############################################################
class FileSysException(StorageException):
def __init__(self, file_path, fs_storage, *args, **kwargs):
'''
Parameters
----------
file_path: str
the path of the file
fs_store : FileSysStorage
'''
super(FileSysException, self).__init__(*args, **kwargs)
self.file_path = file_path
self.fs_storage = fs_storage
class FileSysStoreException(FileSysException):
def __init__(self, file_path, content, fs_storage, caused_by = None):
'''
Parameters
----------
file_path: str
the path of the file
content: object
the content which should be stored
fs_store : FileSysStorage
caused_by: Exception, optional (default is None)
the exception that caused this one to raise
'''
super(FileSysStoreException, self).__init__(file_path, fs_storage, caused_by = caused_by)
self.content = content
def _msg(self):
return _create_store_error_msg(self.content, self.file_path)
class FileSysCreateStorageStructureException(FileSysException):
def __init__(self, file_path, fs_storage, caused_by = None):
'''
Parameters
----------
file_path: str
the path of the file
fs_store : FileSysStorage
caused_by: Exception, optional (default is None)
the exception that caused this one to raise
'''
super(FileSysCreateStorageStructureException, self).__init__(file_path, fs_storage, caused_by = caused_by)
def _msg(self):
return "Could not create the file system structure: %s" % self.file_path
class FileSysLoadException(FileSysException):
def _msg(self):
return _create_load_error_msg(self.file_path, self.fs_storage)
class FileSysDeleteException(FileSysException):
def _msg(self):
return _create_delete_error_msg(self.file_path, self.fs_storage)
| nachtmaar/androlyze | androlyze/storage/exception/__init__.py | Python | mit | 6,046 |
import os, sys
import commands
import optparse
import shutil
INSTALL_DIR = ""
BASE_DIR = os.path.dirname(__file__)
SIP_FILE = "poppler-qt4.sip"
BUILD_DIR = "build"
SBF_FILE = "QtPoppler.sbf"
def _cleanup_path(path):
"""
Cleans the path:
- Removes traling / or \
"""
path = path.rstrip('/')
path = path.rstrip('\\')
return path
def pkgconfig(package):
'''
Calls pkg-config for the given package
Returns: - None if the package is not found.
- {'inc_dirs': [List of -L Paths]
'lib_dirs' : [List of -I Paths]
'libs ' : [List of -l libs]
}
'''
code, msg = commands.getstatusoutput("pkg-config --exists %s" % package)
if code != 0:
return None
tokens = commands.getoutput("pkg-config --libs --cflags %s" % package).split()
return {
'inc_dirs': [ token[2:] for token in tokens if token[:2] == '-I'],
'lib_dirs': [ token[2:] for token in tokens if token[:2] == '-L'],
'libs': [ token[2:] for token in tokens if token[:2] == '-l'],
}
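# Illustrative example: on a system where poppler-qt4 is registered with
# pkg-config, pkgconfig('poppler-qt4') could return something like
# {'inc_dirs': ['/usr/include/poppler/qt4'], 'lib_dirs': [], 'libs': ['poppler-qt4']};
# it returns None when the package is unknown.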
def create_optparser(sipcfg):
'''Comandline parser'''
def store_abspath(option, opt_str, value, parser):
setattr(parser.values, option.dest, os.path.abspath(value))
def get_default_moddir():
default = sipcfg.default_mod_dir
default = os.path.join(default, INSTALL_DIR)
return default
p = optparse.OptionParser(usage="%prog [options]")
default_moddir = get_default_moddir()
p.add_option("-d", "--destdir", action="callback",
default=default_moddir, type="string",
metavar="DIR",
dest="moddir", callback=store_abspath,
help="Where to install PyPoppler-Qt4 python modules."
"[default: %default]")
p.add_option("-s", "--sipdir", action="callback",
default=os.path.join(sipcfg.default_sip_dir, INSTALL_DIR),
metavar="DIR", dest="sipdir", callback=store_abspath,
type="string", help="Where the .sip files will be installed "
"[default: %default]")
p.add_option("", "--popplerqt-includes-dir", action="callback",
default=None,
metavar="DIR", dest="popplerqt_inc_dirs", callback=store_abspath,
type="string", help="PopplerQt include paths"
"[default: Auto-detected with pkg-config]")
p.add_option("", "--popplerqt-libs-dir", action="callback",
default=None,
metavar="DIR", dest="popplerqt_lib_dirs", callback=store_abspath,
type="string", help="PopplerQt libraries paths"
"[default: Auto-detected with pkg-config]")
return p
def get_pyqt4_config():
try:
import PyQt4.pyqtconfig
return PyQt4.pyqtconfig.Configuration()
except ImportError, e:
print >> sys.stderr, "ERROR: PyQt4 not found."
sys.exit(1)
def get_sip_config():
try:
import sipconfig
return sipconfig.Configuration()
except ImportError, e:
print >> sys.stderr, "ERROR: SIP (sipconfig) not found."
sys.exit(1)
def get_popplerqt_config(opts):
config = pkgconfig('poppler-qt4')
if config is not None:
found_pkgconfig = True
else:
found_pkgconfig = False
config = {'libs': ['poppler-qt4', 'poppler'],
'inc_dirs': None,
'lib_dirs': None}
if opts.popplerqt_inc_dirs is not None:
config['inc_dirs'] = opts.popplerqt_inc_dirs.split(" ")
if opts.popplerqt_lib_dirs is not None:
config['lib_dirs'] = opts.popplerqt_lib_dirs.split(" ")
if config['lib_dirs'] is None or config['inc_dirs'] is None:
print >> sys.stderr, "ERROR: poppler-qt4 not found."
print "Try to define PKG_CONFIG_PATH "
print "or use --popplerqt-libs-dir and --popplerqt-includes-dir options"
sys.exit(1)
config['inc_dirs'] = map(_cleanup_path, config['inc_dirs'])
config['lib_dirs'] = map(_cleanup_path, config['lib_dirs'])
config['sip_dir'] = _cleanup_path(opts.sipdir)
config['mod_dir'] = _cleanup_path(opts.moddir)
print "Using PopplerQt include paths: %s" % config['inc_dirs']
print "Using PopplerQt libraries paths: %s" % config['lib_dirs']
print "Configured to install SIP in %s" % config['sip_dir']
print "Configured to install binaries in %s" % config['mod_dir']
return config
def create_build_dir():
dir = os.path.join(BASE_DIR, BUILD_DIR)
if os.path.exists(dir):
return
try:
os.mkdir(dir)
except:
print >> sys.stderr, "ERROR: Unable to create the build directory (%s)" % dir
sys.exit(1)
def run_sip(pyqtcfg):
create_build_dir()
cmd = [pyqtcfg.sip_bin,
"-c", os.path.join(BASE_DIR, BUILD_DIR),
"-b", os.path.join(BUILD_DIR, SBF_FILE),
"-I", pyqtcfg.pyqt_sip_dir,
pyqtcfg.pyqt_sip_flags,
os.path.join(BASE_DIR, SIP_FILE)]
os.system( " ".join(cmd) )
def generate_makefiles(pyqtcfg, popplerqtcfg, opts):
from PyQt4 import pyqtconfig
import sipconfig
pypopplerqt4config_file = os.path.join(BASE_DIR, "pypopplerqt4config.py")
    # Creates the Makefile objects for the build directory
makefile_build = pyqtconfig.sipconfig.ModuleMakefile(
configuration=pyqtcfg,
build_file=SBF_FILE,
dir=BUILD_DIR,
install_dir=popplerqtcfg['mod_dir'],
warnings=1,
qt=['QtCore', 'QtGui', 'QtXml']
)
# Add extras dependencies for the compiler and the linker
# Libraries names don't include any platform specific prefixes
# or extensions (e.g. the "lib" prefix on UNIX, or the ".dll" extension on Windows)
makefile_build.extra_lib_dirs = popplerqtcfg['lib_dirs']
makefile_build.extra_libs = popplerqtcfg['libs']
makefile_build.extra_include_dirs = popplerqtcfg['inc_dirs']
# Generates build Makefile
makefile_build.generate()
# Generates root Makefile
installs_root = []
installs_root.append( (os.path.join(BASE_DIR, SIP_FILE), popplerqtcfg['sip_dir']) )
installs_root.append( (pypopplerqt4config_file, popplerqtcfg['mod_dir']) )
sipconfig.ParentMakefile(
configuration=pyqtcfg,
subdirs=[_cleanup_path(BUILD_DIR)],
installs=installs_root
).generate()
def generate_configuration_module(pyqtcfg, popplerqtcfg, opts):
import sipconfig
content = {
"pypopplerqt4_sip_dir": popplerqtcfg['sip_dir'],
"pypopplerqt4_sip_flags": pyqtcfg.pyqt_sip_flags,
"pypopplerqt4_mod_dir": popplerqtcfg['mod_dir'],
"pypopplerqt4_modules": 'PopplerQt',
"popplerqt4_inc_dirs": popplerqtcfg['inc_dirs'],
"popplerqt4_lib_dirs": popplerqtcfg['lib_dirs'],
}
# This creates the pypopplerqt4config.py module from the pypopplerqt4config.py.in
# template and the dictionary.
sipconfig.create_config_module(
os.path.join(BASE_DIR, "pypopplerqt4config.py"),
os.path.join(BASE_DIR, "pypopplerqt4config.py.in"),
content)
def main():
sipcfg = get_sip_config()
pyqtcfg = get_pyqt4_config()
parser = create_optparser(sipcfg)
opts, args = parser.parse_args()
popplerqtcfg = get_popplerqt_config(opts)
run_sip(pyqtcfg)
generate_makefiles(pyqtcfg, popplerqtcfg, opts)
generate_configuration_module(pyqtcfg, popplerqtcfg, opts)
if __name__ == "__main__":
main()
| cbeing/remoteSlideShow | third_party/pypoppler-qt4/configure.py | Python | mit | 7,528 |
#-*- coding: utf8 -*-
'''
Examples of advanced Python features:
- metaclass
- descriptor
- generator/forloop
'''
from __future__ import print_function
import sys
if sys.version_info > (3, ): # Python 3
exec('''
def exec_in(code, glob, loc=None):
if isinstance(code, str):
code = compile(code, '<string>', 'exec', dont_inherit=True)
exec(code, glob, loc)
''')
exec_in('''
def with_meta(cls):
class Meta(metaclass=cls):
pass
return Meta
''', globals())
else:
exec('''
def exec_in(code, glob, loc=None):
if isinstance(code, str):
        code = compile(code, '<string>', 'exec', dont_inherit=True)
exec code in glob, loc
''')
exec_in('''
def with_meta(cls):
class Meta(object):
__metaclass__ = cls
pass
return Meta
''', globals())
class AnimalMeta(type):
species = 0
def __new__(cls, name, bases, attrs):
if not name == 'Meta':
cls.species += 1
print(
'First, metaclass.__new__ received (metaclass, name, bases, attrs)')
print(cls, name, bases, attrs)
return super(AnimalMeta, cls).__new__(cls, name, bases, attrs)
def __init__(self, name, bases, attrs):
if not name == 'Meta':
print(
'Second, metaclass.__init__ received (self, name, bases, attrs)')
print(self, name, bases, attrs)
def __call__(self, *args, **kwargs):
print("AnimalMeta.__call__")
return super(AnimalMeta, self).__call__(*args, **kwargs)
class Cat(with_meta(AnimalMeta)):
name = 'cat'
def __init__(self):
print('Meow')
kit = Cat()
| dlutxx/memo | python/advanced.py | Python | mit | 1,657 |
"""
Prefill an Array
(6 kyu)
https://www.codewars.com/kata/54129112fb7c188740000162/train/python
Create the function prefill that returns an array of n elements that all have
the same value v. See if you can do this without using a loop.
You have to validate input:
- v can be anything (primitive or otherwise)
- if v is omitted, fill the array with undefined
- if n is 0, return an empty array
- if n is anything other than an integer or integer-formatted string
(e.g. '123') that is >=0, throw a TypeError
- When throwing a TypeError, the message should be n is invalid, where you
replace n for the actual value passed to the function.
Code Examples
prefill(3,1) --> [1,1,1]
prefill(2,"abc") --> ['abc','abc']
prefill("1", 1) --> [1]
prefill(3, prefill(2,'2d'))
--> [['2d','2d'],['2d','2d'],['2d','2d']]
prefill("xyz", 1)
--> throws TypeError with message "xyz is invalid"
"""
def prefill(n, v=None):
try:
n = int(n)
except (ValueError, TypeError):
raise TypeError("{} is invalid".format(n))
return [v] * n
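# A minimal self-check (added example, mirroring the kata examples above):
if __name__ == '__main__':
    assert prefill(3, 1) == [1, 1, 1]
    assert prefill(2, "abc") == ['abc', 'abc']
    assert prefill("1", 1) == [1]
    assert prefill(0) == []
    try:
        prefill("xyz", 1)
        raise AssertionError("expected a TypeError")
    except TypeError as error:
        assert str(error) == "xyz is invalid"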
| benpetty/Code-Katas | katas/prefill_an_array/prefill_an_array.py | Python | mit | 1,086 |
from .mdl_user import *
from .mdl_club import *
from .mdl_event import *
from .mdl_receipt import *
from .mdl_budget import *
from .mdl_division import *
from .mdl_eventsignin import *
from .mdl_joinrequest import *
| fsxfreak/club-suite | clubsuite/suite/models/__init__.py | Python | mit | 216 |
__author__ = 'deevarvar'
import string
import random
import os
#generate a random string
def string_generator(size=6, chars=string.ascii_letters+string.digits):
return ''.join(random.choice(chars) for _ in range(size))
#emulate touch cmd
def touchFile(fname, time=None):
with open(fname, 'a'):
os.utime(fname,time)
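# e.g. touchFile('/tmp/example.txt') creates the file if it is missing and,
# like the touch command, updates its access/modification times.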
| deevarvar/myLab | book/tlpi_zhiye/utlib/ut_util.py | Python | mit | 335 |
import typing
from flask import json
FlaskHeaders = typing.Union[typing.List[typing.Tuple[str, str]], typing.Dict[str, str]]
FlaskResponse = typing.Tuple[str, int, FlaskHeaders]
def success(data, status=200) -> FlaskResponse:
return json.dumps(data, indent=2), status, [("Content-Type", "application/json")]
def failure(message, status=400) -> FlaskResponse:
    # Normalize the message: wrap a plain string in a list, pass other
    # string-like values through, and repr() anything else.
    if hasattr(message, 'strip'):
        errors = [message]
    elif hasattr(message, 'split'):
        errors = message
    else:
        errors = repr(message)
    return json.dumps({"errors": errors}, indent=2), status, [("Content-Type", "application/json")]
| nanobox-io/nanobox-adapter-libcloud | nanobox_libcloud/utils/output.py | Python | mit | 612 |
#!/usr/bin/python2
# core.py
# aoneill - 04/10/17
import sys
import random
import time
import pauschpharos as PF
import lumiversepython as L
SEQ_LIM = 200
def memoize(ignore = None):
if(ignore is None):
ignore = set()
def inner(func):
cache = dict()
def wrapper(*args):
      # Build the cache key from every positional arg whose index is not in
      # `ignore`; ignored positions map to None and are filtered out. Filter
      # on "is not None" (not truthiness) so falsy args like 0 or '' still
      # contribute to the key.
      memo = tuple(filter(lambda x: x is not None,
                  map(lambda (i, e):
                        e if (i not in ignore)
                        else None,
                      enumerate(args))))
if(memo not in cache):
cache[memo] = func(*args)
return cache[memo]
return wrapper
return inner
def blank():
p = PF.PauschPharos()
p.SetBlank()
p.Trigger(PF.DEFAULT_ID, None)
def fireplace(rig):
# Warm up cache
for seq in xrange(SEQ_LIM):
query(rig, '$sequence=%d' % seq)
def init(upload = True, run = True, wipe = True, fire = True):
rig = L.Rig("/home/teacher/Lumiverse/PBridge.rig.json")
rig.init()
# Upload the blank template
if(upload):
blank()
# Run if requested
if(run):
rig.run()
# Wipe if requested
if(wipe):
for seq in xrange(SEQ_LIM):
query(rig, '$sequence=%d' % seq).setRGBRaw(0, 0, 0)
# Heat up the cache
if(fire and not wipe):
fireplace(rig)
return rig
@memoize(ignore = set([0]))
def query(rig, text):
return rig.select(text)
def seq(rig, num):
return query(rig, '$sequence=%d' % num)
def rand_color():
func = lambda: random.randint(0, 255) / 255.0
return (func(), func(), func())
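# Usage sketch (hardware-dependent -- requires the Pausch bridge rig file
# loaded in init() above -- so shown as comments only):
#
#   rig = init()
#   r, g, b = rand_color()
#   seq(rig, 1).setRGBRaw(r, g, b)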
| alexoneill/15-love | game/core.py | Python | mit | 1,573 |
from .circleclient import __version__
| qba73/circleclient | circleclient/__init__.py | Python | mit | 39 |
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Doriancoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet keypool and interaction with wallet encryption/locking."""
from test_framework.test_framework import DoriancoinTestFramework
from test_framework.util import *
class KeyPoolTest(DoriancoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def run_test(self):
nodes = self.nodes
addr_before_encrypting = nodes[0].getnewaddress()
addr_before_encrypting_data = nodes[0].validateaddress(addr_before_encrypting)
wallet_info_old = nodes[0].getwalletinfo()
assert(addr_before_encrypting_data['hdmasterkeyid'] == wallet_info_old['hdmasterkeyid'])
        # Encrypt wallet and wait for it to terminate
nodes[0].node_encrypt_wallet('test')
# Restart node 0
self.start_node(0)
# Keep creating keys
addr = nodes[0].getnewaddress()
addr_data = nodes[0].validateaddress(addr)
wallet_info = nodes[0].getwalletinfo()
assert(addr_before_encrypting_data['hdmasterkeyid'] != wallet_info['hdmasterkeyid'])
assert(addr_data['hdmasterkeyid'] == wallet_info['hdmasterkeyid'])
assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
        # refill the keypool with six new keys (six external plus six internal)
nodes[0].walletpassphrase('test', 12000)
nodes[0].keypoolrefill(6)
nodes[0].walletlock()
wi = nodes[0].getwalletinfo()
assert_equal(wi['keypoolsize_hd_internal'], 6)
assert_equal(wi['keypoolsize'], 6)
# drain the internal keys
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
addr = set()
# the next one should fail
assert_raises_rpc_error(-12, "Keypool ran out", nodes[0].getrawchangeaddress)
# drain the external keys
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
addr.add(nodes[0].getnewaddress())
assert(len(addr) == 6)
# the next one should fail
assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
        # refill keypool with three new keys
nodes[0].walletpassphrase('test', 1)
nodes[0].keypoolrefill(3)
# test walletpassphrase timeout
time.sleep(1.1)
assert_equal(nodes[0].getwalletinfo()["unlocked_until"], 0)
# drain them by mining
nodes[0].generate(1)
nodes[0].generate(1)
nodes[0].generate(1)
assert_raises_rpc_error(-12, "Keypool ran out", nodes[0].generate, 1)
nodes[0].walletpassphrase('test', 100)
nodes[0].keypoolrefill(100)
wi = nodes[0].getwalletinfo()
assert_equal(wi['keypoolsize_hd_internal'], 100)
assert_equal(wi['keypoolsize'], 100)
if __name__ == '__main__':
KeyPoolTest().main()
| doriancoins/doriancoin | test/functional/wallet_keypool.py | Python | mit | 3,428 |
#! /usr/bin/env python
"""Unit tests for SCardConnect/SCardStatus/SCardDisconnect
This test case can be executed individually, or with all other test cases
through testsuite_scard.py.
__author__ = "http://www.gemalto.com"
Copyright 2001-2012 gemalto
Author: Jean-Daniel Aussel, mailto:[email protected]
This file is part of pyscard.
pyscard is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
pyscard is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with pyscard; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import unittest
from smartcard.scard import *
# import local_config for reader/card configuration
# configcheck.py is generating local_config.py in
# the test suite.
import sys
sys.path += ['..']
try:
from local_config import expectedATRs, expectedReaders
from local_config import expectedReaderGroups, expectedATRinReader
except ImportError:
print 'execute test suite first to generate the local_config.py file'
sys.exit()
class testcase_getATR(unittest.TestCase):
"""Test scard API for ATR retrieval"""
def setUp(self):
hresult, self.hcontext = SCardEstablishContext(SCARD_SCOPE_USER)
self.assertEquals(hresult, 0)
hresult, self.readers = SCardListReaders(self.hcontext, [])
self.assertEquals(hresult, 0)
def tearDown(self):
hresult = SCardReleaseContext(self.hcontext)
self.assertEquals(hresult, 0)
def _getATR(self, r):
if r < len(expectedATRs) and [] != expectedATRs[r]:
hresult, hcard, dwActiveProtocol = SCardConnect(
self.hcontext,
self.readers[r],
SCARD_SHARE_SHARED,
SCARD_PROTOCOL_T0 | SCARD_PROTOCOL_T1)
self.assertEquals(hresult, 0)
try:
hresult, reader, state, protocol, atr = SCardStatus(hcard)
self.assertEquals(hresult, 0)
self.assertEquals(reader, expectedReaders[r])
self.assertEquals(atr, expectedATRs[r])
finally:
hresult = SCardDisconnect(hcard, SCARD_UNPOWER_CARD)
self.assertEquals(hresult, 0)
def test_getATR0(self):
testcase_getATR._getATR(self, 0)
def test_getATR1(self):
testcase_getATR._getATR(self, 1)
def test_getATR2(self):
testcase_getATR._getATR(self, 2)
def test_getATR3(self):
testcase_getATR._getATR(self, 3)
def suite():
suite1 = unittest.makeSuite(testcase_getATR)
    return unittest.TestSuite((suite1,))
if __name__ == '__main__':
unittest.main()
| mixja/eap-sim-lab | lib/pyscard-1.6.16/smartcard/test/scard/testcase_getatr.py | Python | mit | 3,083 |
#!/usr/bin/env python
from argparse import ArgumentParser
from collections import defaultdict
import sys
import os
from sonLib.bioio import cigarRead, cigarWrite, getTempFile, system
def getSequenceRanges(fa):
"""Get dict of (untrimmed header) -> [(start, non-inclusive end)] mappings
from a trimmed fasta."""
ret = defaultdict(list)
curSeq = ""
curHeader = None
curTrimmedStart = None
for line in fa:
line = line.strip()
if line == '':
continue
if line[0] == '>':
if curHeader is not None:
# Add previous seq info to dict
trimmedRange = (curTrimmedStart,
curTrimmedStart + len(curSeq))
untrimmedHeader = "|".join(curHeader.split("|")[:-1])
ret[untrimmedHeader].append(trimmedRange)
curHeader = line[1:].split()[0]
curTrimmedStart = int(curHeader.split('|')[-1])
curSeq = ""
else:
curSeq += line
if curHeader is not None:
# Add final seq info to dict
trimmedRange = (curTrimmedStart,
curTrimmedStart + len(curSeq))
untrimmedHeader = "|".join(curHeader.split("|")[:-1])
ret[untrimmedHeader].append(trimmedRange)
for key in ret.keys():
# Sort by range's start pos
ret[key] = sorted(ret[key], key=lambda x: x[0])
return ret
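# For example (illustrative): a trimmed fasta with headers ">chr1|0" and
# ">chr1|5000" whose sequences are 500 and 300 bases long yields
# {"chr1": [(0, 500), (5000, 5300)]}, sorted by range start.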
def validateRanges(seqRanges):
"""Fail if the given range dict contains overlapping ranges or if the
ranges aren't sorted.
"""
for seq, ranges in seqRanges.items():
for i, range in enumerate(ranges):
start = range[0]
if i - 1 >= 0:
range2 = ranges[i - 1]
assert start >= range2[1]
if i + 1 < len(ranges):
range2 = ranges[i + 1]
assert start < range2[0]
def sortCigarByContigAndPos(cigarPath, contigNum):
contigNameKey = 2 if contigNum == 1 else 6
startPosKey = 3 if contigNum == 1 else 7
tempFile = getTempFile()
system("sort -k %d,%d -k %d,%dn %s > %s" % (contigNameKey, contigNameKey, startPosKey, startPosKey, cigarPath, tempFile))
return tempFile
def upconvertCoords(cigarPath, fastaPath, contigNum, outputFile):
"""Convert the coordinates of the given alignment, so that the
alignment refers to a set of trimmed sequences originating from a
contig rather than to the contig itself."""
with open(fastaPath) as f:
seqRanges = getSequenceRanges(f)
validateRanges(seqRanges)
sortedCigarPath = sortCigarByContigAndPos(cigarPath, contigNum)
sortedCigarFile = open(sortedCigarPath)
currentContig = None
currentRangeIdx = None
currentRange = None
for alignment in cigarRead(sortedCigarFile):
# contig1 and contig2 are reversed in python api!!
contig = alignment.contig2 if contigNum == 1 else alignment.contig1
minPos = min(alignment.start2, alignment.end2) if contigNum == 1 else min(alignment.start1, alignment.end1)
maxPos = max(alignment.start2, alignment.end2) if contigNum == 1 else max(alignment.start1, alignment.end1)
if contig in seqRanges:
if contig != currentContig:
currentContig = contig
currentRangeIdx = 0
currentRange = seqRanges[contig][0]
while (minPos >= currentRange[1] or minPos < currentRange[0]) and currentRangeIdx < len(seqRanges[contig]) - 1:
currentRangeIdx += 1
currentRange = seqRanges[contig][currentRangeIdx]
if currentRange[0] <= minPos < currentRange[1]:
if maxPos - 1 > currentRange[1]:
raise RuntimeError("alignment on %s:%d-%d crosses "
"trimmed sequence boundary" %\
(contig,
minPos,
maxPos))
if contigNum == 1:
alignment.start2 -= currentRange[0]
alignment.end2 -= currentRange[0]
alignment.contig2 = contig + ("|%d" % currentRange[0])
else:
alignment.start1 -= currentRange[0]
alignment.end1 -= currentRange[0]
alignment.contig1 = contig + ("|%d" % currentRange[0])
else:
raise RuntimeError("No trimmed sequence containing alignment "
"on %s:%d-%d" % (contig,
minPos,
maxPos))
cigarWrite(outputFile, alignment, False)
os.remove(sortedCigarPath)
| benedictpaten/cactus | src/cactus/blast/upconvertCoordinates.py | Python | mit | 4,797 |
import pickle
from pueue.client.socket import connect_socket, receive_data, process_response
def command_factory(command):
"""A factory which returns functions for direct daemon communication.
This factory will create a function which sends a payload to the daemon
and returns the unpickled object which is returned by the daemon.
Args:
command (string): The type of payload this should be. This determines
            how the payload will be interpreted by the daemon.
Returns:
function: The created function.
"""
def communicate(body={}, root_dir=None):
"""Communicate with the daemon.
This function sends a payload to the daemon and returns the unpickled
object sent by the daemon.
Args:
            body (dict): Any other arguments that should be put into the payload.
            root_dir (str): The root directory in which we expect the daemon.
                We need this to connect to the daemon's socket.
Returns:
            The unpickled response payload from the daemon.
"""
client = connect_socket(root_dir)
body['mode'] = command
# Delete the func entry we use to call the correct function with argparse
        # as functions can't be pickled and this shouldn't be sent to the daemon.
if 'func' in body:
del body['func']
data_string = pickle.dumps(body, -1)
client.send(data_string)
# Receive message, unpickle and return it
response = receive_data(client)
return response
return communicate
def print_command_factory(command):
"""A factory which returns functions for direct daemon communication.
This factory will create a function which sends a payload to the daemon
and prints the response of the daemon. If the daemon sends a
`response['status'] == 'error'`, the pueue client will exit with `1`.
Args:
command (string): The type of payload this should be. This determines
            how the payload will be interpreted by the daemon.
Returns:
function: The created function.
"""
def communicate(body={}, root_dir=None):
client = connect_socket(root_dir)
body['mode'] = command
# Delete the func entry we use to call the correct function with argparse
        # as functions can't be pickled and this shouldn't be sent to the daemon.
if 'func' in body:
del body['func']
data_string = pickle.dumps(body, -1)
client.send(data_string)
        # Receive message and print it. Exit with 1 if an error has been sent.
response = receive_data(client)
process_response(response)
return communicate
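# Usage sketch (hypothetical command name and root_dir; both factories are
# used the same way -- build the function once, then call it):
#
#   status = command_factory('status')
#   response = status({}, root_dir='/home/user/.config/pueue')
#
# print_command_factory behaves the same, except it prints the daemon's
# response (exiting with 1 on an error status) instead of returning it.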
| Nukesor/Pueuew | pueue/client/factories.py | Python | mit | 2,761 |
__author__ = 'zhonghong'
| linzhonghong/zapi | zapi/core/__init__.py | Python | mit | 26 |
#!/usr/bin/env python
"""
Title : Java properties file generator
Author : JG
Date : dec 2016
Object : script to create the program's properties file
in : get infos from yml
out : print infos in properties file
"""
import sys,os
import yaml
import util as u
from random import randint
# ===============================================
# FUNCTION create Java Properties File
# in : get infos from yml
# out : print infos in properties file
# ===============================================
def create_properties_file(yml,armaDir):
progDir = u.define_prop_path(armaDir)
filename = progDir+""+u.get_program_name(yml)+".properties"
out = open(filename, 'w')
out.write("#Armadillo Workflow Platform 1.1 (c) Etienne Lord, Mickael Leclercq, Alix Boc, Abdoulaye Banire Diallo, Vladimir Makarenkov"+
"\n#"+yml['author']+
"\n#"+yml['date']+
"\n#Pgrogram info"+
"\nName= "+yml['Program']['name']+
"\nClassName= programs."+u.get_program_name(yml)+""+
"\nEditorClassName= editors."+u.get_program_name(yml)+"Editors"+
"\ndebug= false"+
"\nfilename= C\:\\armadillo2\\data\\properties\\"+u.get_program_name(yml)+".properties")
for paths in yml['Program']['executablePaths']:
out.write("\n"+paths+"="+yml['Program']['executablePaths'][paths])
out.write("\nHelpSupplementary=")
if yml['Program']['helpSupplementary']:
out.write(yml['Program']['helpSupplementary'])
out.write("\nPublication= ")
if yml['Program']['publication']:
out.write(yml['Program']['publication'])
out.write("\nDescription= ")
if yml['Program']['desc']:
out.write(yml['Program']['desc'])
ObjectID = randint(1000000000,9999999999)
out.write("\nObjectID="+u.get_program_name(yml)+"_"+str(ObjectID)+""+
"\nObjectType=Program"+
"\nNoThread=false")
out.write("\nType=")
if yml['Program']['menu']:
out.write(yml['Program']['menu'])
out.write("\nNormalExitValue=")
if yml['Program']['exitValue'] or yml['Program']['exitValue'] == 0:
out.write(str(yml['Program']['exitValue']))
out.write("\nVerifyExitValue=")
if yml['Program']['exitValue']:
out.write('true')
else:
out.write('false')
out.write("\nWebServices=")
if yml['Program']['webServices']:
out.write(yml['Program']['webServices'])
out.write("\nWebsite=")
if yml['Program']['website']:
out.write(yml['Program']['website'])
# Color options
color = u.get_color(yml)
out.write("\ncolorMode = "+color+""+
"\ndefaultColor = "+color+"")
# Inputs types
out.write("\n#INPUTS TYPES")
if len(yml['Inputs']) > 0:
o = ""
s = ""
for op in yml['Inputs']:
if op['type']:
out.write("\nInput"+op['type']+"=Connector"+str(op['connector']))
if op['OneConnectorOnlyFor']:
if o == "":
o = str(op['OneConnectorOnlyFor'])
else:
t = str(op['OneConnectorOnlyFor'])
if t not in o:
o = o+","+t
if op['SolelyConnectors']:
if s == "":
s = str(op['SolelyConnectors'])
else:
t = str(op['SolelyConnectors'])
if t not in o:
s = s+","+t
# Inputs options
if o != "" or s != "":
out.write("\n#INPUTS OPTIONS")
if o != "":
out.write("\nOneConnectorOnlyFor="+o)
if s != "":
out.write("\nSolelyConnectors= "+s)
else:
out.write("\nNO IMPUTS ??\n")
# Inputs Names
out.write("\n#INPUTS Connector text")
tab = ('2','3','4')
for t in tab:
c = ""
if len(yml['Inputs']) > 0:
for op in yml['Inputs']:
o = str(op['connector'])
if t in o or "true" in o:
if c == "":
c = str(op['connectorText'])
else:
s = str(op['connectorText'])
if s not in c:
c = c+", "+s
if c != "":
out.write("\nConnector"+t+"= "+c)
# Number of inputs
out.write("\nnbInput= ")
if yml['Program']['numImputs']:
out.write(str(yml['Program']['numImputs']))
# Outputs values
out.write("\n#OUTPUTS OPTIONS"+
"\nConnector0Output=True"+
"\nOutputResults=Connector0"+
"\nOutputOutputText=Connector0")
if len(yml['Outputs']) > 0:
for op in yml['Outputs']:
if op['type']:
out.write("\nOutput"+op['type']+"=Connector0")
# Default Values
out.write("\n#DEFAULT VALUES"+
"\ndefaultPgrmValues=")
for Panel in yml['Menus']:
pNameS = u.name_without_space(Panel['name'])
if 'Panel' not in Panel:
# Means default option
out.write(""+pNameS+"<>true<>")
else:
for Tab in Panel['Panel']:
if 'Arguments' in Tab:
tName = Tab['tab']
for Arguments in Tab['Arguments']:
cName = Arguments['name']
if 'values' in Arguments and \
Arguments['values'] is not None and \
Arguments['values']['vType'] is not None:
vType = Arguments['values']['vType']
v = u.create_value_name(pNameS,tName,cName,vType)
vDef = str(Arguments['values']['vDefault'])
out.write(v+"<>"+vDef+"<>")
out.write("\n#Cluster")
if 'Cluster' in yml and yml['Cluster'] is not None:
if 'ClusterProgramName' in yml['Cluster']:
out.write("\nClusterProgramName="+yml['Cluster']['ClusterProgramName'])
if 'ExecutableCluster' in yml['Cluster']:
out.write("\nExecutableCluster="+yml['Cluster']['ExecutableCluster'])
if 'version' in yml['Program']:
out.write("\nVersion= "+u.get_program_version(yml)+"")
out.write("\n#Docker")
if 'Docker' in yml and yml['Docker'] is not None:
if 'DockerImage' in yml['Docker']:
out.write("\nDockerImage="+yml['Docker']['DockerImage'])
if 'ExecutableDocker' in yml['Docker']:
out.write("\nExecutableDocker="+yml['Docker']['ExecutableDocker'])
if 'DockerInputs' in yml['Docker']:
out.write("\nDockerInputs="+yml['Docker']['DockerInputs'])
if 'DockerOutputs' in yml['Docker']:
out.write("\nDockerOutputs="+yml['Docker']['DockerOutputs'])
| JeGoi/IPa2 | packages/java_properties.py | Python | mit | 6,870 |
import os
os.environ['KIVY_GL_BACKEND'] = 'gl' #need this to fix a kivy segfault that occurs with python3 for some reason
from kivy.app import App
class TestApp(App):
pass
if __name__ == '__main__':
TestApp().run()
| ISS-Mimic/Mimic | Pi/kivytest/Test_Kivy.py | Python | mit | 225 |
import os.path, sys
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
import json
import falcon
import urllib
import uuid
import settings
import requests
from geopy.geocoders import Nominatim
import geopy.distance
from geopy.distance import vincenty
import datetime
radius = []
radius_maps = []
#geoJSON template to create radius (polygon) on geojson.io
geoJSON_template = {
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"properties": {},
"geometry": {
"type": "Polygon",
"coordinates": [
]
}
}
]
}
class interest(object):
global radius
interested = {}
def proximity_to_others(self, my_coordinates):
if radius:
for x in radius:
radius_center = (x['center'][0],x['center'][1])
my_coordinates = (my_coordinates[0], my_coordinates[1])
distance = vincenty(radius_center, my_coordinates).kilometers
print("Proximity distance")
print(distance)
return distance, x["center"]
else:
return 0, []
def geojson_io_prox(self, resp, my_coordinates, user_name):
global radius
distance = 0
radius = []
try:
distance,radius = self.proximity_to_others(my_coordinates)
except Exception as e:
print(e)
if not distance or distance < 1:
points = []
start = geopy.Point(my_coordinates[0], my_coordinates[1])
d = geopy.distance.VincentyDistance(kilometers = 1)
for x in range(0,360, 10):
points.append(d.destination(point=start, bearing=x))
print("\n\n POINTS")
print("\n\n")
radius_dict = {
'center': my_coordinates,
'radius': points,
'people': [user_name,],
'created_date': datetime.datetime.utcnow().strftime("%a %b %d %H:%M:%S %Z %Y")
}
radius.append(radius_dict)
print("\n\n RADIUS: ")
print(radius)
print("\n\n")
else:
for x in radius:
if x["center"] == radius:
x['people'].append(
{'name': user_name,
'coordinates':
my_coordinates}
)
def proximity(self,req, resp, my_coordinates, user_name):
# Works out user/client proximity to mytransport API stops
# Works on a radius of 1km. Assumption on average walk time
global radius_maps
google_map_url = "http://www.google.com/maps/place/"
query_params = {"point":"{},{}".format(my_coordinates[0], my_coordinates[1]),
"radius":"1000"}
endpoint ="api/stops"
headers = {"Authorization": "Bearer {}".format(settings.ACCESS_TOKEN)}
request = requests.get("{}/{}".format(settings.API_URL,endpoint),
params=query_params,
headers=headers)
print("Response from api/stops")
print(request.status_code)
response_data = request.json()
print(type(response_data))
if not response_data:
resp.status = falcon.HTTP_200
your_radius_map = ""
for x in radius_maps:
if x["center"] == my_coordinates:
your_radius_map = x["geoJSON_url"]
            message_dict = {'message':
                "No stops in your area, adding you to interest area", "maps": your_radius_map}
            resp.body = json.dumps(message_dict)
return False
else:
map_list = []
message_dict = {"message":"", "maps":[]}
for x in response_data:
print(x)
if 'geometry' in x:
coordinates = x["geometry"]["coordinates"]
map_list.append("{}{},{}".format(google_map_url,
coordinates[1],
coordinates[0]))
message_dict["maps"] = map_list
            if map_list:
                message_dict["message"] = """You have existing stops within 1km
                of your location"""
            else:
                message_dict["message"] = """You have no existing stops nearby,
                we will combine your interest in a stop with others in the area"""
resp.body = json.dumps(message_dict)
resp.status = falcon.HTTP_200
return True
def geopy_coordinates(self, address,resp):
try:
geolocator = Nominatim()
location = geolocator.geocode(address)
if location.latitude and location.longitude:
return [location.latitude, location.longitude]
except Exception as e:
print(e)
resp.body = """{'message':'Bad address,
try being more specific and try agai'}"""
resp.status = falcon.HTTP_400
def on_get(self, req, resp):
resp_dict = {"message":"Post request needed with GeoLocation data"}
resp.body = json.dumps(resp_dict)
resp.status = falcon.HTTP_200
def on_post(self, req, resp):
# Main API method, post the following
'''
POST Request
data type: JSON
Required: name, address or coordinates
        data format : {
            "name" : "Yourname",
            "address" : "Your number and street address, province, etc",
            "geometry" : { "coordinates" : ["x", "y"] }
        }
        '''
global radius_maps
global radius
print(req.headers)
user_name = ""
post_data = json.load(req.stream)
print(post_data)
if "name" in post_data:
user_name = post_data["name"]
print("Username IF statement")
print(user_name)
if "geometry" in post_data:
if not self.proximity(req,resp, post_data["geometry"]["coordinates"],user_name):
self.geojson_io_prox(resp, post_data["geometry"]["coordinates"],user_name)
elif post_data["address"]:
if "address" in post_data:
my_coordinates = self.geopy_coordinates(post_data["address"],resp)
print("BASED ON ADDRESS")
proximity = self.proximity(req, resp, my_coordinates, user_name)
print("PROXIMITY")
print(proximity)
if proximity == False:
print("NO routes")
self.geojson_io_prox(resp,my_coordinates, user_name)
else:
            resp_dict = { 'message' :
                'Please supply an address or coordinates (long,lat)'}
            # json.dumps allows proper formatting of the message
resp.body = json.dumps(resp_dict)
print("Current Radius")
print(radius)
radius_list = []
radius_maps = []
for x in radius:
for y in x['radius']:
radius_list.append([y[1],y[0]])
radius_list.append([x['radius'][0][1],x['radius'][0][0]])
geoJSON_template['features'][0]['geometry']['coordinates'].append(radius_list)
radius_maps.append( {
'center': x['center'],
'geoJSON': geoJSON_template,
'geoJSON_url' : "http://geojson.io/#map=5/{}/{}&data=data:application/json,{}".format(
x['center'][1], x['center'][0], urllib.quote(json.dumps(geoJSON_template).encode()) )
}
)
#resp.body
print(radius_maps)
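# Wiring sketch (hypothetical; this module only defines the resource class):
#
#   app = falcon.API()
#   app.add_route('/interest', interest())
#
# after which the JSON body documented in on_post() can be POSTed to /interest.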
| c-goosen/mytransport-hackathon | api/endpoints/interest.py | Python | mit | 7,851 |
def main(**kwargs):
print('foo foo')
| lorien/runscript | test/script/foo.py | Python | mit | 41 |
#!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool persistence.
By default, bitcoind will dump mempool on shutdown and
then reload it on startup. This can be overridden with
the -persistmempool=0 command line option.
Test is as follows:
- start node0, node1 and node2. node1 has -persistmempool=0
- create 5 transactions on node2 to its own address. Note that these
are not sent to node0 or node1 addresses because we don't want
them to be saved in the wallet.
- check that node0 and node1 have 5 transactions in their mempools
- shutdown all nodes.
- startup node0. Verify that it still has 5 transactions
in its mempool. Shutdown node0. This tests that by default the
mempool is persistent.
- startup node1. Verify that its mempool is empty. Shutdown node1.
This tests that with -persistmempool=0, the mempool is not
dumped to disk when the node is shut down.
- Restart node0 with -persistmempool=0. Verify that its mempool is
empty. Shutdown node0. This tests that with -persistmempool=0,
the mempool is not loaded from disk on start up.
- Restart node0 with -persistmempool. Verify that it has 5
transactions in its mempool. This tests that -persistmempool=0
does not overwrite a previously valid mempool stored on disk.
- Remove node0 mempool.dat and verify savemempool RPC recreates it
and verify that node1 can load it and has 5 transactions in its
mempool.
- Verify that savemempool throws when the RPC is called if
node1 can't write to disk.
"""
from decimal import Decimal
import os
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
assert_raises_rpc_error,
wait_until,
)
class MempoolPersistTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 3
self.extra_args = [[], ["-persistmempool=0"], []]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.log.debug("Send 5 transactions from node2 (to its own address)")
tx_creation_time_lower = int(time.time())
for i in range(5):
last_txid = self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
node2_balance = self.nodes[2].getbalance()
self.sync_all()
tx_creation_time_higher = int(time.time())
self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
assert_equal(len(self.nodes[0].getrawmempool()), 5)
assert_equal(len(self.nodes[1].getrawmempool()), 5)
self.log.debug("Prioritize a transaction on node0")
fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
assert_equal(fees['base'], fees['modified'])
self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000)
fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
tx_creation_time = self.nodes[0].getmempoolentry(txid=last_txid)['time']
assert_greater_than_or_equal(tx_creation_time, tx_creation_time_lower)
assert_greater_than_or_equal(tx_creation_time_higher, tx_creation_time)
self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
self.stop_nodes()
# Give this node a head-start, so we can be "extra-sure" that it didn't load anything later
# Also don't store the mempool, to keep the datadir clean
self.start_node(1, extra_args=["-persistmempool=0"])
self.start_node(0)
self.start_node(2)
wait_until(lambda: self.nodes[0].getmempoolinfo()["loaded"], timeout=1)
wait_until(lambda: self.nodes[2].getmempoolinfo()["loaded"], timeout=1)
assert_equal(len(self.nodes[0].getrawmempool()), 5)
assert_equal(len(self.nodes[2].getrawmempool()), 5)
# The others have loaded their mempool. If node_1 loaded anything, we'd probably notice by now:
assert_equal(len(self.nodes[1].getrawmempool()), 0)
self.log.debug('Verify prioritization is loaded correctly')
fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
self.log.debug('Verify time is loaded correctly')
assert_equal(tx_creation_time, self.nodes[0].getmempoolentry(txid=last_txid)['time'])
# Verify accounting of mempool transactions after restart is correct
self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet
assert_equal(node2_balance, self.nodes[2].getbalance())
self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
self.stop_nodes()
self.start_node(0, extra_args=["-persistmempool=0"])
wait_until(lambda: self.nodes[0].getmempoolinfo()["loaded"])
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
self.stop_nodes()
self.start_node(0)
wait_until(lambda: self.nodes[0].getmempoolinfo()["loaded"])
assert_equal(len(self.nodes[0].getrawmempool()), 5)
mempooldat0 = os.path.join(self.nodes[0].datadir, 'regtest', 'mempool.dat')
mempooldat1 = os.path.join(self.nodes[1].datadir, 'regtest', 'mempool.dat')
self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
os.remove(mempooldat0)
self.nodes[0].savemempool()
assert os.path.isfile(mempooldat0)
self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions")
os.rename(mempooldat0, mempooldat1)
self.stop_nodes()
self.start_node(1, extra_args=[])
wait_until(lambda: self.nodes[1].getmempoolinfo()["loaded"])
assert_equal(len(self.nodes[1].getrawmempool()), 5)
self.log.debug("Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
# to test the exception we are creating a tmp folder called mempool.dat.new
# which is an implementation detail that could change and break this test
mempooldotnew1 = mempooldat1 + '.new'
os.mkdir(mempooldotnew1)
assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
os.rmdir(mempooldotnew1)
if __name__ == '__main__':
MempoolPersistTest().main()
| tjps/bitcoin | test/functional/mempool_persist.py | Python | mit | 6,912 |
import argparse
from collections import defaultdict, Counter, deque
import random
import json
import time
from tqdm import tqdm
import wikipedia
class MarkovModel(object):
def __init__(self):
self.states = defaultdict(lambda: Counter())
self.totals = Counter()
def add_sample(self, state, followup):
self.states[state][followup] += 1
self.totals[state] += 1
def generate(self):
result = []
for followup in self.iter_chain():
result.append(followup)
return result
def iter_chain(self, state=tuple()):
while state in self.states:
followup = self.next(state)
state = state[1:] + followup
for token in followup:
yield token
def next(self, state):
r = random.randint(0, self.totals[state] - 1)
for followup, weight in self.states[state].items():
r -= weight
if r < 0:
return followup
raise ValueError("Mismatch of totals / weights for state {}".format(state))
def to_json(self):
converted = {' '.join(state): list(followups.keys()) for state, followups in self.states.items()}
return json.dumps(converted)
def iter_states(tokens, state_size, start_state=tuple(), end_marker=None):
# First transition is from empty state to first token-based state
yield start_state, tuple(tokens[0:state_size])
state = tuple(tokens[0:state_size])
for token in tokens[state_size:]:
# Each additional token means last state to that token
yield state, (token,)
# New state is last {state_size} tokens we yielded
state = state[1:] + (token,)
# End is marked by None
yield state, end_marker
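# For example (illustrative): iter_states(['a', 'b', 'c', 'd'], 2) yields
#   ((), ('a', 'b')), (('a', 'b'), ('c',)), (('b', 'c'), ('d',)), (('c', 'd'), None)
# i.e. an empty-state transition into the first window, one token per step,
# then the end marker.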
def tokenize_story(story):
story = deque(story)
yield "\n"
while len(story) > 0:
token = eat_one_token(story)
if token is not None:
yield token
def eat_one_token(story):
while len(story) > 0 and isinvalid(story[0]):
story.popleft()
if len(story) == 0:
return None
if isalnum(story[0]):
return eat_word(story)
if ispunctuation(story[0]):
return eat_punctuation(story)
if isnewline(story[0]):
return eat_newline(story)
def isinvalid(char):
return not isalnum(char) and not ispunctuation(char) and not isnewline(char)
def isalnum(char):
return char.isalnum() or char == "'" or char == "’"
def ispunctuation(char):
return char in ",.-!?:&"
def isnewline(char):
return char == '\n'
def eat_word(story):
word = [story.popleft()]
while len(story) > 0 and isalnum(story[0]):
word.append(story.popleft())
return ''.join(word)
def eat_punctuation(story):
token = [story.popleft()]
while len(story) > 0 and ispunctuation(story[0]):
token.append(story.popleft())
return ''.join(token)
def eat_newline(story):
while len(story) > 0 and story[0].isspace():
story.popleft()
return '\n'
def load_story(filenames):
stories = []
for filename in filenames:
with open(filename) as fp:
story = fp.read()
if filename.endswith('.ftxt'):
story = remove_single_newlines(story)
stories.append(story)
return '\n'.join(stories)
def remove_single_newlines(story):
paragraphs = [[]]
for line in story.splitlines():
if len(line.strip()) == 0:
paragraphs.append([])
else:
paragraphs[-1].append(line)
return '\n'.join(' '.join(x for x in p) for p in paragraphs)
def load_wikipedia(num_articles):
lines = []
while num_articles > 0:
chunk = min(10, num_articles)
num_articles -= 10
for article in wikipedia.random(chunk):
try:
page = wikipedia.page(article)
except wikipedia.DisambiguationError as ex:
page = wikipedia.page(ex.args[1][0])
print(article)
lines.extend(x for x in page.content.splitlines() if not x.startswith('==') and len(x) > 0)
return '\n'.join(lines)
def main(args):
model = MarkovModel()
if args.mode == 'txt':
story = load_story(args.txt)
elif args.mode == 'wikipedia':
story = load_wikipedia(100)
else:
raise ValueError("invalid mode {}".format(args.mode))
tokens = list(tqdm(tokenize_story(story), desc="tokenizing"))
for state, followup in tqdm(iter_states(tokens, 3, start_state=tuple('\n'), end_marker=()), desc="building model"):
model.add_sample(state, followup)
print("Saving Model...")
with open("model.json", "w") as fp:
fp.write(model.to_json())
print("Generating Story:")
for token in model.iter_chain(tuple('\n')):
if not ispunctuation(token):
print(" ", end="")
print(token, end="", flush=True)
time.sleep(0.05)
def parse_args():
ap = argparse.ArgumentParser()
ap.add_argument('mode', choices=['txt', 'wikipedia'])
ap.add_argument('--txt', action='append')
return ap.parse_args()
if __name__ == '__main__':
main(parse_args())
| bschug/neverending-story | markov.py | Python | mit | 5,169 |
from sanic.exceptions import *
class GatcoException(SanicException):
pass | gonrin/gatco | gatco/exceptions.py | Python | mit | 78 |
SCORES = {'A': 100, 'B': 14, 'C': 9, 'D': 28, 'E': 145, 'F': 12, 'G': 3,
'H': 10, 'I': 200, 'J': 100, 'K': 114, 'L': 100, 'M': 25,
'N': 450, 'O': 80, 'P': 2, 'Q': 12, 'R': 400, 'S': 113, 'T': 405,
'U': 11, 'V': 10, 'W': 10, 'X': 3, 'Y': 210, 'Z': 23}
def sexy_name(name):
name_score = sum(SCORES.get(a, 0) for a in name.upper())
if name_score >= 600:
return 'THE ULTIMATE SEXIEST'
elif name_score >= 301:
return 'VERY SEXY'
elif name_score >= 60:
return 'PRETTY SEXY'
return 'NOT TOO SEXY'
| the-zebulan/CodeWars | katas/beta/how_sexy_is_your_name.py | Python | mit | 566 |
l = list(range(1, 20 + 1, 2)) # this is wasteful in this program
for i in l:
print(i) | thoughtarray/IntroToProgramming | Chapter 4 Working with Lists/4_6.py | Python | mit | 91 |
#!/usr/bin/env python3
import json
import os
import unittest
import requests
AGNOS_DIR = os.path.dirname(os.path.abspath(__file__))
MANIFEST = os.path.join(AGNOS_DIR, "agnos.json")
class TestAgnosUpdater(unittest.TestCase):
def test_manifest(self):
with open(MANIFEST) as f:
m = json.load(f)
for img in m:
r = requests.head(img['url'])
r.raise_for_status()
self.assertEqual(r.headers['Content-Type'], "application/x-xz")
if not img['sparse']:
assert img['hash'] == img['hash_raw']
if __name__ == "__main__":
unittest.main()
| commaai/openpilot | selfdrive/hardware/tici/test_agnos_updater.py | Python | mit | 595 |
import argparse
import smtplib
import mimetypes
import os.path as path
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
def parseOptions():
parser = argparse.ArgumentParser(description='Send e-books to my Kindle Paperwhite.')
parser.add_argument('file', type=str, help='The e-book file.')
parser.add_argument('-c', type=str, default = 'y', help='Convert?.')
parser.add_argument('-t', type=str, default = 'n', help='Test mode, email sent to yahoo.')
args = parser.parse_args()
# print args
return args
def send2Kindle():
opt = parseOptions()
msg = MIMEMultipart()
msg['From'] = 'YOUR_EMAIL_ADDR'
msg['To'] = 'YOUR_KINDLE_EMAIL_ADDR' if opt.t == 'n' else 'YOUR_DEBUG_EMAIL_ADDR'
# TODO check if option is valid
msg['Subject'] = '' if opt.c == 'n' else 'Convert'
fileToSend = opt.file # the file to attach
    if not path.exists(fileToSend):
        raise IOError('File not found: %s' % fileToSend)
# get maintype and subtype for MIMEBase
ctype, encoding = mimetypes.guess_type(fileToSend)
maintype, subtype = ctype.split("/", 1)
# read (only) 1 attachment file
fp = open(fileToSend,"rb")
attachment = MIMEBase(maintype, subtype)
attachment.set_payload(fp.read())
fp.close()
encoders.encode_base64(attachment)
attachment.add_header("Content-Disposition", "attachment", filename = fileToSend.split('/')[-1])
msg.attach(attachment)
# set mail server info, using yahoo.com as an example
username = str('[email protected]')
    password = str('email_psd')  # Use an app-generated password for security purposes
try :
        server = smtplib.SMTP("smtp.mail.yahoo.com",587) # an example setup for the yahoo mail server
print 'Logging into {server}'.format(server='mail.yahoo.com')
server.starttls()
server.login(username,password)
print 'Login success!'
print 'Sending "{file}" to {to}.'.format(file = fileToSend, to = msg['To'])
server.sendmail(msg['From'], msg['To'], msg.as_string())
server.quit()
print 'Email has been sent successfully!'
    except Exception:
print 'Cannot send the Email!'
if __name__ == '__main__':
send2Kindle()
| comicxmz001/Send2Kindle | Send2Kindle_command_line.py | Python | mit | 2,130 |
import deep_architect.searchers.common as se
import numpy as np
# NOTE: this searcher does not do any budget adjustment and needs to be
# combined with an evaluator that does.
class SuccessiveNarrowing(se.Searcher):
def __init__(self, search_space_fn, num_initial_samples, reduction_factor,
reset_default_scope_upon_sample):
se.Searcher.__init__(self, search_space_fn,
reset_default_scope_upon_sample)
self.num_initial_samples = num_initial_samples
self.reduction_factor = reduction_factor
self.vals = [None for _ in range(num_initial_samples)]
self.num_remaining = num_initial_samples
self.idx = 0
self.queue = []
for _ in range(num_initial_samples):
inputs, outputs = search_space_fn()
hyperp_value_lst = se.random_specify(outputs)
self.queue.append(hyperp_value_lst)
def sample(self):
assert self.idx < len(self.queue)
hyperp_value_lst = self.queue[self.idx]
(inputs, outputs) = self.search_space_fn()
se.specify(outputs, hyperp_value_lst)
idx = self.idx
self.idx += 1
return inputs, outputs, hyperp_value_lst, {"idx": idx}
def update(self, val, searcher_eval_token):
assert self.num_remaining > 0
idx = searcher_eval_token["idx"]
assert self.vals[idx] is None
self.vals[idx] = val
self.num_remaining -= 1
# generate the next round of architectures by keeping the best ones.
if self.num_remaining == 0:
num_samples = int(self.reduction_factor * len(self.queue))
assert num_samples > 0
top_idxs = np.argsort(self.vals)[::-1][:num_samples]
self.queue = [self.queue[idx] for idx in top_idxs]
self.vals = [None for _ in range(num_samples)]
self.num_remaining = num_samples
self.idx = 0
# run simple successive narrowing on a single machine.
def run_successive_narrowing(search_space_fn, num_initial_samples,
initial_budget, get_evaluator, extract_val_fn,
num_samples_reduction_factor,
budget_increase_factor, num_rounds,
get_evaluation_logger):
num_samples = num_initial_samples
    # NOTE: the constructor also requires reset_default_scope_upon_sample,
    # which the original call omitted; True is assumed here.
    searcher = SuccessiveNarrowing(search_space_fn, num_initial_samples,
                                   num_samples_reduction_factor,
                                   reset_default_scope_upon_sample=True)
evaluation_id = 0
for round_idx in range(num_rounds):
budget = initial_budget * (budget_increase_factor**round_idx)
evaluator = get_evaluator(budget)
for idx in range(num_samples):
(inputs, outputs, hyperp_value_lst,
searcher_eval_token) = searcher.sample()
results = evaluator.eval(inputs, outputs)
val = extract_val_fn(results)
searcher.update(val, searcher_eval_token)
logger = get_evaluation_logger(evaluation_id)
logger.log_config(hyperp_value_lst, searcher_eval_token)
logger.log_results(results)
evaluation_id += 1
num_samples = int(num_samples_reduction_factor * num_samples)
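# Schedule sketch (illustrative): with num_initial_samples=16,
# num_samples_reduction_factor=0.5, initial_budget=1 and
# budget_increase_factor=2, the rounds evaluate
#   round 0: 16 samples at budget 1
#   round 1:  8 samples at budget 2
#   round 2:  4 samples at budget 4
# i.e. the sample count shrinks geometrically while the budget grows.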
| negrinho/deep_architect | deep_architect/searchers/successive_narrowing.py | Python | mit | 3,234 |
# Copyright (c) 2015 The Phtevencoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Dummy Socks5 server for testing.
'''
from __future__ import print_function, division, unicode_literals
import socket, threading, Queue
import traceback, sys
### Protocol constants
class Command:
CONNECT = 0x01
class AddressType:
IPV4 = 0x01
DOMAINNAME = 0x03
IPV6 = 0x04
### Utility functions
def recvall(s, n):
'''Receive n bytes from a socket, or fail'''
rv = bytearray()
while n > 0:
d = s.recv(n)
if not d:
raise IOError('Unexpected end of stream')
rv.extend(d)
n -= len(d)
return rv
### Implementation classes
class Socks5Configuration(object):
'''Proxy configuration'''
def __init__(self):
self.addr = None # Bind address (must be set)
self.af = socket.AF_INET # Bind address family
self.unauth = False # Support unauthenticated
self.auth = False # Support authentication
class Socks5Command(object):
'''Information about an incoming socks5 command'''
def __init__(self, cmd, atyp, addr, port, username, password):
self.cmd = cmd # Command (one of Command.*)
self.atyp = atyp # Address type (one of AddressType.*)
self.addr = addr # Address
self.port = port # Port to connect to
self.username = username
self.password = password
def __repr__(self):
return 'Socks5Command(%s,%s,%s,%s,%s,%s)' % (self.cmd, self.atyp, self.addr, self.port, self.username, self.password)
class Socks5Connection(object):
def __init__(self, serv, conn, peer):
self.serv = serv
self.conn = conn
self.peer = peer
def handle(self):
'''
Handle socks5 request according to RFC1928
'''
try:
# Verify socks version
ver = recvall(self.conn, 1)[0]
if ver != 0x05:
raise IOError('Invalid socks version %i' % ver)
# Choose authentication method
nmethods = recvall(self.conn, 1)[0]
methods = bytearray(recvall(self.conn, nmethods))
method = None
if 0x02 in methods and self.serv.conf.auth:
method = 0x02 # username/password
elif 0x00 in methods and self.serv.conf.unauth:
method = 0x00 # unauthenticated
if method is None:
raise IOError('No supported authentication method was offered')
# Send response
self.conn.sendall(bytearray([0x05, method]))
# Read authentication (optional)
username = None
password = None
if method == 0x02:
ver = recvall(self.conn, 1)[0]
if ver != 0x01:
raise IOError('Invalid auth packet version %i' % ver)
ulen = recvall(self.conn, 1)[0]
username = str(recvall(self.conn, ulen))
plen = recvall(self.conn, 1)[0]
password = str(recvall(self.conn, plen))
# Send authentication response
self.conn.sendall(bytearray([0x01, 0x00]))
# Read connect request
(ver,cmd,rsv,atyp) = recvall(self.conn, 4)
if ver != 0x05:
raise IOError('Invalid socks version %i in connect request' % ver)
if cmd != Command.CONNECT:
raise IOError('Unhandled command %i in connect request' % cmd)
if atyp == AddressType.IPV4:
addr = recvall(self.conn, 4)
elif atyp == AddressType.DOMAINNAME:
n = recvall(self.conn, 1)[0]
addr = str(recvall(self.conn, n))
elif atyp == AddressType.IPV6:
addr = recvall(self.conn, 16)
else:
raise IOError('Unknown address type %i' % atyp)
port_hi,port_lo = recvall(self.conn, 2)
port = (port_hi << 8) | port_lo
# Send dummy response
self.conn.sendall(bytearray([0x05, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]))
cmdin = Socks5Command(cmd, atyp, addr, port, username, password)
self.serv.queue.put(cmdin)
print('Proxy: ', cmdin)
# Fall through to disconnect
except Exception,e:
traceback.print_exc(file=sys.stderr)
self.serv.queue.put(e)
finally:
self.conn.close()
class Socks5Server(object):
def __init__(self, conf):
self.conf = conf
self.s = socket.socket(conf.af)
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.s.bind(conf.addr)
self.s.listen(5)
self.running = False
self.thread = None
self.queue = Queue.Queue() # report connections and exceptions to client
def run(self):
while self.running:
(sockconn, peer) = self.s.accept()
if self.running:
conn = Socks5Connection(self, sockconn, peer)
thread = threading.Thread(None, conn.handle)
thread.daemon = True
thread.start()
def start(self):
assert(not self.running)
self.running = True
self.thread = threading.Thread(None, self.run)
self.thread.daemon = True
self.thread.start()
def stop(self):
self.running = False
# connect to self to end run loop
s = socket.socket(self.conf.af)
s.connect(self.conf.addr)
s.close()
self.thread.join()
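# Usage sketch (Python 2, matching this module): start the dummy proxy on an
# arbitrary local port and inspect the Socks5Command objects it queues.
#
#   conf = Socks5Configuration()
#   conf.addr = ('127.0.0.1', 19050)
#   conf.unauth = True
#   serv = Socks5Server(conf)
#   serv.start()
#   # ... point a SOCKS5 client at 127.0.0.1:19050 ...
#   cmd = serv.queue.get()
#   serv.stop()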
| ravenbyron/phtevencoin | qa/rpc-tests/test_framework/socks5.py | Python | mit | 5,705 |
# -*- coding: utf-8 -*-
"""Implement directed graph abstract data type."""
from __future__ import print_function
from .stack import Stack
from .queue import Queue
class Graph(object):
"""Implement a directed graph."""
def __init__(self):
"""Initialize a graph with no nodes or edges."""
self._nodes = {}
def add_node(self, n):
"""Add a node to the graph.
Do nothing if it's already in the graph.
"""
self._nodes.setdefault(n, {})
def nodes(self):
"""Return a list of all nodes."""
return list(self._nodes)
def _edges(self):
"""Return a generator of all edges (represented by tuples)."""
for node in self._nodes:
for neighbor in self._nodes[node]:
yield (node, neighbor, self._nodes[node][neighbor])
def edges(self):
"""Return a list of all edges (represented by tuples).
The tuples are generated like (node1, node2, weight).
"""
return list(self._edges())
def add_edge(self, a, b, weight):
"""Add an edge between two nodes.
If those nodes don't exist yet in the graph, create them.
"""
self.add_node(a)
self.add_node(b)
self._nodes[a][b] = weight
def del_node(self, n):
"""Delete a node in the graph.
Deletes all the edges involving the node as well. Raises
ValueError if the node isn't in the graph.
"""
try:
del self._nodes[n]
except KeyError:
raise ValueError('Node not in graph')
for neighbors in self._nodes.values():
try:
del neighbors[n]
except KeyError:
pass
def del_edge(self, a, b):
"""Delete an edge in the graph.
Raises ValueError if the edge isn't in the graph.
"""
try:
del self._nodes[a][b]
except KeyError:
raise ValueError('Node not in graph')
def has_node(self, n):
"""Return whether a node is in the graph."""
return n in self._nodes
def neighbors(self, n):
"""Return a list of all the neighbors a node has."""
return list(self._nodes[n])
def adjacent(self, n1, n2):
"""Return whether two nodes are adjacent.
Raises a ValueError if either node is not in the graph.
"""
try:
return n2 in self._nodes[n1]
except KeyError:
raise ValueError('Node not in graph')
def _traverse(self, start, nonempty, add, remove):
"""Traverse iteratively.
Uses given functions (nonempty, add, remove) to store
nodes. Depending on the behavior of these functions, this
traversal may be performed differently.
"""
add(start)
result = []
while nonempty():
root = remove()
if root not in result:
result.append(root)
for node in self.neighbors(root):
add(node)
return result
def depth_first_traversal(self, start):
"""Return a list of nodes as found in depth-first order."""
stack = Stack()
return self._traverse(start, stack.size, stack.push, stack.pop)
def breadth_first_traversal(self, start):
"""Return a list of nodes as found in breadth-first order."""
queue = Queue()
return self._traverse(start, queue.size, queue.enqueue, queue.dequeue)
def shortest_path(self, start, end):
"""Dijkstra's algorithm."""
distance_from_start = {start: 0}
unvisited = set(self.nodes())
parents = {}
while end in unvisited:
current = min((weight, node)
for node, weight
in distance_from_start.items()
if node in unvisited)[1]
for neighbor in self.neighbors(current):
weight = self._nodes[current][neighbor] + distance_from_start[current]
dist = distance_from_start.setdefault(neighbor, weight)
if weight <= dist:
distance_from_start[neighbor] = weight
parents[neighbor] = current
unvisited.remove(current)
s = []
weight = 0
current = end
while current in parents:
s.append(current)
weight += self._nodes[parents[current]][current]
current = parents[current]
s.append(start)
return s[::-1], weight
if __name__ == '__main__':
g = Graph()
g.add_edge('0-0', '1-0', 0)
g.add_edge('0-0', '1-1', 0)
g.add_edge('1-0', '2-0', 0)
g.add_edge('1-0', '2-1', 0)
g.add_edge('1-1', '2-2', 0)
g.add_edge('1-1', '2-3', 0)
print(r'''Graph:
0-0
/ \
1-0 1-1
/ \ / \
2-0 2-1 2-2 2-3''')
print('depth first ', g.depth_first_traversal('0-0'))
print('breadth first ', g.breadth_first_traversal('0-0'))
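    # Added example (not in the original demo): Dijkstra's shortest_path on a
    # small weighted graph.
    w = Graph()
    w.add_edge('a', 'b', 1)
    w.add_edge('b', 'c', 2)
    w.add_edge('a', 'c', 5)
    print('shortest path ', w.shortest_path('a', 'c'))  # (['a', 'b', 'c'], 3)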
| welliam/data-structures | src/graph.py | Python | mit | 5,049 |
import os
import shutil
import unittest
from django.utils import six
from django_node import node, npm
from django_node.node_server import NodeServer
from django_node.server import server
from django_node.base_service import BaseService
from django_node.exceptions import (
OutdatedDependency, MalformedVersionInput, NodeServiceError, NodeServerAddressInUseError, NodeServerTimeoutError,
ServiceSourceDoesNotExist, MalformedServiceName
)
from django_node.services import EchoService
from .services import TimeoutService, ErrorService
from .utils import StdOutTrap
TEST_DIR = os.path.abspath(os.path.dirname(__file__))
PATH_TO_NODE_MODULES = os.path.join(TEST_DIR, 'node_modules')
DEPENDENCY_PACKAGE = 'yargs'
PATH_TO_INSTALLED_PACKAGE = os.path.join(PATH_TO_NODE_MODULES, DEPENDENCY_PACKAGE)
PACKAGE_TO_INSTALL = 'jquery'
PATH_TO_PACKAGE_TO_INSTALL = os.path.join(PATH_TO_NODE_MODULES, PACKAGE_TO_INSTALL)
PATH_TO_PACKAGE_JSON = os.path.join(TEST_DIR, 'package.json')
echo_service = EchoService()
timeout_service = TimeoutService()
error_service = ErrorService()
class TestDjangoNode(unittest.TestCase):
maxDiff = None
def setUp(self):
self.package_json_contents = self.read_package_json()
def tearDown(self):
if os.path.exists(PATH_TO_NODE_MODULES):
shutil.rmtree(PATH_TO_NODE_MODULES)
self.write_package_json(self.package_json_contents)
if server.is_running:
# Reset the server
server.stop()
def read_package_json(self):
with open(PATH_TO_PACKAGE_JSON, 'r') as package_json_file:
return package_json_file.read()
def write_package_json(self, contents):
with open(PATH_TO_PACKAGE_JSON, 'w+') as package_json_file:
package_json_file.write(contents)
def test_node_is_installed(self):
self.assertTrue(node.is_installed)
def test_node_version_raw(self):
self.assertTrue(isinstance(node.version_raw, six.string_types))
self.assertGreater(len(node.version_raw), 0)
def test_node_version(self):
self.assertTrue(isinstance(node.version, tuple))
self.assertGreaterEqual(len(node.version), 3)
def test_npm_is_installed(self):
self.assertTrue(npm.is_installed)
def test_npm_version_raw(self):
self.assertTrue(isinstance(npm.version_raw, six.string_types))
self.assertGreater(len(npm.version_raw), 0)
def test_npm_version(self):
self.assertTrue(isinstance(npm.version, tuple))
self.assertGreaterEqual(len(npm.version), 3)
def test_ensure_node_installed(self):
node.ensure_installed()
def test_ensure_npm_installed(self):
npm.ensure_installed()
def test_ensure_node_version_greater_than(self):
self.assertRaises(MalformedVersionInput, node.ensure_version_gte, 'v99999.0.0')
self.assertRaises(MalformedVersionInput, node.ensure_version_gte, '99999.0.0')
self.assertRaises(MalformedVersionInput, node.ensure_version_gte, (None,))
self.assertRaises(MalformedVersionInput, node.ensure_version_gte, (10,))
self.assertRaises(MalformedVersionInput, node.ensure_version_gte, (999999999,))
self.assertRaises(MalformedVersionInput, node.ensure_version_gte, (999999999, 0,))
self.assertRaises(OutdatedDependency, node.ensure_version_gte, (999999999, 0, 0,))
node.ensure_version_gte((0, 0, 0,))
node.ensure_version_gte((0, 9, 99999999))
node.ensure_version_gte((0, 10, 33,))
def test_ensure_npm_version_greater_than(self):
self.assertRaises(MalformedVersionInput, npm.ensure_version_gte, 'v99999.0.0')
self.assertRaises(MalformedVersionInput, npm.ensure_version_gte, '99999.0.0')
self.assertRaises(MalformedVersionInput, npm.ensure_version_gte, (None,))
self.assertRaises(MalformedVersionInput, npm.ensure_version_gte, (10,))
self.assertRaises(MalformedVersionInput, npm.ensure_version_gte, (999999999,))
self.assertRaises(MalformedVersionInput, npm.ensure_version_gte, (999999999, 0,))
self.assertRaises(OutdatedDependency, npm.ensure_version_gte, (999999999, 0, 0,))
npm.ensure_version_gte((0, 0, 0,))
npm.ensure_version_gte((0, 9, 99999999))
npm.ensure_version_gte((2, 1, 8,))
def test_node_run_returns_output(self):
stderr, stdout = node.run('--version',)
stdout = stdout.strip()
self.assertEqual(stdout, node.version_raw)
def test_npm_run_returns_output(self):
stderr, stdout = npm.run('--version',)
stdout = stdout.strip()
self.assertEqual(stdout, npm.version_raw)
def test_npm_install_can_install_dependencies(self):
npm.install(TEST_DIR)
self.assertTrue(os.path.exists(PATH_TO_NODE_MODULES))
self.assertTrue(os.path.exists(PATH_TO_INSTALLED_PACKAGE))
def test_node_server_services_can_be_validated(self):
class MissingSource(BaseService):
pass
self.assertRaises(ServiceSourceDoesNotExist, MissingSource.validate)
class AbsoluteUrlName(EchoService):
name = 'http://foo.com'
self.assertRaises(MalformedServiceName, AbsoluteUrlName.validate)
class MissingOpeningSlashName(EchoService):
name = 'foo/bar'
self.assertRaises(MalformedServiceName, MissingOpeningSlashName.validate)
def test_node_server_services_are_discovered(self):
for service in (EchoService, ErrorService, TimeoutService):
self.assertIn(service, server.services)
def test_node_server_can_start_and_stop(self):
self.assertIsInstance(server, NodeServer)
server.start()
self.assertTrue(server.is_running)
self.assertTrue(server.test())
server.stop()
self.assertFalse(server.is_running)
self.assertFalse(server.test())
server.start()
self.assertTrue(server.is_running)
self.assertTrue(server.test())
server.stop()
self.assertFalse(server.is_running)
self.assertFalse(server.test())
def test_node_server_process_can_rely_on_externally_controlled_processes(self):
self.assertFalse(server.test())
new_server = NodeServer()
new_server.start()
self.assertTrue(server.test())
new_server.stop()
self.assertFalse(new_server.test())
self.assertFalse(server.test())
def test_node_server_process_can_raise_on_port_collisions(self):
self.assertFalse(server.test())
new_server = NodeServer()
new_server.start()
self.assertTrue(server.test())
self.assertEqual(server.address, new_server.address)
self.assertEqual(server.port, new_server.port)
self.assertRaises(NodeServerAddressInUseError, server.start, use_existing_process=False)
new_server.stop()
self.assertFalse(server.test())
server.start(use_existing_process=False)
self.assertTrue(server.test())
def test_node_server_config_is_as_expected(self):
config = server.get_config()
self.assertEqual(config['address'], server.address)
self.assertEqual(config['port'], server.port)
self.assertEqual(config['startup_output'], server.get_startup_output())
services = (EchoService, ErrorService, TimeoutService)
self.assertEqual(len(config['services']), len(services))
service_names = [obj['name'] for obj in config['services']]
service_sources = [obj['path_to_source'] for obj in config['services']]
for service in services:
self.assertIn(service.get_name(), service_names)
self.assertIn(service.get_path_to_source(), service_sources)
def test_node_server_echo_service_pumps_output_back(self):
response = echo_service.send(echo='test content')
self.assertEqual(response.text, 'test content')
def test_node_server_throws_timeout_on_long_running_services(self):
self.assertRaises(NodeServerTimeoutError, timeout_service.send)
def test_node_server_error_service_works(self):
self.assertRaises(NodeServiceError, error_service.send)
def test_node_server_config_management_command_provides_the_expected_output(self):
from django_node.management.commands.node_server_config import Command
with StdOutTrap() as output:
Command().handle()
        self.assertEqual(''.join(output), server.get_serialised_config())
| markfinger/django-node | tests/tests.py | Python | mit | 8,492 |
import csv
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
WGS = ['G69145', 'G71602', 'G71608', 'G76270']
### scratch: import duplicate text files
# fname = "/seq/picard_aggregation/G69145/NA12878/current/NA12878.duplicate_metrics"
# gdup = pd.read_csv(fname, "\t", skiprows=range(10), comment="#", names=['bin', 'val'])
### read in duplication metric details
metric_dict = {}
for wg in WGS:
fname = "/seq/picard_aggregation/" + wg + "/NA12878/current/NA12878.duplicate_metrics"
lines=list(csv.reader(open(fname)))
nMetrics = lines[6][0].split('\t')
mvals = lines[7][0].split('\t')
# read in depth info
# fdepth = "/seq/picard_aggregation/" + wg + "/NA12878/current/NA12878.depthSM"
metric_dict[wg] = mvals
# put into dataframe
df_wgs = pd.DataFrame.from_dict(metric_dict,orient='index')
df_wgs.columns = nMetrics
df_wgs['platform'] = 'WGS'
### insert size
metric_dict = {}
for wg in WGS:
fname = "/seq/picard_aggregation/" + wg + "/NA12878/current/NA12878.insert_size_metrics"
lines=list(csv.reader(open(fname)))
nMetrics = lines[6][0].split('\t')
mvals = lines[7][0].split('\t')
metric_dict[wg] = mvals
# put into dataframe
insert_wgs = pd.DataFrame.from_dict(metric_dict,orient='index')
insert_wgs.columns = nMetrics
insert_wgs['platform'] = 'WGS'
### Nexome dup data
NexomeIDs = ['359781',
'359877',
'360457',
'361337',
'388072',
'381153',
'364464',
'377582',
'384210',
'384498',
'372754',
'386674',
'393370',
'385972',
'373158',
'379118',
'385390',
'391382',
'383609',
'386068',
'383320',
'383416',
'382684',
'392292',
'376734',
'376014']
metric_dict = {}
for nx in NexomeIDs:
fname = "/seq/picard_aggregation/D5227/NexPond-" + nx + "/current/NexPond-" + nx + ".duplicate_metrics"
lines=list(csv.reader(open(fname)))
nMetrics = lines[6][0].split('\t')
mvals = lines[7][0].split('\t')
metric_dict[nx] = mvals
# put into dataframe
df_nex = pd.DataFrame.from_dict(metric_dict,orient='index')
df_nex.columns = nMetrics
df_nex['platform'] = 'Nexome'
# concatenate WGS and Nexome frames
frames = [df_wgs, df_nex]
df_merge = pd.concat(frames)
fout = '/home/unix/hogstrom/nexpond_wgs_dup_metrics.txt'
df_merge.to_csv(fout)
### read in locally
import matplotlib.pyplot as plt
import pandas as pd
fin = "/Users/hogstrom/Dropbox (MIT)/genome_analysis/published_data/nexpond_wgs_dup_metrics.txt"
g = pd.read_csv(fin,index_col=0)
g.boxplot('ESTIMATED_LIBRARY_SIZE', by='platform')
g.boxplot('PERCENT_DUPLICATION', by='platform')
g.boxplot('READ_PAIR_DUPLICATES', by='platform')
# plt.plot(g['ESTIMATED_LIBRARY_SIZE'].values,g['PERCENT_DUPLICATION'].values)
| lhogstrom/ThornsInRoses | seq_duplication/dup_averages_wgs_nexome.py | Python | mit | 2,668 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('questions', '0013_auto_20160210_0400'),
]
operations = [
migrations.AlterField(
model_name='category',
name='order',
field=models.PositiveIntegerField(default=1),
),
migrations.AlterField(
model_name='question',
name='order',
field=models.PositiveIntegerField(default=1),
),
]
| moshthepitt/answers | questions/migrations/0014_auto_20160210_0406.py | Python | mit | 573 |
import tensorflow as tf
import tensorflow.contrib.slim as slim
class BaseReader(object):
def read(self):
raise NotImplementedError()
class ImageReader(BaseReader):
def __init__(self):
self.width = None
self.height = None
def get_image_size(self):
return self.width, self.height
def set_image_size(self, width, height):
self.width = width
self.height = height
def read(self, filename, num_classes, batch_size=256, feature_map=None):
assert(self.width is not None and self.height is not None)
assert(self.width > 0 and self.height > 0)
reader = tf.TFRecordReader()
tf.add_to_collection(filename, batch_size) # is this really needed?
key, value = reader.read_up_to(filename, batch_size)
if feature_map is None:
feature_map = {
'label': tf.FixedLenFeature([], tf.int64),
'image_raw': tf.FixedLenFeature([self.width * self.height], tf.int64),
}
features = tf.parse_example(value, features=feature_map)
images = tf.cast(features["image_raw"], tf.float32) * (1. / 255)
if feature_map.get('label') is not None:
labels = tf.cast(features['label'], tf.int32)
one_hot = tf.map_fn(lambda x: tf.cast(slim.one_hot_encoding(x, num_classes), tf.int32), labels)
one_hot = tf.reshape(one_hot, [-1, num_classes])
return one_hot, images
empty_labels = tf.reduce_sum(tf.zeros_like(images), axis=1)
return empty_labels, images
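if __name__ == '__main__':
    # Minimal usage sketch, not part of the original module; the TFRecord file
    # name, image size, and class count below are illustrative assumptions.
    # read() hands its first argument to TFRecordReader.read_up_to, which in
    # the TF1 queue-based input pipeline expects a filename queue.
    image_reader = ImageReader()
    image_reader.set_image_size(28, 28)
    filename_queue = tf.train.string_input_producer(['train.tfrecords'])
    labels, images = image_reader.read(filename_queue, num_classes=10, batch_size=32)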
| ml-101/templates | readers.py | Python | mit | 1,577 |
#!/usr/bin/python2.7
import sys
import os
import django
sys.path.append("/home/foxtrot/Dropbox/tunza_v2/")
os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.local"
django.setup()
# Django app imports must come after django.setup() so the app registry is ready.
from django.http import HttpResponse
from AfricasTalkingGateway import AfricasTalkingGateway, AfricasTalkingGatewayException
from reminder.models import Reminder
username = "OtisKe"
apikey = "07984423a278ead54fee35d3daf956598deb51405b27fe70f1e2dfe964be5c04"
gateway = AfricasTalkingGateway(username, apikey)
# replace this line with a list of numbers from the
# patient__patient_contact linked to reminder model
reminder_service = Reminder.objects.values_list('service_id',
'patient_id',
'service__service_name',
'service__service_url',
'patient__patient_contact', )
# replace this message with service about from service__service_about
# linked to reminder model
def voice_callback(request):
if request.method == 'POST':
        # Django exposes POST data via request.POST; the original request.values
        # accessor is a Flask/Werkzeug API and does not exist on a Django request.
        is_active = request.POST.get('isActive', None)
        session_id = request.POST.get('sessionId', None)
        caller_number = request.POST.get('callerNumber', None)
        direction = request.POST.get('direction', None)
print "is_active -> ", is_active
if is_active == str(0):
# Compose the response
            duration = request.POST.get('durationInSeconds', None)
            currency_code = request.POST.get('currencyCode', None)
            amount = request.POST.get('amount', None)
# update session info to Redis
print duration, currency_code, amount
respond = '<?xml version="1.0" encoding="UTF-8"?>'
respond += '<Response>'
respond += '<Say playBeep="false" >Welcome to the reminder system</Say>'
respond += '</Response>'
resp = HttpResponse(respond, 200, content_type='application/xml')
resp['Cache-Control'] = 'no-cache'
return resp
if is_active == str(1):
# Compose the response
respond = '<?xml version="1.0" encoding="UTF-8"?>'
respond += '<Response>'
respond += '<Say playBeep="false" >Welcome to mTunza.org</Say>'
respond += '</Response>'
resp = HttpResponse(respond, 200, content_type='application/xml')
resp['Cache-Control'] = 'no-cache'
return resp
else:
resp = HttpResponse('Bad Request', 400, content_type='application/xml', )
resp['Cache-Control'] = 'no-cache'
return resp
| omwomotieno/tunza_v3 | call/callback.py | Python | mit | 2,707 |
import requests
# https://github.com/kennethreitz/grequests/issues/103
from gevent import monkey
def stub(*args, **kwargs): # pylint: disable=unused-argument
pass
monkey.patch_all = stub
import grequests
import os
import json
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
meraki_api_token = os.getenv("MERAKI_API_TOKEN")
meraki_org = os.getenv("MERAKI_ORG")
meraki_dashboard_map = os.getenv("MERAKI_DASHBOARD_MAP")
header = {"X-Cisco-Meraki-API-Key": meraki_api_token}
def get_meraki_networks():
# Get a list of all networks associated with the specified organization
url = "https://dashboard.meraki.com/api/v0/organizations/" + meraki_org + "/networks"
netlist = requests.get(url, headers=header)
netjson = json.loads(netlist.content.decode("utf-8"))
return netjson
def meraki_create_dashboard_link(linktype, linkname, displayval, urlappend, linknameid):
shownet = displayval
if meraki_dashboard_map:
mapjson = json.loads(meraki_dashboard_map.replace("'", '"'))
if linktype in mapjson:
if linkname in mapjson[linktype]:
shownet = "<a href='" + mapjson[linktype][linkname]["baseurl"] + urlappend + "'>" + displayval + "</a>"
if shownet == displayval and linktype == "devices" and linknameid == 0:
shownet = "<a href='https://dashboard.meraki.com/manage/nodes/show/" + linkname + "'>" + displayval + "</a>"
return shownet
def meraki_dashboard_client_mod(netlink, cliid, clidesc):
showcli = clidesc
if netlink:
if netlink.find("/manage") >= 0:
showcli = netlink.split("/manage")[0] + "/manage/usage/list#c=" + cliid + "'>" + clidesc + "</a>"
else:
showcli = "<a href='https://dashboard.meraki.com/manage/usage/list#c=" + cliid + "'>" + clidesc + "</a>"
return showcli
def collect_url_list(jsondata, baseurl, attr1, attr2, battr1, battr2):
# Iterates the jsondata list/dictionary and pulls out attributes to generate a list of URLs
# jsondata : list of dictionaries or dictionary of lists
# baseurl : base url to use. place a $1 to show where to substitute
# attr1 : when using a list of dictionaries, this is the key that will be retrieved from each dict in the list
# when using a dictionary of lists, this is the key where all of the lists will be found
# attr2 : (optional) pass "" to disable
# when using a dictionary of lists, this is the key that will be retrieved from each dict in each list
# These are both optional, and used if a second substitution is needed ($2)
# battr1 : (optional) when using a list of dictionaries, this is the key that will be retrieved from each dict
# in the list when using a dictionary of lists, this is the key where all of the lists will be found
# battr2 : (optional) pass "" to disable
# when using a dictionary of lists, this is the key that will be retrieved from each dict in each list
urllist = []
sub1 = ""
for jsonitem in jsondata:
if attr2 == "":
if attr1 in jsonitem:
urllist.append(baseurl.replace("$1", jsonitem[attr1]))
else:
if attr1 in jsondata[jsonitem]:
for jsonitem2 in jsondata[jsonitem][attr1]:
if isinstance(jsonitem2, str):
if jsonitem2 == attr2:
if battr1 == "":
urllist.append(baseurl.replace("$1", jsondata[jsonitem][attr1][jsonitem2]))
else:
sub1 = jsondata[jsonitem][attr1][jsonitem2]
else:
if battr1 == "":
urllist.append(baseurl.replace("$1", jsonitem2[attr2]))
else:
sub1 = jsonitem2[attr2]
if battr1 in jsondata[jsonitem]:
for jsonitem2 in jsondata[jsonitem][battr1]:
if isinstance(jsonitem2, str):
if jsonitem2 == battr2:
urllist.append(baseurl.replace("$1", sub1).replace("$2", jsondata[jsonitem][battr1][jsonitem2]))
else:
urllist.append(baseurl.replace("$1", sub1).replace("$2", jsonitem2[battr2]))
return urllist
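# Illustrative example (mirrors the call in get_meraki_health below): given the
# network list returned by get_meraki_networks(), build one device-list URL per
# network by substituting each network's "id" for the $1 placeholder:
#   collect_url_list(netjson, "https://dashboard.meraki.com/api/v0/networks/$1/devices",
#                    "id", "", "", "")
#   -> ["https://dashboard.meraki.com/api/v0/networks/<network id>/devices", ...]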
def do_multi_get(url_list, comp_list, comp_id1, comp_id2, comp_url_idx, comp_key, content_key):
# Issues multiple GET requests to a list of URLs. Also will join dictionaries together based on returned content.
# url_list : list of URLs to issue GET requests to
# comp_list : (optional) pass [] to disable
# used to join the results of the GET operations to an existing dictionary
# comp_id1 : when using a list of dictionaries, this is the key to retrieve from each dict in the list
# when using a dictionary of lists, this is the key where all of the lists will be found
# comp_id2 : (optional) pass "" to disable
# when using a dictionary of lists, this is key that will be retrieved from each dict in each list
# comp_url_idx : (optional) pass -1 to disable
    #                  when merging dictionaries, they can be merged either on a URL comparison or a matching key. Use
# this to specify that they be merged based on this specific index in the URL. So to match
# 'b' in http://a.com/b, you would specify 3 here, as that is the 3rd // section in the URL
# comp_key : (optional) pass "" to disable
    #                  when merging dictionaries, they can be merged either on a URL comparison or a matching key. Use
# this to specify that they be merged based on this key found in the content coming back from the
# GET requests
# content_key : (optional when not merging, required when merging) pass "" to disable
# this is the base key added to the merged dictionary for the merged data
s = requests.Session()
retries = Retry(total=5, backoff_factor=0.2, status_forcelist=[403, 500, 502, 503, 504], raise_on_redirect=True,
raise_on_status=True)
s.mount('http://', HTTPAdapter(max_retries=retries))
s.mount('https://', HTTPAdapter(max_retries=retries))
rs = (grequests.get(u, headers=header, session=s) for u in url_list)
content_dict = {}
for itemlist in grequests.imap(rs, stream=False):
icontent = itemlist.content.decode("utf-8")
inlist = json.loads(icontent)
if len(inlist) > 0:
# Use the URL index if it was specified, otherwise use the comparision key
if comp_url_idx >= 0:
urllist = itemlist.url.split("/")
matchval = urllist[comp_url_idx]
else:
matchval = inlist[0][comp_key]
if len(comp_list) > 0:
# comp_list was passed, iterate and merge dictionaries
for net in comp_list:
if comp_id2 == "":
# this is a list of dictionaries. if this matches the search, add it to the content dict
if matchval == net[comp_id1]:
kid1 = net["id"]
if kid1 not in content_dict:
content_dict[kid1] = {}
content_dict[kid1]["info"] = net
content_dict[kid1][content_key] = inlist
break
else:
# this is a dictionary of lists. if the match is present in this dictionary, continue parsing
if matchval in json.dumps(comp_list[net][comp_id1]):
kid1 = comp_list[net]["info"]["id"]
for net2 in comp_list[net][comp_id1]:
kid2 = net2["serial"]
if comp_id2 in net2:
if matchval == net2[comp_id2]:
if kid1 not in content_dict:
content_dict[kid1] = {}
if comp_id1 not in content_dict[kid1]:
content_dict[kid1][comp_id1] = {}
if kid2 not in content_dict[kid1][comp_id1]:
content_dict[kid1][comp_id1][kid2] = {}
content_dict[kid1]["info"] = comp_list[net]
content_dict[kid1][comp_id1][kid2]["info"] = net2
content_dict[kid1][comp_id1][kid2][content_key] = inlist
break
else:
if matchval not in content_dict:
content_dict[matchval] = {}
if content_key != "":
if content_key not in content_dict[matchval]:
content_dict[matchval][content_key] = {}
content_dict[matchval][content_key] = inlist
else:
content_dict[matchval] = inlist
return content_dict
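# Illustrative example (mirrors get_meraki_health below): fetch every device-list
# URL built above and merge the responses back onto the matching network dicts,
# keyed under "devices", by comparing each response's "networkId" field:
#   netlist = do_multi_get(urlnet, netjson, "id", "", -1, "networkId", "devices")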
def decode_model(strmodel):
    # Decodes the Meraki model number into its general type.
outmodel = ""
if "MX" in strmodel:
outmodel = "appliance"
if "MS" in strmodel:
outmodel = "switch"
if "MR" in strmodel:
outmodel = "wireless"
if "MV" in strmodel:
outmodel = "camera"
if "MC" in strmodel:
outmodel = "phone"
if outmodel == "":
outmodel = strmodel[0:2]
return outmodel
def do_sort_smclients(in_smlist):
# Rearranges the SM Dictionary to group clients by MAC address rather than a single list
out_smlist = {}
for net in in_smlist:
if "devices" in in_smlist[net]:
for cli in in_smlist[net]["devices"]:
if net not in out_smlist:
out_smlist[net] = {"devices": {}}
out_smlist[net]["devices"][cli["wifiMac"]] = cli
return out_smlist
def do_split_networks(in_netlist):
# Splits out combined Meraki networks into individual device networks.
devdict = {}
for net in in_netlist:
base_name = in_netlist[net]["info"]["info"]["name"]
for dev in in_netlist[net]["info"]["devices"]:
thisdevtype = decode_model(dev["model"])
thisupl = {"uplinks": in_netlist[net]["devices"][dev["serial"]]["uplinks"]}
newname = base_name + " - " + thisdevtype
newdev = {**dev, **thisupl}
if newname in devdict:
devdict[newname].append(newdev)
else:
devdict[newname] = [newdev]
return devdict
def get_meraki_health(incoming_msg, rettype):
# Get a list of all networks associated with the specified organization
netjson = get_meraki_networks()
# Parse list of networks to extract/create URLs needed to get list of devices
urlnet = collect_url_list(netjson, "https://dashboard.meraki.com/api/v0/networks/$1/devices", "id", "", "", "")
# Get a list of all devices associated with the networks associated to the organization
netlist = do_multi_get(urlnet, netjson, "id", "", -1, "networkId", "devices")
# Get uplink status of devices
urlnetup = collect_url_list(netlist, "https://dashboard.meraki.com/api/v0/networks/$1/devices/$2/uplink", "info", "id", "devices", "serial")
netlistup = do_multi_get(urlnetup, netlist, "devices", "serial", 8, "", "uplinks")
# Split network lists up by device type
newnetlist = do_split_networks(netlistup)
totaldev = 0
offdev = 0
totaloffdev = 0
devicon = ""
retmsg = "<h3>Meraki Details:</h3>"
retmsg += "<a href='https://dashboard.meraki.com/'>Meraki Dashboard</a><br><ul>"
for net in sorted(newnetlist):
for dev in newnetlist[net]:
for upl in dev["uplinks"]:
if upl["interface"] == "WAN 1":
if upl["status"] != "Active":
offdev += 1
totaloffdev += 1
devicon = chr(0x2757) + chr(0xFE0F)
totaldev += len(newnetlist[net])
shownet = meraki_create_dashboard_link("networks", net, net, "", 0)
retmsg += "<li>Network '" + shownet + "' has " + str(offdev) + " device(s) offline out of " + str(len(newnetlist[net])) + " device(s)." + devicon + "</li>"
offdev = 0
devicon = ""
retmsg += "</ul><b>" + str(totaloffdev) + " device(s) offline out of a total of " + str(totaldev) + " device(s).</b>"
return retmsg
def get_meraki_clients(incoming_msg, rettype):
cmdlist = incoming_msg.text.split(" ")
client_id = cmdlist[len(cmdlist)-1]
# Get a list of all networks associated with the specified organization
netjson = get_meraki_networks()
# Parse list of networks to extract/create URLs needed to get list of devices
urlnet = collect_url_list(netjson, "https://dashboard.meraki.com/api/v0/networks/$1/devices", "id", "", "", "")
smnet = collect_url_list(netjson, "https://dashboard.meraki.com/api/v0/networks/$1/sm/devices/", "id", "", "", "")
# Get a list of all devices associated with the networks associated to the organization
netlist = do_multi_get(urlnet, netjson, "id", "", -1, "networkId", "devices")
smlist = do_multi_get(smnet, [], "id", "", 6, "", "")
newsmlist = do_sort_smclients(smlist)
# Parse list of devices to extract/create URLs needed to get list of clients
urldev = collect_url_list(netlist, "https://dashboard.meraki.com/api/v0/devices/$1/clients?timespan=86400", "devices", "serial", "", "")
# Get a list of all clients associated with the devices associated to the networks associated to the organization
netlist = do_multi_get(urldev, netlist, "devices", "serial", 6, "", "clients")
if rettype == "json":
return {"client": netlist, "sm": newsmlist}
else:
retmsg = "<h3>Associated Clients:</h3>"
for net in sorted(netlist):
for dev in netlist[net]["devices"]:
for cli in netlist[net]["devices"][dev]["clients"]:
if not isinstance(cli, str):
if cli["description"] == client_id and "switchport" in cli:
devbase = netlist[net]["devices"][dev]["info"]
showdev = meraki_create_dashboard_link("devices", devbase["mac"], devbase["name"], "?timespan=86400", 0)
showport = meraki_create_dashboard_link("devices", devbase["mac"], str(cli["switchport"]), "/ports/" + str(cli["switchport"]) + "?timespan=86400", 1)
showcli = meraki_dashboard_client_mod(showdev, cli["id"], cli["dhcpHostname"])
retmsg += "<i>Computer Name:</i> " + showcli + "<br>"
if net in newsmlist:
if "devices" in newsmlist[net]:
if cli["mac"] in newsmlist[net]["devices"]:
smbase = newsmlist[net]["devices"][cli["mac"]]
retmsg += "<i>Model:</i> " + smbase["systemModel"] + "<br>"
retmsg += "<i>OS:</i> " + smbase["osName"] + "<br>"
retmsg += "<i>IP:</i> " + cli["ip"] + "<br>"
retmsg += "<i>MAC:</i> " + cli["mac"] + "<br>"
retmsg += "<i>VLAN:</i> " + str(cli["vlan"]) + "<br>"
retmsg += "<i>Connected To:</i> " + showdev + " (" + devbase["model"] + "), Port " + showport + "<br>"
return retmsg
def get_meraki_health_html(incoming_msg):
return get_meraki_health(incoming_msg, "html")
def get_meraki_clients_html(incoming_msg):
return get_meraki_clients(incoming_msg, "html")
| joshand/CICO | cico_meraki.py | Python | mit | 16,220 |
"""
Created on 19 Nov 2020
@author: Bruno Beloff ([email protected])
Two-Channel I2C-Bus Switch With Interrupt Logic and Reset
https://www.ti.com/product/PCA9543A
"""
from scs_host.bus.i2c import I2C
# --------------------------------------------------------------------------------------------------------------------
class PCA9543A(object):
"""
    Driver for the TI PCA9543A two-channel I2C-bus switch with interrupt logic and reset.
"""
___I2C_ADDR = 0x70
# ----------------------------------------------------------------------------------------------------------------
def __init__(self):
"""
Constructor
"""
self.__addr = self.___I2C_ADDR
# ----------------------------------------------------------------------------------------------------------------
def enable(self, ch0, ch1):
ch0_en = 0x01 if ch0 else 0x00
ch1_en = 0x02 if ch1 else 0x00
ctrl = ch1_en | ch0_en
try:
I2C.Sensors.start_tx(self.__addr)
I2C.Sensors.write(ctrl)
finally:
I2C.Sensors.end_tx()
def read(self):
try:
I2C.Sensors.start_tx(self.__addr)
ctrl = I2C.Sensors.read(1)
finally:
I2C.Sensors.end_tx()
return ctrl
def reset(self):
self.enable(False, False)
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
try:
ctrl = "0x%02x" % self.read()
except OSError:
ctrl = None
return "PCA9543A:{addr:0x%02x, ctrl:%s}" % (self.__addr, ctrl)
| south-coast-science/scs_dfe_eng | src/scs_dfe/gas/scd30/pca9543a.py | Python | mit | 1,642 |
"""
A 2d grid map of m rows and n columns is initially filled with water.
We may perform an addLand operation which turns the water at position
(row, col) into a land. Given a list of positions to operate,
count the number of islands after each addLand operation.
An island is surrounded by water and is formed by connecting adjacent
lands horizontally or vertically.
You may assume all four edges of the grid are all surrounded by water.
Given m = 3, n = 3, positions = [[0,0], [0,1], [1,2], [2,1]].
Initially, the 2d grid grid is filled with water.
(Assume 0 represents water and 1 represents land).
0 0 0
0 0 0
0 0 0
Operation #1: addLand(0, 0) turns the water at grid[0][0] into a land.
1 0 0
0 0 0 Number of islands = 1
0 0 0
Operation #2: addLand(0, 1) turns the water at grid[0][1] into a land.
1 1 0
0 0 0 Number of islands = 1
0 0 0
Operation #3: addLand(1, 2) turns the water at grid[1][2] into a land.
1 1 0
0 0 1 Number of islands = 2
0 0 0
Operation #4: addLand(2, 1) turns the water at grid[2][1] into a land.
1 1 0
0 0 1 Number of islands = 3
0 1 0
"""
class Solution(object):
def num_islands2(self, m, n, positions):
ans = []
islands = Union()
for p in map(tuple, positions):
islands.add(p)
for dp in (0, 1), (0, -1), (1, 0), (-1, 0):
q = (p[0] + dp[0], p[1] + dp[1])
if q in islands.id:
islands.unite(p, q)
ans += [islands.count]
return ans
class Union(object):
def __init__(self):
self.id = {}
self.sz = {}
self.count = 0
def add(self, p):
self.id[p] = p
self.sz[p] = 1
self.count += 1
def root(self, i):
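        # find the representative of i, compressing the path as we go
        # (each visited node is re-pointed at its grandparent)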
while i != self.id[i]:
self.id[i] = self.id[self.id[i]]
i = self.id[i]
return i
def unite(self, p, q):
i, j = self.root(p), self.root(q)
if i == j:
return
if self.sz[i] > self.sz[j]:
i, j = j, i
self.id[i] = j
self.sz[j] += self.sz[i]
self.count -= 1
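if __name__ == '__main__':
    # Quick check against the worked example in the module docstring;
    # expected output: [1, 1, 2, 3]
    print(Solution().num_islands2(3, 3, [[0, 0], [0, 1], [1, 2], [2, 1]]))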
| amaozhao/algorithms | algorithms/union-find/count_islands.py | Python | mit | 2,094 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""This file is part of the django ERP project.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
__author__ = 'Emanuele Bertoldi <[email protected]>'
__copyright__ = 'Copyright (c) 2013-2015, django ERP Team'
__version__ = '0.0.1'
from django.conf import settings
from django.db.models.signals import post_save, pre_delete
from djangoerp.core.utils.models import get_model
from djangoerp.core.signals import manage_author_permissions
from .models import Menu, Link, Bookmark
from .utils import create_bookmarks, delete_bookmarks
## HANDLERS ##
def _create_bookmarks(sender, instance, *args, **kwargs):
create_bookmarks(instance)
def _delete_bookmarks(sender, instance, *args, **kwargs):
delete_bookmarks(instance)
## API ##
def manage_bookmarks(cls, enabled=True):
"""Connects handlers for bookmarks management.
This handler could be used to automatically create a related bookmark list
on given model class instance creation. i.e.:
>> manage_bookmarks(User)
It will auto generate a bookmark list associated to each new User's instance.
To disconnect:
>> manage_bookmarks(User, False)
"""
cls = get_model(cls)
cls_name = cls.__name__.lower()
create_dispatch_uid = "create_%s_bookmarks" % cls_name
delete_dispatch_uid = "delete_%s_bookmarks" % cls_name
if enabled:
post_save.connect(_create_bookmarks, cls, dispatch_uid=create_dispatch_uid)
pre_delete.connect(_delete_bookmarks, cls, dispatch_uid=delete_dispatch_uid)
else:
post_save.disconnect(_create_bookmarks, cls, dispatch_uid=create_dispatch_uid)
pre_delete.disconnect(_delete_bookmarks, cls, dispatch_uid=delete_dispatch_uid)
## CONNECTIONS ##
manage_author_permissions(Menu)
manage_author_permissions(Link)
manage_author_permissions(Bookmark)
manage_bookmarks(settings.AUTH_USER_MODEL)
| mobb-io/django-erp | djangoerp/menus/signals.py | Python | mit | 2,406 |
# @Author
# Chloe-Agathe Azencott
# [email protected]
# April 2016
import argparse
import h5py
import numpy as np
import os
import sys
def main():
""" Create train/test indices for one repeat of a 10-fold sampled leave-one-study-out
experiment on the RFS data.
The indices will be stored under
<data_dir>/outputs/U133A_combat_RFS/sampled_loso/repeat<repeat idx>
with the following structure:
For k=1..numFolds:
<k>/train.indices
List of indices of the training set (one per line).
<k>/train.labels
List of (0/1) labels of the training set (one per line).
<k>/test.indices
List of indices of the test set (one per line).
<k>/test.labels
List of (0/1) labels of the test set (one per line).
Parameters
----------
data_dir: path
Path to the data folder.
ACES, GSE_RFS, and the outputs directory must be under <data_dir>.
repeat: int
Repeat index.
Example
-------
$ python setUpSampledLOSO_writeIndices.py $SHAREDAT/SamSpecCoEN 0
Reference
---------
Allahyar, A., and Ridder, J. de (2015).
FERAL: network-based classifier with application to breast cancer outcome prediction.
Bioinformatics 31, i311--i319.
"""
parser = argparse.ArgumentParser(description="Build sample-specific co-expression networks" + \
"for a sampled LOSO on the RFS data",
add_help=True)
parser.add_argument("data_dir", help="Path to the data")
parser.add_argument("repeat", help="Index of the repeat", type=int)
args = parser.parse_args()
outDir = '%s/outputs/U133A_combat_RFS/sampled_loso/repeat%d' % (args.data_dir, args.repeat)
# Create outDir if it does not exist
if not os.path.isdir(outDir):
sys.stdout.write("Creating %s\n" % outDir)
try:
os.makedirs(outDir)
except OSError:
if not os.path.isdir(outDir):
raise
# Get expression data, sample labels.
# Do not normalize the data while loading it (so as not to use test data for normalization).
f = h5py.File("%s/ACES/experiments/data/U133A_combat.h5" % args.data_dir)
expressionData = np.array(f['U133A_combat_RFS']['ExpressionData'])
sampleLabels = np.array(f['U133A_combat_RFS']['PatientClassLabels'])
sampleAccess = np.array(f['U133A_combat_RFS']['PatientLabels']).tolist()
f.close()
# Map the indices to the studies
studyDict = {} # studyId:[sampleIdx]
gse_rfs_dir = '%s/GSE_RFS/' % args.data_dir
for studyFile in os.listdir(gse_rfs_dir):
studyPath = '%s/%s' % (gse_rfs_dir, studyFile)
print studyPath
with open(studyPath, 'r') as f:
gsmNames = set([x.split()[0] for x in f.readlines()])
f.close()
gsmNames = gsmNames.intersection(set(sampleAccess))
studyDict[studyFile.split(".")[0]] = [sampleAccess.index(gsm) for gsm in gsmNames]
studyList = studyDict.keys()
numStudies = len(studyList)
print "Found %d studies" % numStudies
np.random.seed(seed=args.repeat)
for foldNr in range(numStudies):
# Training data:
# randomly sample 50% of each study that is not foldNr
trIndices = []
        # exclude the held-out study (studyList holds study names; foldNr is an index)
        for studyId in [x for x in studyList if x != studyList[foldNr]]:
studyIndices = np.random.choice(studyDict[studyId],
size=len(studyDict[studyId])/2,
replace=False)
trIndices.extend(studyIndices)
# studyIndices = studyDict[studyId]
# random.shuffle(studyIndices)
# n = len(studyIndices)
# trIndices.extend(studyIndices[:(n/2)])
# Test data:
# the data from foldNr
teIndices = studyDict[studyList[foldNr]]
# Create output directory
foldDir = "%s/fold%d" % (outDir, foldNr)
try:
os.makedirs(foldDir)
except OSError:
if not os.path.isdir(foldDir):
raise
# Save train indices to file
trIndicesF = '%s/train.indices' % foldDir
np.savetxt(trIndicesF, trIndices, fmt='%d')
sys.stdout.write("Wrote training indices for fold %d to %s\n" % (foldNr, trIndicesF))
# Save test indices to file
teIndicesF = '%s/test.indices' % foldDir
np.savetxt(teIndicesF, teIndices, fmt='%d')
sys.stdout.write("Wrote test indices for fold %d to %s\n" % (foldNr, teIndicesF))
# Save train labels to file
trLabelsF = '%s/train.labels' % foldDir
np.savetxt(trLabelsF, np.array(sampleLabels[trIndices], dtype='int'),
fmt='%d')
sys.stdout.write("Wrote training labels for fold %d to %s\n" % (foldNr, trLabelsF))
# Save test labels to file
teLabelsF = '%s/test.labels' % foldDir
np.savetxt(teLabelsF, np.array(sampleLabels[teIndices], dtype='int'),
fmt='%d')
sys.stdout.write("Wrote test labels for fold %d to %s\n" % (foldNr, teLabelsF))
if __name__ == "__main__":
main()
| chagaz/SamSpecCoEN | code/setupSampledLOSO_writeIndices.py | Python | mit | 5,303 |
import collections
import unittest
from kobold import assertions
class TestAssertEqual(unittest.TestCase):
def test_empty_hashes(self):
assertions.assert_equal({}, {})
def test_distinct_keys(self):
self.assertRaises(
AssertionError,
assertions.assert_equal,
{'a' : 1},
{'b' : 2})
Response = collections.namedtuple('Response', 'headers status_code data')
class TestAssertResponseMatches(unittest.TestCase):
def test_empty_body(self):
actual = Response(headers={}, status_code=200, data={})
assertions.assert_response_matches({'body' : {},
'status_code' : 200,
'headers' : {}}, actual)
def test_omit_status_and_headers(self):
actual = Response(headers={}, status_code=200, data={})
assertions.assert_response_matches({'body' : {}}, actual)
def test_equal_bodies(self):
actual = Response(
headers={},
status_code=200,
data={'key' : 'value'})
assertions.assert_response_matches({'body' : {'key' : 'value'},
'status_code' : 200,
'headers' : {}}, actual)
def test_unequal_bodies(self):
actual = Response(
headers={},
status_code=200,
data={'key' : 'value'})
self.assertRaises(
AssertionError,
assertions.assert_response_matches,
{'body' : {'key' : 'anothervalue'},
'status_code' : 200,
'headers' : {}},
actual)
def test_unequal_headers(self):
actual = Response(
headers={'header' : 'value'},
status_code=200,
data={'key' : 'value'})
self.assertRaises(
AssertionError,
assertions.assert_response_matches,
{'body' : {'key' : 'value'},
'status_code' : 200,
'headers' : {'header' : 'anothervalue'}},
actual)
| krieghan/kobold_python | kobold/tests/test_assertions.py | Python | mit | 2,267 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
import uuid
from .. import models
class ParameterGroupingOperations(object):
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.config = config
def post_required(
self, parameter_grouping_post_required_parameters, custom_headers={}, raw=False, **operation_config):
"""
Post a bunch of required parameters grouped
:param parameter_grouping_post_required_parameters: Additional
parameters for the operation
:type parameter_grouping_post_required_parameters:
ParameterGroupingPostRequiredParameters
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: None or (None, requests.response) or concurrent.futures.Future
"""
body = None
if parameter_grouping_post_required_parameters is not None:
body = parameter_grouping_post_required_parameters.body
custom_header = None
if parameter_grouping_post_required_parameters is not None:
custom_header = parameter_grouping_post_required_parameters.custom_header
query = None
if parameter_grouping_post_required_parameters is not None:
query = parameter_grouping_post_required_parameters.query
path = None
if parameter_grouping_post_required_parameters is not None:
path = parameter_grouping_post_required_parameters.path
# Construct URL
url = '/parameterGrouping/postRequired/{path}'
path_format_arguments = {
'path': self._serialize.url("path", path, 'str')
}
url = url.format(**path_format_arguments)
# Construct parameters
query_parameters = {}
if query is not None:
query_parameters['query'] = self._serialize.query("query", query, 'int')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
if custom_header is not None:
header_parameters['customHeader'] = self._serialize.header("custom_header", custom_header, 'str')
# Construct body
body_content = self._serialize.body(body, 'int')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def post_optional(
self, parameter_grouping_post_optional_parameters=None, custom_headers={}, raw=False, **operation_config):
"""
Post a bunch of optional parameters grouped
:param parameter_grouping_post_optional_parameters: Additional
parameters for the operation
:type parameter_grouping_post_optional_parameters:
ParameterGroupingPostOptionalParameters or None
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: None or (None, requests.response) or concurrent.futures.Future
"""
custom_header = None
if parameter_grouping_post_optional_parameters is not None:
custom_header = parameter_grouping_post_optional_parameters.custom_header
query = None
if parameter_grouping_post_optional_parameters is not None:
query = parameter_grouping_post_optional_parameters.query
# Construct URL
url = '/parameterGrouping/postOptional'
# Construct parameters
query_parameters = {}
if query is not None:
query_parameters['query'] = self._serialize.query("query", query, 'int')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
if custom_header is not None:
header_parameters['customHeader'] = self._serialize.header("custom_header", custom_header, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def post_multiple_parameter_groups(
self, first_parameter_group=None, parameter_grouping_post_multiple_parameter_groups_second_parameter_group=None, custom_headers={}, raw=False, **operation_config):
"""
Post parameters from multiple different parameter groups
:param first_parameter_group: Additional parameters for the operation
:type first_parameter_group: FirstParameterGroup or None
:param
parameter_grouping_post_multiple_parameter_groups_second_parameter_group:
Additional parameters for the operation
:type
parameter_grouping_post_multiple_parameter_groups_second_parameter_group:
ParameterGroupingPostMultipleParameterGroupsSecondParameterGroup or
None
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: None or (None, requests.response) or concurrent.futures.Future
"""
header_one = None
if first_parameter_group is not None:
header_one = first_parameter_group.header_one
query_one = None
if first_parameter_group is not None:
query_one = first_parameter_group.query_one
header_two = None
if parameter_grouping_post_multiple_parameter_groups_second_parameter_group is not None:
header_two = parameter_grouping_post_multiple_parameter_groups_second_parameter_group.header_two
query_two = None
if parameter_grouping_post_multiple_parameter_groups_second_parameter_group is not None:
query_two = parameter_grouping_post_multiple_parameter_groups_second_parameter_group.query_two
# Construct URL
url = '/parameterGrouping/postMultipleParameterGroups'
# Construct parameters
query_parameters = {}
if query_one is not None:
query_parameters['query-one'] = self._serialize.query("query_one", query_one, 'int')
if query_two is not None:
query_parameters['query-two'] = self._serialize.query("query_two", query_two, 'int')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
if header_one is not None:
header_parameters['header-one'] = self._serialize.header("header_one", header_one, 'str')
if header_two is not None:
header_parameters['header-two'] = self._serialize.header("header_two", header_two, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def post_shared_parameter_group_object(
self, first_parameter_group=None, custom_headers={}, raw=False, **operation_config):
"""
Post parameters with a shared parameter group object
:param first_parameter_group: Additional parameters for the operation
:type first_parameter_group: FirstParameterGroup or None
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: None or (None, requests.response) or concurrent.futures.Future
"""
header_one = None
if first_parameter_group is not None:
header_one = first_parameter_group.header_one
query_one = None
if first_parameter_group is not None:
query_one = first_parameter_group.query_one
# Construct URL
url = '/parameterGrouping/sharedParameterGroupObject'
# Construct parameters
query_parameters = {}
if query_one is not None:
query_parameters['query-one'] = self._serialize.query("query_one", query_one, 'int')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
if header_one is not None:
header_parameters['header-one'] = self._serialize.header("header_one", header_one, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
| vulcansteel/autorest | AutoRest/Generators/Python/Azure.Python.Tests/Expected/AcceptanceTests/AzureParameterGrouping/auto_rest_parameter_grouping_test_service/operations/parameter_grouping_operations.py | Python | mit | 11,840 |
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2015 [email protected]
#
# MIT Licence. See http://opensource.org/licenses/MIT
#
# Created on 2015-11-23
#
"""Get app info with AppKit via objc bridge."""
from __future__ import print_function, unicode_literals, absolute_import
import time
import unicodedata
from AppKit import NSWorkspace
def decode(s):
"""Decode bytestring to Unicode."""
if isinstance(s, str):
s = unicode(s, 'utf-8')
elif not isinstance(s, unicode):
raise TypeError("str or unicode required, not {}".format(type(s)))
return unicodedata.normalize('NFC', s)
def get_frontmost_app():
"""Return (name, bundle_id and path) of frontmost application.
Raise a `RuntimeError` if frontmost application cannot be
determined.
"""
for app in NSWorkspace.sharedWorkspace().runningApplications():
if app.isActive():
app_name = app.localizedName()
bundle_id = app.bundleIdentifier()
app_path = app.bundleURL().fileSystemRepresentation()
return (app_name, bundle_id, app_path)
else:
raise RuntimeError("Couldn't get frontmost application.")
if __name__ == '__main__':
s = time.time()
get_frontmost_app()
d = time.time() - s
| deanishe/alfred-appscripts | active_app_benchmarks/active_app_02_objc_bridge.py | Python | mit | 1,286 |
"""Contains utility methods used by and with the pyebnf package."""
import math
def esc_split(text, delimiter=" ", maxsplit=-1, escape="\\", *, ignore_empty=False):
"""Escape-aware text splitting:
Split text on on a delimiter, recognizing escaped delimiters."""
is_escaped = False
split_count = 0
yval = []
for char in text:
if is_escaped:
is_escaped = False
yval.append(char)
else:
if char == escape:
is_escaped = True
elif char in delimiter and split_count != maxsplit:
if yval or not ignore_empty:
yield "".join(yval)
split_count += 1
yval = []
else:
yval.append(char)
yield "".join(yval)
def esc_join(iterable, delimiter=" ", escape="\\"):
"""Join an iterable by a delimiter, replacing instances of delimiter in items
with escape + delimiter.
"""
rep = escape + delimiter
return delimiter.join(i.replace(delimiter, rep) for i in iterable)
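# Illustrative round trip for the two helpers above (with the default
# space delimiter and backslash escape):
#   list(esc_split(r"a b\ c"))  ->  ["a", "b c"]
#   esc_join(["a", "b c"])      ->  r"a b\ c"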
def get_newline_positions(text):
"""Returns a list of the positions in the text where all new lines occur. This is used by
get_line_and_char to efficiently find coordinates represented by offset positions.
"""
pos = []
for i, c in enumerate(text):
if c == "\n":
pos.append(i)
return pos
def get_line_and_char(newline_positions, position):
"""Given a list of newline positions, and an offset from the start of the source code
that newline_positions was pulled from, return a 2-tuple of (line, char) coordinates.
"""
if newline_positions:
for line_no, nl_pos in enumerate(newline_positions):
if nl_pos >= position:
if line_no == 0:
return (line_no, position)
else:
return (line_no, position - newline_positions[line_no - 1] - 1)
return (line_no + 1, position - newline_positions[-1] - 1)
else:
return (0, position)
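# Example: for the text "ab\ncd", get_newline_positions returns [2], and offset 4
# (the character "d") maps to get_line_and_char([2], 4) == (1, 1), i.e. line 1, char 1.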
def point_to_source(source, position, fmt=(2, True, "~~~~~", "^")):
"""Point to a position in source code.
source is the text we're pointing in.
position is a 2-tuple of (line_number, character_number) to point to.
fmt is a 4-tuple of formatting parameters, they are:
name default description
---- ------- -----------
surrounding_lines 2 the number of lines above and below the target line to print
show_line_numbers True if true line numbers will be generated for the output_lines
tail_body "~~~~~" the body of the tail
pointer_char "^" the character that will point to the position
"""
surrounding_lines, show_line_numbers, tail_body, pointer_char = fmt
line_no, char_no = position
lines = source.split("\n")
line = lines[line_no]
if char_no >= len(tail_body):
tail = " " * (char_no - len(tail_body)) + tail_body + pointer_char
else:
tail = " " * char_no + pointer_char + tail_body
if show_line_numbers:
line_no_width = int(math.ceil(math.log10(max(1, line_no + surrounding_lines))) + 1)
line_fmt = "{0:" + str(line_no_width) + "}: {1}"
else:
line_fmt = "{1}"
pivot = line_no + 1
output_lines = [(pivot, line), ("", tail)]
for i in range(surrounding_lines):
upper_ofst = i + 1
upper_idx = line_no + upper_ofst
lower_ofst = -upper_ofst
lower_idx = line_no + lower_ofst
if lower_idx >= 0:
output_lines.insert(0, (pivot + lower_ofst, lines[lower_idx]))
if upper_idx < len(lines):
output_lines.append((pivot + upper_ofst, lines[upper_idx]))
return "\n".join(line_fmt.format(n, c) for n, c in output_lines)
| treycucco/pyebnf | pyebnf/util.py | Python | mit | 3,561 |
####
# Figure 4
# needs:
# - data/*.npz produced by run.py
####
import glob
import sys
sys.path.append('..')
from lib.mppaper import *
import lib.mpsetup as mpsetup
import lib.immune as immune
files = sorted((immune.parse_value(f, 'epsilon'), f) for f in glob.glob("data/*.npz"))
import run
sigma = run.sigma
#### left figure ####
epsilons = []
similarities = []
similaritiesQ = []
similaritiesPtilde = []
for epsilon, f in files:
npz = np.load(f)
P1 = npz['P1']
P2 = npz['P2']
Ptilde1 = npz['Ptilde1']
Ptilde2 = npz['Ptilde2']
Q1 = npz['Q1']
Q2 = npz['Q2']
epsilons.append(epsilon)
similarities.append(immune.similarity(P1, P2))
similaritiesQ.append(immune.similarity(Q1, Q2))
similaritiesPtilde.append(immune.similarity(Ptilde1, Ptilde2))
fig = plt.figure()
ax = fig.add_subplot(121)
ax.axhline(1.0, color=almostblack)
ax.plot(epsilons, similarities, label='$P_r^\star$', **linedotstyle)
ax.plot(epsilons, similaritiesQ, label='$Q_a$', **linedotstyle)
ax.plot(epsilons, similaritiesPtilde, label=r'$\tilde P_a$', **linedotstyle)
ax.set_xlabel('noise $\epsilon$')
ax.set_ylabel('Similarity')
ax.set_xlim(0.0, 0.5)
ax.set_ylim(0.0, 1.05)
ax.legend(ncol=1, loc='center right')
ax.xaxis.labelpad = axis_labelpad
ax.yaxis.labelpad = axis_labelpad
mpsetup.despine(ax)
fig.tight_layout(pad=tight_layout_pad)
fig.subplots_adjust(top=0.85)
#### right figures ####
epsilon_illustration = 0.2
epsilon, f = [tup for tup in files if tup[0] == epsilon_illustration][0]
npz = np.load(f)
P1 = npz['P1']
P2 = npz['P2']
Qbase = npz['Qbase']
Q1 = npz['Q1']
Q2 = npz['Q2']
x = npz['x']
axQ = fig.add_subplot(222)
for i, Q in enumerate([Q1, Q2]):
axQ.plot(x/sigma, Q, lw=0.5 * linewidth, label='ind. %g' % (i+1))
axQ.set_xlim(0, 10)
axQ.set_ylabel(r'$Q_a$')
axP = fig.add_subplot(224, sharex=axQ)
for i, p in enumerate([P1, P2]):
axP.plot(x/sigma, p, label='ind. %g' % (i+1), **linedotstyle)
axP.locator_params(axis='x', nbins=5, tight=True)
axP.set_xlim(0, 20)
axP.set_ylabel(r'$P_r^\star$')
axP.legend(ncol=2, handletextpad=0.1,
loc='upper right',
bbox_to_anchor=(1.05, 1.20))
for a in [axQ, axP]:
a.set_ylim(ymin=0.0)
mpsetup.despine(a)
a.set_yticks([])
a.xaxis.labelpad = axis_labelpad
a.yaxis.labelpad = axis_labelpad
axP.set_xlabel('$x \; / \; \sigma$')
plt.setp(axQ.get_xticklabels(), visible=False)
#### finish figure ####
fig.tight_layout(pad=tight_layout_pad, h_pad=1.0)
fig.savefig('fig4.svg')
plt.show()
| andim/optimmune | fig4/fig4.py | Python | mit | 2,501 |
import random
import string
def random_string(n):
    # Return n cryptographically random characters drawn from
    # uppercase ASCII letters and digits.
    result = ''
    for _ in range(n):
        result += random.SystemRandom().choice(
            string.ascii_uppercase + string.digits)
    return result
| adrianp/cartz | server/utils.py | Python | mit | 210 |
# -*- coding: utf-8 -*-
from os import getcwd, listdir
from os.path import abspath, dirname, isdir, join as path_join
from shutil import rmtree
from sys import exc_info
from tempfile import mkdtemp
from unittest import TestCase
from mock import patch, MagicMock
from robot.libraries.BuiltIn import BuiltIn
CURDIR = abspath(dirname(__file__))
class TestScreenshot(TestCase):
def setUp(self):
self.mock = MagicMock()
self.patcher = patch.dict('sys.modules', {'pyautogui': self.mock})
self.patcher.start()
from ImageHorizonLibrary import ImageHorizonLibrary
self.lib = ImageHorizonLibrary()
def tearDown(self):
self.mock.reset_mock()
self.patcher.stop()
def _take_screenshot_many_times(self, expected_filename):
folder = path_join(CURDIR, 'reference_folder')
self.lib.set_screenshot_folder(folder)
for i in range(1, 15):
self.lib.take_a_screenshot()
self.mock.screenshot.assert_called_once_with(
path_join(folder, expected_filename % i))
self.mock.reset_mock()
def test_take_a_screenshot(self):
self._take_screenshot_many_times('ImageHorizon-screenshot-%d.png')
def test_take_a_screenshot_inside_robot(self):
with patch.object(BuiltIn, 'get_variable_value',
return_value='Suite Name'):
self._take_screenshot_many_times('SuiteName-screenshot-%d.png')
def test_take_a_screenshot_with_invalid_folder(self):
from ImageHorizonLibrary import ScreenshotFolderException
for index, invalid_folder in enumerate((None, 0, False), 1):
self.lib.screenshot_folder = invalid_folder
expected = path_join(getcwd(),
'ImageHorizon-screenshot-%d.png' % index)
self.lib.take_a_screenshot()
self.mock.screenshot.assert_called_once_with(expected)
self.mock.reset_mock()
for invalid_folder in (123, object()):
self.lib.screenshot_folder = invalid_folder
with self.assertRaises(ScreenshotFolderException):
self.lib.take_a_screenshot()
| Eficode/robotframework-imagehorizonlibrary | tests/utest/test_screenshot.py | Python | mit | 2,185 |
import tkinter.filedialog as tkFileDialog
import numpy as np
from numpy import sin,cos
import os
def InnerOrientation(mat1,mat2):
"""
    mat1 is the pixel coordinates (4*2); mat2 is the theoretical coordinates (4*2).
    The six parameters h0, h1, h2, k0, k1, k2 are defined by:
    [x]=[h0]+[h1 h2] [i]
    [y]=[k0]+[k1 k2] [j]
    Returns the homogeneous matrix of the six orientation parameters, the
    unit-weight variance in x, and the unit-weight variance in y:
    [h1 h2 h0]
    [k1 k2 k0]
    [0 0 1 ]
"""
# mat1=np.matrix(mat1)
# mat2=np.matrix(mat2)
y=mat2.ravel()
y=y.T
xlist=[]
for i in range(int(y.size/2)):
x0=np.matrix([[1,mat1[i,0],mat1[i,1],0,0,0],[0,0,0,1,mat1[i,0],mat1[i,1]]])
xlist.append(x0)
x=np.vstack(xlist)
# print(x)
N=np.linalg.inv(x.T @ x)
beta=N @ x.T @ y
# print(beta)
r=(np.size(y)-6)
e=y-x@beta
ex=e[0::2]
ey=e[1::2]
sigmax=(np.linalg.norm(ex)/r)
sigmay=(np.linalg.norm(ey)/r)
# print(sigmax)
# print(sigmay)
return(np.matrix([[beta[1,0],beta[2,0],beta[0,0]],[beta[4,0],beta[5,0],beta[3,0]],[0,0,1]]),sigmax,sigmay)
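# Sanity check (synthetic data): when mat1 equals mat2 the adjustment is an exact
# identity transform, so the returned matrix is the 3x3 identity and both
# unit-weight variances are zero.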
def openkbfile():
#default_dir = r"C:\Users\lenovo\Desktop" # 设置默认打开目录
fname = tkFileDialog.askopenfilename(title=u"选择文件",filetypes=[("kb file", "*.kb"), ("all", "*.*")],initialdir=r"D:\学习\摄影测量\摄影测量实验数据-后方交会、前方交会")
f=open(fname,mode='r')
lines=f.readlines()
f.close()
mat=[]
for line in lines:
t=line.split()
mat.append([float(t[0]),float(t[1])])
#initialdir=(os.path.expanduser(default_dir))
    # print(fname)  # returns the full file path
mat1=mat[0::2]
mat2=mat[1::2]
mat,sigmax2,sigmay2=InnerOrientation(np.matrix(mat1),np.matrix(mat2))
print(mat,sigmax2,sigmay2)
# def transform(mat,coormat):
#     """
#     mat: homogeneous matrix obtained from InnerOrientation
#     coormat: homogeneous coordinates, i.e. the third element of each column
#              is 1 and every coordinate is a column vector; any number of columns.
#     Returns: the transformed coordinates
#     """
#     return mat@coormat
# fname = tkFileDialog.askopenfilename(title=u"选择文件",filetypes=[("ao.txt file", "*.txt"), ("all", "*.*")],initialdir=r"D:\学习\摄影测量\摄影测量实验数据-后方交会、前方交会")
# f=open(fname,mode='r')
# lines=f.readlines()
# f.close()
# matimage=[]
# matground=[]
# for line in lines[1:]:
# t=line.split()
# matimage.append([float(t[0]),float(t[1])])
# matground.append([float(t[2]),float(t[3]),float(t[4])])
# return(np.matrix(matimage),np.matrix(matground))
# def resection():
# matimage,matground=openaofile()
# dist=np.linalg.norm(matimage[1]-matimage[0])
# Dist=np.linalg.norm(matground[1]-matground[0])
# matimage=matimage.T
# matground=matground.T
# n=dist.shape[0]
# assert n==5
# m=Dist/dist
#     x0,y0,f=0,0,210.681 # all values in millimetres
# Xs0,Ys0,H=np.average(matground,axis=0)
# H+=m*f
# phi,omega,kappa=0,0,0
# R=np.zeros((3,3))
# R[0,0]=cos(phi)*cos(kappa)-sin(phi)*sin(omega)*sin(kappa)
# R[0,1]=-cos(phi)*sin(kappa)-sin(phi)*sin(omega)*cos(kappa)
# R[0,2]=-sin(phi)*cos(omega)
# R[1,0]=cos(omega)*sin(kappa)
# R[1,1]=cos(omega)*cos(kappa)
# R[1,2]=-sin(omega)
# R[2,0]=sin(phi)*cos(kappa)+cos(phi)*sin(omega)*sin(kappa)
# R[2,1]=-sin(phi)*sin(kappa)+cos(phi)*sin(omega)*cos(kappa)
# R[2,2]=cos(phi)*cos(omega)
# matimage1=np.zeros((2,5))
# S=np.matrix([Xs0,Ys0,H]).T
# Alist=[]
# Vlist=[]
# Llist=[]
# for i in range(5):
# u=matground[:,i]-S
# matimage1[0,i]=-f*np.dot(R[0],u)/np.dot(R[2],u)
# matimage1[1,i]=-f*np.dot(R[1],u)/np.dot(R[2],u)
# zba=np.dot(R[2],u)
# A=np.zeros(2,6)
# # A[0,0]=(R[0,0]*f+R[0,2]*matimage[])[]
if __name__=="__main__":
openkbfile() | YU6326/YU6326.github.io | code/photogrammetry/inner_orientation.py | Python | mit | 3,899 |
# coding=utf-8
"""Unit tests for mapi/endpoints/tmdb.py."""
import pytest
from mapi.endpoints import tmdb_find, tmdb_movies, tmdb_search_movies
from mapi.exceptions import MapiNotFoundException, MapiProviderException
from tests import JUNK_TEXT
GOONIES_IMDB_ID = "tt0089218"
GOONIES_TMDB_ID = 9340
JUNK_IMDB_ID = "tt1234567890"
@pytest.mark.usefixtures("tmdb_api_key")
def test_tmdb_find__imdb_success(tmdb_api_key):
expected_top_level_keys = {
"movie_results",
"person_results",
"tv_episode_results",
"tv_results",
"tv_season_results",
}
expected_movie_results_keys = {
"adult",
"backdrop_path",
"genre_ids",
"id",
"original_language",
"original_title",
"overview",
"poster_path",
"popularity",
"release_date",
"title",
"video",
"vote_average",
"vote_count",
}
result = tmdb_find(tmdb_api_key, "imdb_id", GOONIES_IMDB_ID)
assert isinstance(result, dict)
assert set(result.keys()) == expected_top_level_keys
assert len(result.get("movie_results", {})) > 0
assert expected_movie_results_keys == set(
result.get("movie_results", {})[0].keys()
)
@pytest.mark.usefixtures("tmdb_api_key")
def test_tmdb_find__api_key_fail():
with pytest.raises(MapiProviderException):
tmdb_find(JUNK_TEXT, "imdb_id", GOONIES_IMDB_ID, cache=False)
@pytest.mark.usefixtures("tmdb_api_key")
def test_tmdb_find__invalid_id_imdb(tmdb_api_key):
with pytest.raises(MapiProviderException):
tmdb_find(tmdb_api_key, "imdb_id", JUNK_TEXT, cache=False)
@pytest.mark.usefixtures("tmdb_api_key")
def test_tmdb_find__not_found(tmdb_api_key):
with pytest.raises(MapiNotFoundException):
tmdb_find(tmdb_api_key, "imdb_id", JUNK_IMDB_ID)
@pytest.mark.usefixtures("tmdb_api_key")
def test_tmdb_movies__success(tmdb_api_key):
expected_top_level_keys = {
"adult",
"backdrop_path",
"belongs_to_collection",
"budget",
"genres",
"homepage",
"id",
"imdb_id",
"original_language",
"original_title",
"overview",
"popularity",
"poster_path",
"production_companies",
"production_countries",
"release_date",
"revenue",
"runtime",
"spoken_languages",
"status",
"tagline",
"title",
"video",
"vote_average",
"vote_count",
}
result = tmdb_movies(tmdb_api_key, GOONIES_TMDB_ID)
assert isinstance(result, dict)
assert set(result.keys()) == expected_top_level_keys
assert result.get("original_title") == "The Goonies"
def test_tmdb_movies__api_key_fail():
with pytest.raises(MapiProviderException):
tmdb_movies(JUNK_TEXT, "", cache=False)
@pytest.mark.usefixtures("tmdb_api_key")
def test_tmdb_movies__id_tmdb_fail(tmdb_api_key):
with pytest.raises(MapiProviderException):
tmdb_movies(tmdb_api_key, JUNK_TEXT, cache=False)
@pytest.mark.usefixtures("tmdb_api_key")
def test_tmdb_movies__not_found(tmdb_api_key):
with pytest.raises(MapiNotFoundException):
tmdb_movies(tmdb_api_key, "1" * 10)
@pytest.mark.usefixtures("tmdb_api_key")
def test_tmdb_search_movies__success(tmdb_api_key):
expected_top_level_keys = {
"page",
"results",
"total_pages",
"total_results",
}
expected_results_keys = {
"adult",
"backdrop_path",
"genre_ids",
"id",
"original_language",
"original_title",
"overview",
"popularity",
"poster_path",
"release_date",
"title",
"video",
"vote_average",
"vote_count",
}
result = tmdb_search_movies(tmdb_api_key, "the goonies", 1985)
assert isinstance(result, dict)
assert set(result.keys()) == expected_top_level_keys
assert isinstance(result["results"], list)
assert expected_results_keys == set(result.get("results", [{}])[0].keys())
assert len(result["results"]) == 1
assert result["results"][0]["original_title"] == "The Goonies"
result = tmdb_search_movies(tmdb_api_key, "the goonies")
assert len(result["results"]) > 1
def test_tmdb_search_movies__bad_api_key():
with pytest.raises(MapiProviderException):
tmdb_search_movies(JUNK_TEXT, "the goonies", cache=False)
@pytest.mark.usefixtures("tmdb_api_key")
def test_tmdb_search_movies__bad_title(tmdb_api_key):
with pytest.raises(MapiNotFoundException):
tmdb_search_movies(tmdb_api_key, JUNK_TEXT, cache=False)
@pytest.mark.usefixtures("tmdb_api_key")
def test_tmdb_search_movies__bad_year(tmdb_api_key):
with pytest.raises(MapiProviderException):
tmdb_search_movies(
tmdb_api_key, "the goonies", year=JUNK_TEXT, cache=False
)
| jkwill87/mapi | tests/endpoints/test_endpoints_tmdb.py | Python | mit | 4,908 |
"""Provide infrastructure to allow exploration of variations within populations.
Uses the gemini framework (https://github.com/arq5x/gemini) to build SQLite
database of variations for query and evaluation.
"""
import collections
import csv
from distutils.version import LooseVersion
import os
import subprocess
import toolz as tz
from bcbio import install, utils
from bcbio.distributed.transaction import file_transaction
from bcbio.pipeline import config_utils
from bcbio.pipeline import datadict as dd
from bcbio.provenance import do, programs
from bcbio.variation import multiallelic, vcfutils
def prep_gemini_db(fnames, call_info, samples, extras):
"""Prepare a gemini database from VCF inputs prepared with snpEff.
"""
data = samples[0]
out_dir = utils.safe_makedir(os.path.join(data["dirs"]["work"], "gemini"))
name, caller, is_batch = call_info
gemini_db = os.path.join(out_dir, "%s-%s.db" % (name, caller))
multisample_vcf = get_multisample_vcf(fnames, name, caller, data)
gemini_vcf = multiallelic.to_single(multisample_vcf, data)
use_gemini_quick = (do_db_build(samples) and
any(vcfutils.vcf_has_variants(f) for f in fnames))
if not utils.file_exists(gemini_db) and use_gemini_quick:
use_gemini = do_db_build(samples) and any(vcfutils.vcf_has_variants(f) for f in fnames)
if use_gemini:
ped_file = create_ped_file(samples + extras, gemini_vcf)
gemini_db = create_gemini_db(gemini_vcf, data, gemini_db, ped_file)
return [[(name, caller), {"db": gemini_db if utils.file_exists(gemini_db) else None,
"vcf": multisample_vcf if is_batch else None}]]
def create_gemini_db(gemini_vcf, data, gemini_db=None, ped_file=None):
if not gemini_db:
gemini_db = "%s.db" % utils.splitext_plus(gemini_vcf)[0]
if not utils.file_exists(gemini_db):
if not vcfutils.vcf_has_variants(gemini_vcf):
return None
with file_transaction(data, gemini_db) as tx_gemini_db:
gemini = config_utils.get_program("gemini", data["config"])
if "program_versions" in data["config"].get("resources", {}):
gemini_ver = programs.get_version("gemini", config=data["config"])
else:
gemini_ver = None
# Recent versions of gemini allow loading only passing variants
load_opts = ""
if not gemini_ver or LooseVersion(gemini_ver) > LooseVersion("0.6.2.1"):
load_opts += " --passonly"
# For small test files, skip gene table loading which takes a long time
if gemini_ver and LooseVersion(gemini_ver) > LooseVersion("0.6.4"):
if _is_small_vcf(gemini_vcf):
load_opts += " --skip-gene-tables"
if "/test_automated_output/" in gemini_vcf:
load_opts += " --test-mode"
# Skip CADD or gerp-bp if neither are loaded
if gemini_ver and LooseVersion(gemini_ver) >= LooseVersion("0.7.0"):
gemini_dir = install.get_gemini_dir(data)
for skip_cmd, check_file in [("--skip-cadd", "whole_genome_SNVs.tsv.compressed.gz")]:
if not os.path.exists(os.path.join(gemini_dir, check_file)):
load_opts += " %s" % skip_cmd
# skip gerp-bp which slows down loading
load_opts += " --skip-gerp-bp "
num_cores = data["config"]["algorithm"].get("num_cores", 1)
tmpdir = os.path.dirname(tx_gemini_db)
eanns = _get_effects_flag(data)
# Apply custom resource specifications, allowing use of alternative annotation_dir
resources = config_utils.get_resources("gemini", data["config"])
gemini_opts = " ".join([str(x) for x in resources["options"]]) if resources.get("options") else ""
cmd = ("{gemini} {gemini_opts} load {load_opts} -v {gemini_vcf} {eanns} --cores {num_cores} "
"--tempdir {tmpdir} {tx_gemini_db}")
cmd = cmd.format(**locals())
do.run(cmd, "Create gemini database for %s" % gemini_vcf, data)
if ped_file:
cmd = [gemini, "amend", "--sample", ped_file, tx_gemini_db]
do.run(cmd, "Add PED file to gemini database", data)
return gemini_db
def _get_effects_flag(data):
effects_config = tz.get_in(("config", "algorithm", "effects"), data, "snpeff")
if effects_config == "snpeff":
return "-t snpEff"
elif effects_config == "vep":
return "-t VEP"
else:
return ""
def get_affected_status(data):
"""Retrieve the affected/unaffected status of sample.
Uses unaffected (1), affected (2), unknown (0) coding from PED files:
http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#ped
"""
affected = set(["tumor", "affected"])
unaffected = set(["normal", "unaffected"])
phenotype = str(tz.get_in(["metadata", "phenotype"], data, "")).lower()
if phenotype in affected:
return 2
elif phenotype in unaffected:
return 1
else:
return 0
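# Example (illustrative): {"metadata": {"phenotype": "Tumor"}} maps to 2,
# "normal"/"unaffected" map to 1, and a missing or unknown phenotype to 0.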
def create_ped_file(samples, base_vcf):
"""Create a GEMINI-compatible PED file, including gender, family and phenotype information.
Checks for a specified `ped` file in metadata, and will use sample information from this file
before reconstituting from metadata information.
"""
def _code_gender(data):
g = dd.get_gender(data)
if g and str(g).lower() in ["male", "m"]:
return 1
elif g and str(g).lower() in ["female", "f"]:
return 2
else:
return 0
out_file = "%s.ped" % utils.splitext_plus(base_vcf)[0]
sample_ped_lines = {}
header = ["#Family_ID", "Individual_ID", "Paternal_ID", "Maternal_ID", "Sex", "Phenotype", "Ethnicity"]
for md_ped in list(set([x for x in [tz.get_in(["metadata", "ped"], data)
for data in samples] if x is not None])):
with open(md_ped) as in_handle:
reader = csv.reader(in_handle, dialect="excel-tab")
for parts in reader:
if parts[0].startswith("#") and len(parts) > len(header):
header = header + parts[len(header):]
else:
sample_ped_lines[parts[1]] = parts
if not utils.file_exists(out_file):
with file_transaction(samples[0], out_file) as tx_out_file:
with open(tx_out_file, "w") as out_handle:
writer = csv.writer(out_handle, dialect="excel-tab")
writer.writerow(header)
batch = _find_shared_batch(samples)
for data in samples:
sname = dd.get_sample_name(data)
if sname in sample_ped_lines:
writer.writerow(sample_ped_lines[sname])
else:
writer.writerow([batch, sname, "-9", "-9",
_code_gender(data), get_affected_status(data), "-9"])
return out_file
def _find_shared_batch(samples):
for data in samples:
batch = tz.get_in(["metadata", "batch"], data, dd.get_sample_name(data))
if not isinstance(batch, (list, tuple)):
return batch
def _is_small_vcf(vcf_file):
"""Check for small VCFs which we want to analyze quicker.
"""
count = 0
small_thresh = 250
with utils.open_gzipsafe(vcf_file) as in_handle:
for line in in_handle:
if not line.startswith("#"):
count += 1
if count > small_thresh:
return False
return True
def get_multisample_vcf(fnames, name, caller, data):
"""Retrieve a multiple sample VCF file in a standard location.
Handles inputs with multiple repeated input files from batches.
"""
unique_fnames = []
for f in fnames:
if f not in unique_fnames:
unique_fnames.append(f)
out_dir = utils.safe_makedir(os.path.join(data["dirs"]["work"], "gemini"))
if len(unique_fnames) > 1:
gemini_vcf = os.path.join(out_dir, "%s-%s.vcf.gz" % (name, caller))
vrn_file_batch = None
for variant in data["variants"]:
if variant["variantcaller"] == caller and variant.get("vrn_file_batch"):
vrn_file_batch = variant["vrn_file_batch"]
if vrn_file_batch:
utils.symlink_plus(vrn_file_batch, gemini_vcf)
return gemini_vcf
else:
return vcfutils.merge_variant_files(unique_fnames, gemini_vcf, data["sam_ref"],
data["config"])
else:
gemini_vcf = os.path.join(out_dir, "%s-%s%s" % (name, caller, utils.splitext_plus(unique_fnames[0])[1]))
utils.symlink_plus(unique_fnames[0], gemini_vcf)
return gemini_vcf
def _has_gemini(data):
from bcbio import install
gemini_dir = install.get_gemini_dir(data)
return ((os.path.exists(gemini_dir) and len(os.listdir(gemini_dir)) > 0)
and os.path.exists(os.path.join(os.path.dirname(gemini_dir), "gemini-config.yaml")))
def do_db_build(samples, need_bam=True, gresources=None):
"""Confirm we should build a gemini database: need gemini + human samples + not in tool_skip.
"""
genomes = set()
for data in samples:
if not need_bam or data.get("align_bam"):
genomes.add(data["genome_build"])
if "gemini" in utils.get_in(data, ("config", "algorithm", "tools_off"), []):
return False
if len(genomes) == 1:
if not gresources:
gresources = samples[0]["genome_resources"]
return (tz.get_in(["aliases", "human"], gresources, False)
and _has_gemini(samples[0]))
else:
return False
def get_gemini_files(data):
"""Enumerate available gemini data files in a standard installation.
"""
try:
from gemini import annotations, config
except ImportError:
return {}
return {"base": config.read_gemini_config()["annotation_dir"],
"files": annotations.get_anno_files().values()}
def _group_by_batches(samples, check_fn):
"""Group data items into batches, providing details to retrieve results.
"""
batch_groups = collections.defaultdict(list)
singles = []
out_retrieve = []
extras = []
for data in [x[0] for x in samples]:
if check_fn(data):
batch = tz.get_in(["metadata", "batch"], data)
name = str(data["name"][-1])
if batch:
out_retrieve.append((str(batch), data))
else:
out_retrieve.append((name, data))
for vrn in data["variants"]:
if vrn.get("population", True):
if batch:
batch_groups[(str(batch), vrn["variantcaller"])].append((vrn["vrn_file"], data))
else:
singles.append((name, vrn["variantcaller"], data, vrn["vrn_file"]))
else:
extras.append(data)
return batch_groups, singles, out_retrieve, extras
def _has_variant_calls(data):
if data.get("align_bam"):
for vrn in data["variants"]:
if vrn.get("vrn_file") and vcfutils.vcf_has_variants(vrn["vrn_file"]):
return True
return False
def prep_db_parallel(samples, parallel_fn):
"""Prepares gemini databases in parallel, handling jointly called populations.
"""
batch_groups, singles, out_retrieve, extras = _group_by_batches(samples, _has_variant_calls)
to_process = []
has_batches = False
for (name, caller), info in batch_groups.iteritems():
fnames = [x[0] for x in info]
to_process.append([fnames, (str(name), caller, True), [x[1] for x in info], extras])
has_batches = True
for name, caller, data, fname in singles:
to_process.append([[fname], (str(name), caller, False), [data], extras])
if len(samples) > 0 and not do_db_build([x[0] for x in samples]) and not has_batches:
return samples
output = parallel_fn("prep_gemini_db", to_process)
out_fetch = {}
for batch_id, out_file in output:
out_fetch[tuple(batch_id)] = out_file
out = []
for batch_name, data in out_retrieve:
out_variants = []
for vrn in data["variants"]:
use_population = vrn.pop("population", True)
if use_population:
vrn["population"] = out_fetch[(batch_name, vrn["variantcaller"])]
out_variants.append(vrn)
data["variants"] = out_variants
out.append([data])
for x in extras:
out.append([x])
return out
| gifford-lab/bcbio-nextgen | bcbio/variation/population.py | Python | mit | 12,759 |
from Operators.Mutation.Mutator import Mutator
from Operators.Mutation.DisplacementMutator import DisplacementMutator
from Operators.Mutation.InversionMutator import InversionMutator
| akkenoth/TSPGen | Operators/Mutation/__init__.py | Python | mit | 183 |
class Solution(object):
def dfs(self,rooms):
        # collect all gate positions
queue=[(i,j,0) for i,rows in enumerate(rooms) for j,v in enumerate(rows) if not v]
while queue:
i,j,depth=queue.pop()
            # found a shorter path from a gate; update
if depth<rooms[i][j]:
rooms[i][j]=depth
for newi,newj in (i+1,j),(i-1,j),(i,j-1),(i,j+1):
if 0<=newi<len(rooms) and 0<=newj<len(rooms[0]) and depth<rooms[newi][newj]:
queue.append((newi,newj,depth+1))
def bfs(self,rooms):
        # collect all gate positions
queue=[(i,j) for i,rows in enumerate(rooms) for j,v in enumerate(rows) if not v]
while queue:
            # pop the first inserted element (FIFO)
i,j=queue.pop(0)
for newi,newj in (i+1,j),(i-1,j),(i,j-1),(i,j+1):
if 0<=newi<len(rooms) and 0<=newj<len(rooms[0]) and rooms[newi][newj]==2147483647:
rooms[newi][newj]=rooms[i][j]+1
queue.append((newi,newj))
def wallsAndGates(self, rooms):
"""
:type rooms: List[List[int]]
:rtype: void Do not return anything, modify rooms in-place instead.
"""
self.bfs(rooms)
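# Minimal self-check (illustrative, not part of the original submission).
# 2147483647 (INF) marks an empty room, 0 a gate, -1 a wall; the grid and
# the expected answer follow the classic example for this problem.
if __name__ == '__main__':
    INF = 2147483647
    rooms = [[INF, -1, 0, INF],
             [INF, INF, INF, -1],
             [INF, -1, INF, -1],
             [0, -1, INF, INF]]
    Solution().wallsAndGates(rooms)
    print(rooms)  # expected: [[3,-1,0,1],[2,2,1,-1],[1,-1,2,-1],[0,-1,3,4]]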
| Tanych/CodeTracking | 286-Walls-and-Gates/solution.py | Python | mit | 1,268 |
from .base import BASE_DIR, INSTALLED_APPS, MIDDLEWARE_CLASSES, REST_FRAMEWORK
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1']
SECRET_KEY = 'secret'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'holonet',
'USER': 'holonet',
'PASSWORD': '',
'HOST': '127.0.0.1',
'PORT': '',
}
}
CACHES = {
'default': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': 'redis://127.0.0.1:6379/0',
}
}
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
BROKER_URL = 'redis://127.0.0.1'
ELASTICSEARCH = {
'default': {
'hosts': [
'127.0.0.1:9200'
]
}
}
REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'] += ['rest_framework.renderers.BrowsableAPIRenderer']
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = ('127.0.0.1', )
POSTFIX_TRANSPORT_MAPS_LOCATION = '{0}/../mta/shared/'.format(BASE_DIR)
| webkom/holonet | holonet/settings/development.py | Python | mit | 1,029 |
import requests
import copy
# Fetch the members of a FB group
def fb_get_group_members(fb_page_id, access_token):
url = 'https://graph.facebook.com/v2.8/%s/members?limit=1000&access_token=%s' % (fb_page_id, access_token)
fb_group_members = {'status':'OK', 'data':{'members':[], 'users_count':0}}
while True:
response = requests.get(url)
if response.status_code == 200:
try:
keys = response.json().keys()
if 'data' in keys:
#title_id += [ id['id'] for id in content ]
content = response.json()['data']
keys = response.json().keys()
url = ''
if 'paging' in keys:
keys = response.json()['paging'].keys()
if 'next' in keys:
url = response.json()['paging']['next']
for member in content:
                    member['is_group_member'] = 1
fb_group_members['data']['members'].append(member)
if url =='':
break
except (KeyError, TypeError):
fb_group_members['status'] = 'Unknown error'
break
else:
fb_group_members['status'] = str(response.status_code)
break
fb_group_members['data']['users_count'] = len(fb_group_members['data']['members'])
return fb_group_members
# Fetch general information about the group
def fb_get_group_data(fb_page_id, access_token):
url = 'https://graph.facebook.com/v2.8/%s/?fields=id,name&access_token=%s' % (fb_page_id, access_token)
fb_group = {'status':'OK', 'data':{'id':'','name':'', 'updated_time':'','members':[], 'users_count':0, 'all_users':[] }}
response =requests.get(url)
if response.status_code == 200:
fb_data = fb_group['data']
data = response.json()
keys = data.keys()
if 'id' in keys:
fb_data['id'] = data['id']
else:
fb_group['status'] = 'Missing group id'
if 'name' in keys:
fb_data['name'] = data['name']
else:
fb_group['status'] = 'Missing group name'
'''
if 'updated_time' in keys:
fb_data['updated_time'] = data['updated_time']
'''
members = fb_get_group_members(fb_page_id, access_token)
if members['status']== 'OK':
fb_group['data']['members'] = copy.deepcopy(members['data']['members'])
fb_group['data']['users_count'] = members['data']['users_count']
fb_group['data']['all_users'] = copy.deepcopy(members['data']['members'])
else:
fb_group['status'] = str(response.status_code)
return fb_group
#-----Fetch all posts from the group-------
def fb_get_all_posts(fb_page_id, access_token):
url = 'https://graph.facebook.com/v2.8/%s/feed?fields=id,name,link,message,from,updated_time,created_time&access_token=%s' % (fb_page_id, access_token)
fb_posts = {'status':'OK', 'data':{'posts':[],'posts_count':0}}
#fb_posts = {'status':'OK', 'data':{'id':'','name':'', 'updated_time':'','link':'', 'message':''}}
while True:
response = requests.get(url)
# print(response.status_code)
if response.status_code == 200:
try:
keys = response.json().keys()
                #check that the response body is JSON
if 'data' in keys:
content = response.json()['data']
keys = response.json().keys()
url = ''
if 'paging' in keys:
keys = response.json()['paging'].keys()
if 'next' in keys:
url = response.json()['paging']['next']
for post in content:
fb_posts['data']['posts'].append(post)
if url =='':
break
except (KeyError, TypeError):
fb_posts['status'] = 'Unknown error'
break
else:
fb_posts['status'] = str(response.status_code)
break
fb_posts['data']['posts_count'] = len(fb_posts['data']['posts'])
return fb_posts
#Fetch all likes of a post
def fb_get_post_likes(post_id, access_token):
url = 'https://graph.facebook.com/v2.8/%s/reactions/?access_token=%s' % (post_id, access_token)
fb_likes = {'status':'OK', 'data':{'likes':[],'likes_count':0}}
while True:
response = requests.get(url)
if response.status_code == 200:
try:
keys = response.json().keys()
                #check that the response body is JSON
if 'data' in keys:
content = response.json()['data']
keys = response.json().keys()
url = ''
if 'paging' in keys:
keys = response.json()['paging'].keys()
if 'next' in keys:
url = response.json()['paging']['next']
for fb_like in content:
fb_likes['data']['likes'].append(fb_like)
if url =='':
break
except (KeyError, TypeError):
fb_likes['status'] = 'Unknown error'
break
else:
fb_likes['status'] = str(response.status_code)
break
fb_likes['data']['likes_count'] = len(fb_likes['data']['likes'])
return fb_likes
#Fetch all comments of a post
def fb_get_post_comments(post_id, access_token):
url = 'https://graph.facebook.com/v2.8/%s/comments/?fields=id,message,from,updated_time,created_time&access_token=%s' % (post_id, access_token)
fb_comments = {'status':'OK', 'data':{'comments':[],'comments_count':0}}
while True:
response = requests.get(url)
if response.status_code == 200:
try:
keys = response.json().keys()
                #check that the response body is JSON
if 'data' in keys:
content = response.json()['data']
keys = response.json().keys()
url = ''
if 'paging' in keys:
keys = response.json()['paging'].keys()
if 'next' in keys:
url = response.json()['paging']['next']
for fb_comment in content:
fb_comments['data']['comments'].append(fb_comment)
if url =='':
break
except (KeyError, TypeError):
fb_comments['status'] = 'Unknown error'
break
else:
fb_comments['status'] = str(response.status_code)
break
fb_comments['data']['comments_count'] = len(fb_comments['data']['comments'])
return fb_comments
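# Shape of the dict returned by the three fetch helpers above (illustrative):
#   {'status': 'OK',  # or an HTTP status code / error text on failure
#    'data': {'comments': [...], 'comments_count': <int>}}
# with 'likes'/'likes_count' and 'posts'/'posts_count' for the other helpers.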
# Fetch all data about the page
def fb_get_all_data(fb_page_id, access_token):
    #read all group data
fb_group = fb_get_group_data(fb_page_id, access_token)
if fb_group['status'] == 'OK':
print('Group id: %s name: %s' % (fb_group['data']['id'], fb_group['data']['name']))
        print('Users in group: %s' % fb_group['data']['users_count'])
        #write to the DB
else:
print(fb_group['status'])
exit()
    #read all posts
    print('************* reading all posts ************')
data = fb_get_all_posts(fb_page_id, access_token)
if data['status'] == 'OK':
fb_posts = copy.deepcopy(data['data']['posts'])
posts_count = data['data']['posts_count']
# print(fb_posts[0])
print('Posts in group: %s' % posts_count)
for fb_post in fb_posts:
        print('************* Post %s from %s ************' % (fb_post['id'], fb_post['created_time']))
        # check the likes of the post
        data = fb_get_post_likes(fb_post['id'], access_token)
        if data['status'] == 'OK':
            fb_post['likes'] = copy.deepcopy(data['data']['likes'])
            fb_post['likes_count'] = data['data']['likes_count']
            #print('post likes to users')
            # compare against member ids; a bare id never matches the member dicts
            member_ids = set(m['id'] for m in fb_group['data']['members'])
            for user_like in fb_post['likes']:
                #print(user_like)
                if user_like['id'] not in member_ids:
                    #print('new user %s'% user_like)
                    fb_group['data']['all_users'].append({'id':user_like['id'],'name':user_like['name'], 'is_group_member':0})
print('Likes count: %s' % fb_post['likes_count'])
else:
print(data['status'])
        #check the comments of the post
data = fb_get_post_comments(fb_post['id'], access_token)
if data['status'] == 'OK':
fb_post['comments'] = copy.deepcopy(data['data']['comments'])
fb_post['comments_count'] = data['data']['comments_count']
print('Comments count: %s' % fb_post['comments_count'] )
#print('post likes to users')
            # compare the commenter id (not the whole comment dict) against member ids
            member_ids = set(m['id'] for m in fb_group['data']['members'])
            for user_comments in fb_post['comments']:
                #print(user_comments)
                keys = user_comments.keys()
                comment_user_id = user_comments['from']['id'] if 'from' in keys else ''
                comment_user_name = user_comments['from']['name'] if 'from' in keys else ''
                if comment_user_id != '' and comment_user_id not in member_ids:
                    fb_group['data']['all_users'].append({'id':comment_user_id,'name':comment_user_name, 'is_group_member':0})
else:
print(data['status'])
fb_group['data']['posts'] = copy.deepcopy(fb_posts)
fb_group['data']['posts_count'] = posts_count
else:
print(data['status'])
    #print('before %s' % fb_group)
return fb_group
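# Example wiring (illustrative; the group id and token are placeholders):
#   group = fb_get_all_data('1234567890', '<ACCESS_TOKEN>')
#   if group['status'] == 'OK':
#       print(group['data']['name'], group['data']['posts_count'])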
if __name__ == '__main__':
    print ('Not meant to be run from the console') | eugeneks/zmeyka | fb_req.py | Python | mit | 10,919 |
# -*- coding: utf-8 -*-
"""
frest - flask restful api frame
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This project is the frame of the restful api server created with flask.
:copyright: (C) 2017 [email protected]
:license: MIT, see LICENSE for more details.
"""
import os
from flask_script import Server, Manager
from flask_migrate import Migrate, MigrateCommand
from app import app, db, routes, handler
from app.config import APP_DEFAULT_PORT, APP_SECRET_KEY, ENVIRONMENT
from app.modules.auth import login, token
if __name__ == '__main__':
port = int(os.environ.get('PORT', APP_DEFAULT_PORT))
app.secret_key = APP_SECRET_KEY
db.create_all()
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if ENVIRONMENT == 'production' or ENVIRONMENT == 'testing':
manager.add_command('runserver', Server(host='0.0.0.0', port=port, use_debugger=False))
else:
manager.add_command('runserver', Server(host='0.0.0.0', port=port, use_debugger=True))
manager.run()
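# Typical invocations, assuming this file is saved as app.py (illustrative):
#   python app.py runserver   -> start the server on APP_DEFAULT_PORT
#   python app.py db migrate  -> run the registered Flask-Migrate commands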
| h4wldev/Frest | app.py | Python | mit | 1,064 |
import sys
#from OpenGL.GLUT import *
#from OpenGL.GLU import *
#from OpenGL.GL import *
class abstract:
params = {}
windowId = None
terminated = False
def initParams(self):
return self
def __init__(self):
self.initParams().init()
return
def init(self):
return
def mouse(self, button, state, x, y):
return
def mouseMotion(self, x, y):
return
def keyboard(self, asciiCode, x, y):
return
def keyboardSpecial(self, key, x, y):
return
def idle(self):
return
def timer(self, value):
return
def render(self):
return
def reshape(self, width, height):
return
def run(self):
return self
def destroy(self):
del self
return
def select(self):
return self.activate()
def activate(self):
return self
def redisplay(self):
return self
def hide(self):
return self
def show(self):
return self
def title(self, title):
return self
def setPosition(self, x, y):
return self
def setResolution(self, width, height):
return self
| nv8h/PyRattus | base/modules/rat/application/abstract.py | Python | mit | 1,309 |
#! /usr/bin/python
import event
import nxt.locator
import nxt.motor as motor
brick = nxt.locator.find_one_brick()
height_motor = motor.Motor(brick, motor.PORT_A)
height_motor.turn(127, 5000, brake=False) | rolisz/walter_waiter | lower.py | Python | mit | 203 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from GestureAgentsTUIO.Tuio import TuioAgentGenerator
import GestureAgentsPygame.Screen as Screen
from pygame.locals import *
class MouseAsTuioAgentGenerator(object):
def __init__(self):
self.pressed = False
self.myagent = None
self.sid = -1
self.screensize = Screen.size
def event(self, e):
if e.type == MOUSEBUTTONDOWN:
self.pressed = True
self.myagent = TuioAgentGenerator.makeCursorAgent()
self._updateAgent(self.myagent, e)
self.myagent.newAgent(self.myagent)
self.myagent.newCursor(self.myagent)
elif e.type == MOUSEBUTTONUP:
self.pressed = False
self._updateAgent(self.myagent, e)
self.myagent.removeCursor(self.myagent)
self.myagent.finish()
self.myagent = None
elif e.type == MOUSEMOTION:
if self.pressed:
self._updateAgent(self.myagent, e)
self.myagent.updateCursor(self.myagent)
def _updateAgent(self, a, e):
a.pos = e.pos
a.posx = e.pos[0]
a.posy = e.pos[1]
a.sessionid = self.sid
a.xmot = 0
a.ymot = 0
a.mot_accel = 0
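# Sketch of how this generator is typically driven from a pygame event loop
# (illustrative; agent subscription is handled by the surrounding framework):
#   generator = MouseAsTuioAgentGenerator()
#   for e in pygame.event.get():
#       generator.event(e)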
| chaosct/GestureAgents | GestureAgentsPygame/Mouse.py | Python | mit | 1,272 |
#!/usr/bin/python3
# Filename: k_means_cluster.py
"""
A machine learning algorithm for K-means clustering.
Date: 24th March, 2015 pm
"""
__author__ = "Anthony Wanjohi"
__version__ = "1.0.0"
import random, fractions
def euclidean_distance(point, centroid):
'''Returns the euclidean distance between two points'''
assert type(point) is tuple
assert type(centroid) is tuple
#x and y values for the point and the centroid
point_x, point_y = point
centroid_x, centroid_y = centroid
#get euclidean distance
distance = ( (point_x - centroid_x) ** 2 ) + ( (point_y - centroid_y) ** 2 )
distance = distance ** (0.5)
return round(distance, 4)
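# Quick sanity check (illustrative): the 3-4-5 right triangle,
#   euclidean_distance((0, 0), (3, 4)) -> 5.0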
def get_coordinates(points):
#get coordinates for the points given in tuple form
print("Please provide coordinates for the {} points. (x, y)".format(points))
coordinates = []
for coords in range(points):
#read as a tuple i.e (x, y)
user_coords = input()
user_coords = user_coords.split(',')
x, y = int(user_coords[0]), int(user_coords[1])
coordinates.append((x, y))
return coordinates
def get_coords(file_name):
'''Get coordinates from a file.'''
file_handle = open(file_name, "r")
file_coords = []
for content in file_handle:
content = content.replace(' ', "").replace("\n", "").replace('"', "").split(',')
coord = int(content[0]), int(content[1])
file_coords.append(coord)
return file_coords
def get_group_matrix(coords, centroid_one, centroid_two):
'''Returns a group matrix'''
euclid_distance = []
grp_matrix = []
for y in coords:
#get distance for each point in regard to centroid one
distance_one = euclidean_distance(y, centroid_one)
#get distance for each point in regard to centroid two
distance_two = euclidean_distance(y, centroid_two)
euclid_distance.append((distance_one, distance_two))
#group matrix condtions
if distance_one > distance_two:
grp_matrix.append((0, 1))
elif distance_one < distance_two:
grp_matrix.append((1, 0))
return grp_matrix
def get_avg_centroid(x_y_index, coords):
	'''Returns new centroid coordinates if more than 1 point appears in any cluster'''
x_coords, y_coords = [], []
for index in x_y_index:
#new_points.append(coords[index])
x, y = coords[index]
x_coords.append(x)
y_coords.append(y)
#get average of both x and y coords
x_coords = round(sum(x_coords) / (len(x_coords) * 1.0), 4)
y_coords = round(sum(y_coords) / (len(y_coords) * 1.0), 4)
centroid = (x_coords, y_coords)
return centroid
def k_means_clustering(points):
'''Return the group matrix given coordinates'''
coords = get_coordinates(points)
centroids = []
euclid_distance = []
group_distance = []
grp_matrix = []
#create an alphabet number mapping
alphabets = dict(A = 1,B = 2,C = 3,D = 4,E = 5,F = 6,G = 7,H = 8,I = 9,J = 10,K = 11,L = 12,M = 13,
N = 14,O = 15,P = 16,Q = 17,R = 18,S = 19,T = 20,U = 21,V = 22,W = 23,X = 24,Y = 25,Z = 26)
	#get two distinct random centroids; keep drawing until we have two
	while len(centroids) < 2:
		k = random.randint(0, (points-1))
		if k not in centroids:
			centroids.append(k)
#get the centroids as per the above rand positions
centroids = tuple(centroids)
i, j = centroids
centroid_one = coords[i]
centroid_two = coords[j]
print("\nRandom Centroids->",centroid_one, centroid_two)
#get the group matrix
grp_matrix = get_group_matrix(coords, centroid_one, centroid_two)
while True:
#iterate till group matrix is stable
#get the number of points in each cluster
a, b, m_index_values, n_index_values = [], [], [], []
for index, x_y_values in enumerate(grp_matrix):
m, n = x_y_values
a.append(m)
b.append(n)
if m == 1:
m_index_values.append(index)
elif n == 1:
n_index_values.append(index)
cluster_one_elems = sum(a)
cluster_two_elems = sum(b)
if cluster_one_elems == 1:
#use the same centroid from the previous one
centroid_one = centroid_one
elif cluster_one_elems > 1:
#new centroid is the average of the elements
centroid_one = get_avg_centroid(m_index_values, coords)
if cluster_two_elems == 1:
#use the same centroid used in the last iteration
centroid_two = centroid_two
elif cluster_two_elems > 1:
#new centroid is the average of the elements
centroid_two= get_avg_centroid(n_index_values, coords)
print("New Centroids->",centroid_one, centroid_two)
#get new group matrix
new_grp_matrix = get_group_matrix(coords, centroid_one, centroid_two)
#when no more change happens, stop iteration
if new_grp_matrix == grp_matrix:
return grp_matrix
grp_matrix = new_grp_matrix
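# Example session (illustrative): with 4 points entered as 1,1 1,2 5,5 5,6
# the returned group matrix pairs the first two and last two points, e.g.
# [(1, 0), (1, 0), (0, 1), (0, 1)]; which column marks which cluster depends
# on the random initial centroids.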
if __name__ == "__main__":
guess = int(input('Enter the number of coordinates to input : '))
print(k_means_clustering(guess))
| TonyHinjos/Machine-Learning-Algorithms-Toolkit | k-means-clustering/k_means_cluster.py | Python | mit | 4,794 |
import subprocess
import os
#from tailorscad.config import ScadConfig
BASE_DIR = '/usr/bin/'
DEFAULT = BASE_DIR + 'openscad'
def build_with_openscad(state):
args = build_args_from_state(state)
out_call = ''
for arg in args:
out_call += ' ' + arg
print 'args:', out_call
try:
subprocess.check_call(args)
return True
except subprocess.CalledProcessError as (e):
print str(e)
return False
def build_args_from_state(state):
#executable = ScadConfig.open_scad if ScadConfig.open_scad else DEFAULT
executable = 'openscad'
replace = []
if state.params:
print 'state params:', state.params
# TODO: Handle string exceptions
replace = ['-D ' + '='.join((key, str(value)))
for key, value in state.params.iteritems()]
print 'state replace:', replace
# TODO: Handle different output types
output = os.path.join(state.output_directory, state.name + ".stl")
args = [executable, '-o', output]
if len(replace) >= 1:
args.extend(replace)
args.append(state.main_path)
return args
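# For a hypothetical state with name='part', params={'width': 10} and
# main_path='part.scad', the list built above would look like (illustrative):
#   ['openscad', '-o', '<output_dir>/part.stl', '-D width=10', 'part.scad']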
| savorywatt/tailorSCAD | tailorscad/builder/openscad.py | Python | mit | 1,142 |
# coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.contributor_email_v30_rc1 import ContributorEmailV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.contributor_orcid_v30_rc1 import ContributorOrcidV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.credit_name_v30_rc1 import CreditNameV30Rc1 # noqa: F401,E501
from orcid_api_v3.models.funding_contributor_attributes_v30_rc1 import FundingContributorAttributesV30Rc1 # noqa: F401,E501
class FundingContributorV30Rc1(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'contributor_orcid': 'ContributorOrcidV30Rc1',
'credit_name': 'CreditNameV30Rc1',
'contributor_email': 'ContributorEmailV30Rc1',
'contributor_attributes': 'FundingContributorAttributesV30Rc1'
}
attribute_map = {
'contributor_orcid': 'contributor-orcid',
'credit_name': 'credit-name',
'contributor_email': 'contributor-email',
'contributor_attributes': 'contributor-attributes'
}
def __init__(self, contributor_orcid=None, credit_name=None, contributor_email=None, contributor_attributes=None): # noqa: E501
"""FundingContributorV30Rc1 - a model defined in Swagger""" # noqa: E501
self._contributor_orcid = None
self._credit_name = None
self._contributor_email = None
self._contributor_attributes = None
self.discriminator = None
if contributor_orcid is not None:
self.contributor_orcid = contributor_orcid
if credit_name is not None:
self.credit_name = credit_name
if contributor_email is not None:
self.contributor_email = contributor_email
if contributor_attributes is not None:
self.contributor_attributes = contributor_attributes
@property
def contributor_orcid(self):
"""Gets the contributor_orcid of this FundingContributorV30Rc1. # noqa: E501
:return: The contributor_orcid of this FundingContributorV30Rc1. # noqa: E501
:rtype: ContributorOrcidV30Rc1
"""
return self._contributor_orcid
@contributor_orcid.setter
def contributor_orcid(self, contributor_orcid):
"""Sets the contributor_orcid of this FundingContributorV30Rc1.
:param contributor_orcid: The contributor_orcid of this FundingContributorV30Rc1. # noqa: E501
:type: ContributorOrcidV30Rc1
"""
self._contributor_orcid = contributor_orcid
@property
def credit_name(self):
"""Gets the credit_name of this FundingContributorV30Rc1. # noqa: E501
:return: The credit_name of this FundingContributorV30Rc1. # noqa: E501
:rtype: CreditNameV30Rc1
"""
return self._credit_name
@credit_name.setter
def credit_name(self, credit_name):
"""Sets the credit_name of this FundingContributorV30Rc1.
:param credit_name: The credit_name of this FundingContributorV30Rc1. # noqa: E501
:type: CreditNameV30Rc1
"""
self._credit_name = credit_name
@property
def contributor_email(self):
"""Gets the contributor_email of this FundingContributorV30Rc1. # noqa: E501
:return: The contributor_email of this FundingContributorV30Rc1. # noqa: E501
:rtype: ContributorEmailV30Rc1
"""
return self._contributor_email
@contributor_email.setter
def contributor_email(self, contributor_email):
"""Sets the contributor_email of this FundingContributorV30Rc1.
:param contributor_email: The contributor_email of this FundingContributorV30Rc1. # noqa: E501
:type: ContributorEmailV30Rc1
"""
self._contributor_email = contributor_email
@property
def contributor_attributes(self):
"""Gets the contributor_attributes of this FundingContributorV30Rc1. # noqa: E501
:return: The contributor_attributes of this FundingContributorV30Rc1. # noqa: E501
:rtype: FundingContributorAttributesV30Rc1
"""
return self._contributor_attributes
@contributor_attributes.setter
def contributor_attributes(self, contributor_attributes):
"""Sets the contributor_attributes of this FundingContributorV30Rc1.
:param contributor_attributes: The contributor_attributes of this FundingContributorV30Rc1. # noqa: E501
:type: FundingContributorAttributesV30Rc1
"""
self._contributor_attributes = contributor_attributes
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(FundingContributorV30Rc1, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, FundingContributorV30Rc1):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| Royal-Society-of-New-Zealand/NZ-ORCID-Hub | orcid_api_v3/models/funding_contributor_v30_rc1.py | Python | mit | 6,669 |
from django.contrib import admin
from Players.models import Player
@admin.register(Player)
class PlayerAdmin(admin.ModelAdmin):
view_on_site = True
list_display = ('pk', 'first_name', 'last_name', 'number', 'team', 'position', 'age', 'height', 'weight')
list_filter = ['team', 'position']
search_fields = ['first_name', 'last_name']
# Disable delete when team has 5 players
def has_delete_permission(self, request, obj=None):
try:
return False if Player.objects.filter(team=obj.team).count() == 5 else True
except AttributeError:
pass
    # Disable delete action from the list; not ideal, disables delete for all players
def get_actions(self, request):
actions = super(PlayerAdmin, self).get_actions(request)
del actions['delete_selected']
return actions | pawelad/BLM | Players/admin.py | Python | mit | 851 |
"""Th tests for the Rfxtrx component."""
# pylint: disable=too-many-public-methods,protected-access
import unittest
import time
from homeassistant.bootstrap import _setup_component
from homeassistant.components import rfxtrx as rfxtrx
from tests.common import get_test_home_assistant
class TestRFXTRX(unittest.TestCase):
"""Test the Rfxtrx component."""
def setUp(self):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant(0)
def tearDown(self):
"""Stop everything that was started."""
rfxtrx.RECEIVED_EVT_SUBSCRIBERS = []
rfxtrx.RFX_DEVICES = {}
if rfxtrx.RFXOBJECT:
rfxtrx.RFXOBJECT.close_connection()
self.hass.stop()
def test_default_config(self):
"""Test configuration."""
self.assertTrue(_setup_component(self.hass, 'rfxtrx', {
'rfxtrx': {
'device': '/dev/serial/by-id/usb' +
'-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
'dummy': True}
}))
self.assertTrue(_setup_component(self.hass, 'sensor', {
'sensor': {'platform': 'rfxtrx',
'automatic_add': True,
'devices': {}}}))
while len(rfxtrx.RFX_DEVICES) < 1:
time.sleep(0.1)
self.assertEqual(len(rfxtrx.RFXOBJECT.sensors()), 1)
def test_valid_config(self):
"""Test configuration."""
self.assertTrue(_setup_component(self.hass, 'rfxtrx', {
'rfxtrx': {
'device': '/dev/serial/by-id/usb' +
'-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
'dummy': True}}))
self.assertTrue(_setup_component(self.hass, 'rfxtrx', {
'rfxtrx': {
'device': '/dev/serial/by-id/usb' +
'-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
'dummy': True,
'debug': True}}))
def test_invalid_config(self):
"""Test configuration."""
self.assertFalse(_setup_component(self.hass, 'rfxtrx', {
'rfxtrx': {}
}))
self.assertFalse(_setup_component(self.hass, 'rfxtrx', {
'rfxtrx': {
'device': '/dev/serial/by-id/usb' +
'-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
'invalid_key': True}}))
def test_fire_event(self):
"""Test fire event."""
self.assertTrue(_setup_component(self.hass, 'rfxtrx', {
'rfxtrx': {
'device': '/dev/serial/by-id/usb' +
'-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
'dummy': True}
}))
self.assertTrue(_setup_component(self.hass, 'switch', {
'switch': {'platform': 'rfxtrx',
'automatic_add': True,
'devices':
{'0b1100cd0213c7f210010f51': {
'name': 'Test',
rfxtrx.ATTR_FIREEVENT: True}
}}}))
calls = []
def record_event(event):
"""Add recorded event to set."""
calls.append(event)
self.hass.bus.listen(rfxtrx.EVENT_BUTTON_PRESSED, record_event)
entity = rfxtrx.RFX_DEVICES['213c7f216']
self.assertEqual('Test', entity.name)
self.assertEqual('off', entity.state)
self.assertTrue(entity.should_fire_event)
event = rfxtrx.get_rfx_object('0b1100cd0213c7f210010f51')
event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18,
0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70])
rfxtrx.RECEIVED_EVT_SUBSCRIBERS[0](event)
self.hass.pool.block_till_done()
self.assertEqual(event.values['Command'], "On")
self.assertEqual('on', entity.state)
self.assertEqual(1, len(rfxtrx.RFX_DEVICES))
self.assertEqual(1, len(calls))
self.assertEqual(calls[0].data,
{'entity_id': 'switch.test', 'state': 'on'})
| Zyell/home-assistant | tests/components/test_rfxtrx.py | Python | mit | 4,095 |
import sys
import os.path
import subprocess
PY3 = sys.version >= '3'
from setuptools import setup, find_packages
# http://blogs.nopcode.org/brainstorm/2013/05/20/pragmatic-python-versioning-via-setuptools-and-git-tags/
# Fetch version from git tags, and write to version.py.
# Also, when git is not available (PyPi package), use stored version.py.
version_py = os.path.join(os.path.dirname(__file__), 'dame', 'version.py')
try:
version_git = subprocess.check_output(
["git", "describe", "--always"]).rstrip()
# Convert bytes to str for Python3
if PY3:
version_git = version_git.decode()
except:
with open(version_py, 'r') as fh:
version_git = fh.read().strip().split('=')[-1].replace('"', '')
version_msg = ("# Do not edit this file, "
"pipeline versioning is governed by git tags")
with open(version_py, 'w') as fh:
fh.write(version_msg + os.linesep +
"__version__='{}'\n".format(version_git))
setup(
name="dame",
author="Richard Lindsley",
version=version_git,
packages=find_packages(),
license="MIT",
entry_points={
'gui_scripts': [
'dame = dame.dame:main'
]
},
)
| richli/dame | setup.py | Python | mit | 1,201 |
#!/usr/bin/env python
#Base Server -Chapter three -basicserver.py
import socket, traceback
host=''
port=8080
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((host, port))
print "Waiting for connections..."
s.listen(1)
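# Manual test (illustrative): while this script runs, connect from another
# shell with e.g. `telnet localhost 8080` and watch the peer address print.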
while True:
try:
clientsock, clientaddr=s.accept()
except KeyboardInterrupt:
raise
except:
traceback.print_exc()
continue
try:
print "Got connection from", clientsock.getpeername()
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
try:
clientsock.close()
except KeyboardInterrupt:
raise
except:
traceback.print_exc()
| jac2130/BayesGame | foundations-of-python-network-programming/python2/01/basicserver.py | Python | mit | 745 |
'''
Auxiliary code providing vector-valued scoring functions
for convenient use in the parameter-free Ladder Mechanism.
Original algorithm by Avrim Blum and Moritz Hardt
Python implementation by Jamie Hall and Moritz Hardt
MIT License
'''
from sklearn.utils import check_consistent_length
from functools import wraps
import numpy as np
def validate_input(metric):
@wraps(metric)
def wrapped(y, y_pred):
_check_y_shapes(y, y_pred)
return metric(y, y_pred)
return wrapped
@validate_input
def squared_error(y, y_pred):
return ((y - y_pred)**2)
@validate_input
def absolute_error(y, y_pred):
return np.abs(y-y_pred)
@validate_input
def zero_one_loss(y, y_pred):
return np.sum(y != y_pred)
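# Examples (illustrative):
#   >>> y = np.array([1., 2., 3.])
#   >>> squared_error(y, np.array([1., 2., 5.]))  # -> array([0., 0., 4.])
#   >>> zero_one_loss(y, np.array([1., 0., 3.]))  # -> 1 (one mismatch)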
def _check_y_shapes(y_true, y_pred):
''' Check that two target vectors are compatible.
This is based on sklearn's validation in sklearn.metrics.regression._check_reg_targets
and sklearn.metrics.classification._check_targets.
'''
check_consistent_length(y_true, y_pred)
if y_true.ndim == 1:
y_true = y_true.reshape((-1, 1))
if y_pred.ndim == 1:
y_pred = y_pred.reshape((-1, 1))
if y_true.shape[1] != y_pred.shape[1]:
raise ValueError("y_true and y_pred have different number of output "
"({0}!={1})".format(y_true.shape[1], y_pred.shape[1])) | nerdcha/escalator | escalator/scorers.py | Python | mit | 1,361 |
import sys
import os
import glob
import inspect
import pylab as pl
from numpy import *
import numpy as np  # needed: the code below also uses the np. prefix
from scipy import optimize
import pickle
import time
import copy
cmd_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]) + "/templates")
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
from templutils import *
import pylabsetup
pl.ion()
#fits the vacca leibundgut model to data:
# a linear decay, with a gaussian peak on top, an exponential rise, and possibly a second gaussian (typically the Ia second bump around phase=25 days
def minfunc(p, y, x, e, secondg, plot=False):
'''
p is the parameter list
if secondg=1: secondgaussian added
if secondg=0: secondgaussian not
parameters are:
p[0]=first gaussian normalization (negative if fitting mag)
p[1]=first gaussian mean
p[2]=first gaussian sigma
p[3]=linear decay offset
p[4]=linear decay slope
p[5]=exponxential rise slope
p[6]=exponential zero point
p[7]=second gaussian normalization (negative if fitting mag)
p[8]=second gaussian mean
p[9]=second gaussian sigma
'''
if plot:
pl.figure(3)
pl.errorbar(x, y, yerr=e, color='k')
import time
# time.sleep(1)
# print sum(((y-mycavvaccaleib(x,p,secondg=True))**2))
if secondg > 0:
return sum(((y - mycavvaccaleib(x, p, secondg=True)) ** 2) / e ** 2)
else:
return sum(((y - mycavvaccaleib(x, p, secondg=False)) ** 2) / e ** 2)
import scipy.optimize
if __name__ == '__main__':
lcv = np.loadtxt(sys.argv[1], unpack=True)
secondg = False
try:
if int(sys.argv[2]) > 0:
secondg = True
except:
pass
x = lcv[1]
y = lcv[2]
e = lcv[3]
mjd = lcv[0]
ax = pl.figure(0, figsize=(10,5)).add_subplot(111)
#pl.errorbar(x, y, yerr=e, color="#47b56c", label="data")
p0 = [0] * 10
p0[0] = -4
peakdate = x[np.where(y == min(y))[0]]
if len(peakdate) > 1:
peakdate = peakdate[0]
p0[1] = peakdate + 5
p0[2] = 10 # sigma
#pl.draw()
lintail = np.where(x > peakdate + 50)[0]
if len(lintail) < 1:
print "no tail data"
linfit = np.polyfit(x[-2:], y[-2:], 1)
p0[3] = linfit[1]
p0[4] = linfit[0]
else:
linfit = np.polyfit(x[lintail], y[lintail], 1)
p0[3] = linfit[1]
p0[4] = linfit[0]
p0[5] = 0.1
p0[6] = peakdate - 20
p0[7] = -1
p0[8] = peakdate + 25
p0[9] = 10
pl.figure(3)
pl.clf()
# pf= scipy.optimize.minimize(minfunc,p0,args=(y,x,1), method='Powell')#,options={'maxiter':5})
if secondg:
p0[0] += 1.5
p0[1] *= 2
pl.plot(x[10:], mycavvaccaleib(x[10:], p0, secondg=True), 'm')
pf = scipy.optimize.minimize(minfunc, p0, args=(y[10:], x[10:], e[10:], 1), method='Powell') # ,options={'maxiter':5})
else:
pl.plot(x[10:], mycavvaccaleib(x[10:], p0, secondg=False), 'k')
pf = scipy.optimize.minimize(minfunc, p0, args=(y[10:], x[10:], e[10:], 0), method='Powell') # ,options={'maxiter':5})
#pl.figure(4)
pl.figure(0)
ax.errorbar(mjd+0.5-53000, y, yerr=e, fmt=None, ms=7,
alpha = 0.5, color='k', markersize=10,)
ax.plot(mjd+0.5-53000, y, '.', ms=7,
alpha = 0.5, color='#47b56c', markersize=10,
label = "SN 19"+sys.argv[1].split('/')[-1].\
replace('.dat', '').replace('.', ' '))
# mycavvaccaleib(x,pf.x, secondg=True)
mycavvaccaleib(x, pf.x, secondg=secondg)
ax.plot(mjd[10:]+0.5-53000, mycavvaccaleib(x[10:], pf.x, secondg=secondg), 'k',
linewidth=2, label="vacca leibundgut fit") # , alpha=0.5)
# pl.plot(x,mycavvaccaleib(x,pf.x, secondg=True), 'k',linewidth=2, label="fit")
xlen = mjd.max() - mjd.min()
ax.set_xlim(mjd.min()-xlen*0.02+0.5-53000, mjd.max()+xlen*0.02+0.5-53000)
ax.set_ylim(max(y + 0.1), min(y - 0.1))
ax2 = ax.twiny()
Vmax = 2449095.23-2453000
ax2.tick_params('both', length=10, width=1, which='major')
ax2.tick_params('both', length=5, width=1, which='minor')
ax2.set_xlabel("phase (days)")
ax2.set_xlim((ax.get_xlim()[0] - Vmax, ax.get_xlim()[1] - Vmax))
# pl.ylim(10,21)
pl.draw()
pl.legend()
ax.set_xlabel("JD - 24530000")
ax.set_ylabel("magnitude")
#pl.title(sys.argv[1].split('/')[-1].replace('.dat', '').replace('.', ' '))
#pl.show()
pl.tight_layout()
pl.savefig("../fits/" + sys.argv[1].split('/')[-1].replace('.dat', '.vdfit.pdf'))
cmd = "pdfcrop " + "../fits/" + sys.argv[1].split('/')[-1].replace('.dat', '.vdfit.pdf')
print cmd
os.system(cmd)
| fedhere/SESNCfAlib | vaccaleibundgut.py | Python | mit | 4,704 |
from tailorscad.builder.openscad import build_with_openscad
from tailorscad.builder.coffeescad import build_with_coffeescad
from tailorscad.constants import OPENSCAD
from tailorscad.constants import COFFEESCAD
def build_from_state(state):
    if state.scad_type == OPENSCAD:
        build_with_openscad(state)
    if state.scad_type == COFFEESCAD:
build_with_coffeescad(state)
| savorywatt/tailorSCAD | tailorscad/builder/__init__.py | Python | mit | 389 |
import yaml
from os import makedirs
from os.path import join,dirname,realpath,isdir
script_dir = dirname(realpath(__file__))
default_yml_filepath = join(script_dir,'defaults.yml')
defaults = {
"output_dir": 'output',
"header_img_dir": 'imgs/headers/',
"scaled_img_dir": 'imgs/scaled/',
"original_img_dir": 'imgs/original/',
"header_img_url": 'imgs/headers/',
"scaled_img_url": 'imgs/scaled/',
"original_img_url": 'imgs/original/',
"template_dir": join(script_dir,'templates'),
"max_article_img_width": 710,
"max_avatar_width": 710,
"database_file": "database.yml",
"static_dir": join(script_dir,'static'),
"copyright_msg": None,
"extra_links": [],
"import_to_discourse": False,
"strapline": None,
}
config = dict()
def getConfig():
if not config:
raise RuntimeError('config not loaded yet')
return config
def loadConfig(yml_filepath):
config.update(defaults)
with open(yml_filepath) as f:
patch = yaml.load(f.read())
config.update(patch)
# make paths absolute
config['header_img_dir'] = join(config['output_dir'],config['header_img_dir'])
config['scaled_img_dir'] = join(config['output_dir'],config['scaled_img_dir'])
config['original_img_dir'] = join(config['output_dir'],config['original_img_dir'])
config['database_file'] = join(config['output_dir'],config['database_file'])
def makeDirs():
if not config:
raise RuntimeError('config not loaded yet')
for key in ['header_img_dir','scaled_img_dir','original_img_dir']:
path = config[key]
if not isdir(path):
makedirs(path)
| naggie/dsblog | dsblog/environment.py | Python | mit | 1,659 |
#!/usr/bin/python
import sys
text = sys.stdin.read()
print 'Text:',text
words = text.split()
print 'Words:',words
wordcount = len(words)
print 'Wordcount:',wordcount
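# Example run (illustrative):
#   $ echo "To be or not to be" | ./somescript.py
#   ...
#   Wordcount: 6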
| MarsBighead/mustang | Python/somescript.py | Python | mit | 167 |
#!/usr/bin/python
import socket
import sys
HOST, PORT = "24.21.106.140", 8080
# Create a socket (SOCK_STREAM means a TCP socket)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
# Connect to server and send data
print "Connected to ", HOST, ":", PORT, "\n","Awaiting input\n"
data = sys.stdin.readline()
sock.connect((HOST, PORT))
print "Connected to ", HOST, ":", PORT, "\n","Awaiting input\n"
exit = False
while exit != True:
sock.sendall(data + "\n")
if data.strip() == 'bye':
exit = True
received = sock.recv(1024)
print "Sent: " , data
print "Received: " , received
data = sys.stdin.readline()
# Receive data from the server and shut down
finally:
sock.close()
| hillst/RnaMaker | bin/daemons/testclient.py | Python | mit | 789 |
DATASET_DIR = '/tmp'
BRAIN_DIR = '/tmp'
GENRES = [
'blues', 'classical', 'country', 'disco', 'hiphop',
'jazz', 'metal', 'pop', 'reggae', 'rock'
]
NUM_BEATS = 10
KEEP_FRAMES = 0
TRAIN_TEST_RATIO = [7, 3]
MODE = 'nn'
PCA = False
FEATURES = ['mfcc', 'dwt', 'beat']
MFCC_EXTRA = ['delta', 'ddelta', 'energy']
DWT = ['mean', 'std', 'max', 'min']
FEATURES_LENGTH = {
'mfcc' : 160,
'dwt' : 112,
'beat' : 11
}
FRAME_LENGTH = 0.025
HOP_LENGTH = 0.005
N_MFCC = 13
W_FRAME_SCALE = 10
NN = {
'NUM_HIDDEN_LAYERS' : 2,
'HIDDEN_INPUTS' : [1024, 1024],
'RANDOM' : True,
'BATCH_SIZE' : 100,
'TRAINING_CYCLES' : 1000,
'LEARNING_RATE' : 0.01,
'DROPOUT_PROB' : 0.6
}
CNN = {
'NUM_HIDDEN_LAYERS' : 2,
'NUM_DENSE_LAYERS' : 1,
'HIDDEN_FEATURES' : [32, 64],
'DENSE_INPUTS' : [128],
'INPUT_SHAPE' : [16, 17],
'PATCH_SIZE' : [5, 5],
'RANDOM' : False,
'STRIDES' : [1, 1, 1, 1],
'BATCH_SIZE' : 100,
'TRAINING_CYCLES' : 1000,
'LEARNING_RATE' : 0.01,
'DROPOUT_PROB' : 0.6
} | kapilgarg1996/gmc | gmc/conf/global_settings.py | Python | mit | 1,053 |
'''
@author: KyleLevien
'''
from ..defaultpackage.package import Package
class _Ghostview(Package):
def __init__(self):
Package.__init__(self)
| mason-bially/windows-installer | packages/_Ghostview/_Ghostview.py | Python | mit | 169 |
#!/usr/bin/python3
#-*- coding: utf-8 -*-
##############################################
# Home : http://netkiller.github.io
# Author: Neo <[email protected]>
# Upgrade: 2021-09-05
##############################################
# try:
import os, sys
module = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, module)
from netkiller.docker import *
# except ImportError as err:
# print("%s" %(err))
extra_hosts = [
'mongo.netkiller.cn:172.17.195.17', 'eos.netkiller.cn:172.17.15.17',
'cfca.netkiller.cn:172.17.15.17'
]
nginx = Services('nginx')
nginx.image('nginx:latest')
nginx.container_name('nginx')
# service.restart('always')
# service.hostname('www.netkiller.cn')
nginx.extra_hosts(extra_hosts)
# service.extra_hosts(['db.netkiller.cn:127.0.0.1','cache.netkiller.cn:127.0.0.1','api.netkiller.cn:127.0.0.1'])
# service.environment(['TA=Asia/Shanghai'])
# service.ports(['8080:8080'])
nginx.depends_on('test')
sms = Services('sms')
sms.image('sms:latest')
sms.container_name('nginx')
# sms.restart('always')
sms.hostname("7899")
sms.depends_on(['aaa', 'bbb', 'ccc'])
# # sms.debug()
test = Services('test')
test.image('test:latest')
# # sms.container_name('nginx')
# # sms.restart('always')
# # sms.hostname('www.netkiller.cn')
test.depends_on(nginx)
test.logging('fluentd', {
'fluentd-address': 'localhost:24224',
'tag': 'dev.redis.sfzito.com'
})
# # test.depends_on_object([service,sms])
# # test.debug()
development = Composes('development')
development.version('3.9')
development.services(nginx)
development.services(sms)
development.services(test)
# compose.networks(network)
# compose.networks(mynet)
# compose.volumes(volume)
development.workdir('/tmp/compose')
testing = Composes('testing')
testing.version('3.9')
testing.services(nginx)
testing.services(sms)
testing.services(test)
testing.workdir('/tmp/compose')
staging = Composes('staging')
staging.version('3.9')
staging.services(sms)
if __name__ == '__main__':
try:
docker = Docker({
'DOCKER_HOST': 'ssh://[email protected]',
'SSS': 'sdfff'
})
docker.sysctl({'neo': '1'})
docker.environment(development)
docker.environment(testing)
docker.main()
except KeyboardInterrupt:
print("Crtl+C Pressed. Shutting down.")
| oscm/devops | demo/docker.py | Python | mit | 2,328 |
# -*- coding: utf-8 -*-
# Copyright (c) 2006-2010 Tampere University of Technology
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Module for running keyword-driven tests
"""
from __future__ import with_statement
import time
import datetime
import re
from adapterlib.ToolProtocol import *
from adapterlib.ToolProtocolHTTP import *
import adapterlib.keyword as keyword
import adapterlib.keywordproxy as keywordproxy
from adapterlib.logger import KeywordLogger
class AdapterCompleter(object):
""" Simple class for doing tab-completion in interactive mode"""
def __init__(self, keywords ):
self.keywords = sorted(keywords)
def complete(self, text, state ):
response = None
if state == 0:
if text:
self.matches = [s for s in self.keywords if s and s.startswith(text)]
else:
self.matches = self.keywords[:]
try:
response = self.matches[state]
except IndexError:
response = None
return response
class Target(object):
    def __init__(self, name):
self.__name = name
def setup(self):
raise NotImplementedError()
def cleanup(self):
raise NotImplementedError()
@property
def name(self):
return self.__name
def takeScreenShot(self, path):
return False
class TestRunner(object):
"""
TestRunner class is used to run Keyword-driven tests.
The class allows test to be run interactively (given through stdin), from
file or from server.
To run tests from a server, TestRunner uses classes ToolProtocol and
ToolProtocolHTTP.
"""
def __init__(self, targets, delay, record = False ):
"""
Initializer.
@type targets: list
@param targets: list of System under test (SUT) identifiers.
@type delay: float
        @param delay: Wait-time between consecutive keywords (in seconds)
@type record: boolean
@param record: Is the test recorded to html-file
"""
self._targetNames = targets
self._targets = []
self.delay = delay
self._rec_process = None
self._kwCount = 1
self._logger = None
self._separator = " "
if record:
self._logger = KeywordLogger()
self._kw_cache = {}
# Special commands listed here for interactive mode completer
self._commands = {}
self._commands["exit"] = ["quit","q","exit"]
self._commands["kws"] = ["list","kws","list full","kws full"]
self._commands["info"] = ["info"]
self._commands["special"] = []
def _setupTestAutomation(self):
"""Sets up test automation environment
@rtype: boolean
@returns: True if success, False otherwise
"""
raise NotImplementedError()
def _cleanupTestAutomation(self):
"""Cleans up test automation environment"""
raise NotImplementedError()
def __setTarget(self,targetName):
if re.match("['\"].*['\"]",targetName):
targetName = targetName[1:-1]
if targetName == "test" or targetName == "testi":
print "Warning: 'test' and 'testi' considered dummy targets."
return True
for t in self._targets:
if t.name == targetName:
self._activeTarget = t
return True
return False
def initTest(self):
"""
Inits a test run.
Creates a log file and starts recording if defined.
"""
print "Setting up testing environment..."
if not self._setupTestAutomation():
return False
print "setup complete"
self._activeTarget = self._targets[0]
if self._logger:
print "Recording test to a file"
self._logger.startLog()
return True
def _stopTest(self):
"""
Stops a test run.
Closes the log-file and stops recording process.
"""
print "Cleaning up testing environment..."
self._cleanupTestAutomation()
print "clean up complete"
if self._logger:
self._logger.endLog()
print "Test finished"
def endTest(self):
print "Shutting down"
self._stopTest()
def keywordInfo(self, kw ):
kws = self._getKeywords()
if kw in kws:
print kw
self.printKw(kw,"#",kws[kw][1])
def printKw(self,kw,header,text):
print header*len(kw)
print
docstring = text.splitlines()
strip_len = 0
if len(docstring[0]) == 0:
docstring = docstring[1:]
for line in docstring:
if len(line.strip()) > 0:
first_line = line.lstrip()
strip_len = len(line) - len(first_line)
break
for line in docstring:
print line[strip_len:].rstrip()
print
def listKeywords(self, basekw = keyword.Keyword,full=False,header="#"):
kws = self._getKeywords({},basekw)
kws_keys = sorted(kws.keys())
for kw in kws_keys:
print kw
if full:
self.printKw(kw,header,kws[kw][1])
    def _getKeywords(self, kw_dictionary = None, basekw = keyword.Keyword):
        # A mutable default argument would be shared between calls, so the
        # fresh dictionary is created inside the function instead.
        if kw_dictionary is None:
            kw_dictionary = {}
        use_cache = len(kw_dictionary) == 0
        if use_cache and basekw in self._kw_cache:
            return self._kw_cache[basekw]
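        # Recursively walk the subclass tree: every subclass of basekw whose
        # name does not end in "Keyword" is indexed as a concrete keyword,
        # mapped to its defining module and docstring.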
for kw in basekw.__subclasses__():
kw_name = str(kw)[str(kw).rfind('.')+1:str(kw).rfind("'")]
if not kw_name.endswith("Keyword"):
kw_dictionary[kw_name] = (str(kw.__module__),str(kw.__doc__))
self._getKeywords(kw_dictionary,kw)
if use_cache:
self._kw_cache[basekw] = kw_dictionary
return kw_dictionary
def __instantiateKeywordProxyObject(self,kwproxy, kwName,kwAttr,kwproxy_class):
kwobject = None
try:
kwmodule = __import__(kwproxy_class, globals(), locals(), [kwproxy], -1)
# kwobject = eval("kwmodule." + kw + "()")
kwobject = getattr(kwmodule,kwproxy)()
if not kwobject.initialize(kwName, kwAttr,self._activeTarget):
kwobject = None
if kwobject:
print 'Recognized keyword: %s' % kwName
print 'Attributes: %s' % kwAttr
except Exception, e:
print e
print "Error: KeywordProxy error"
kwobject = None
return kwobject
def __instantiateKeywordObject(self,kw_name,attributes,kw_class):
kwobject = None
try:
kwmodule = __import__(kw_class, globals(), locals(), [kw_name], -1)
# kwobject = eval("kwmodule." + kw + "()")
kwobject = getattr(kwmodule,kw_name)()
print 'Recognized keyword: %s' % kw_name
print 'Attributes: %s' % attributes
if not kwobject.initialize(attributes,self._activeTarget):
print "Invalid parameters"
kwobject = None
except Exception, e:
print e
print "Error: Keyword not recognized!"
kwobject = None
return kwobject
def _instantiateKeyword(self, kwName, kwAttr):
kw_dictionary = self._getKeywords()
kwproxy_dictionary = self._getKeywords({}, keywordproxy.KeywordProxy)
kwobject = None
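        # Try exact (case-insensitive) matches against concrete keywords first;
        # the for-else falls back to KeywordProxy classes only when no direct
        # keyword matched.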
for kw in kw_dictionary:
if kw.lower() == kwName.lower():
kwobject = self.__instantiateKeywordObject(kw,kwAttr,kw_dictionary[kw][0])
break
else:
for kwproxy in kwproxy_dictionary:
kwobject = self.__instantiateKeywordProxyObject(kwproxy, kwName,kwAttr,kwproxy_dictionary[kwproxy][0])
if kwobject:
break
if not kwobject:
print "Error: Keyword not recognized!"
return kwobject
def __executeKeyword(self, kw):
"""
Executes a single keyword.
Searches a corresponding keyword object from the list of keywords and executes the keyword with that object.
@type kw: string
@param kw: executed keyword
@rtype: boolean or string
        @return: True if execution was successful; False if execution succeeded
                 but the keyword returned False; "ERROR" if there were problems
                 in the execution.
"""
print ""
print "Executing keyword: %s" % kw
#Which keyword
result = False
kw = kw.strip()
if kw.startswith("kw_"):
kw = kw[3:].strip()
# Testengine-note: generate-taskswitcher uses space as separator
if kw.startswith("LaunchApp") or kw.startswith("SetTarget"):
if not (kw.startswith("LaunchApp#") or kw.startswith("SetTarget#")):
kw = kw.replace(" ",self._separator,1)
kw_split = kw.split(self._separator,1)
kwName = kw_split[0].strip()
if len(kw_split) == 2:
kwAttr = kw_split[1].strip()
else:
kwAttr = ""
#Changing target
if kwName.lower() == "settarget":
result = self.__setTarget(kwAttr)
print 'result: %s' % str(result)
return result
kwobject = self._instantiateKeyword(kwName,kwAttr)
if not kwobject:
return "ERROR"
startTime = datetime.datetime.now()
result = kwobject.execute()
execTime = datetime.datetime.now() - startTime
print 'result: %s' % str(result)
kwDelay = kwobject.delay
if kwDelay != -1:
if self.delay > kwDelay:
kwDelay = self.delay
time.sleep(kwDelay)
if self._logger:
self._logger.logKeyword(self._activeTarget, kwobject, result, str(execTime))
        self._kwCount = self._kwCount + 1
return result
def _handleSpecialCommands(self,command):
return False
def runInteractive(self):
"""
Runs an interactive test.
Keywords are read from stdin.
"""
# Only import here, so that we can use completion mechanism
# Readline only available in unix
try:
import readline
kws = self._getKeywords({}, keyword.Keyword).keys()
for command_list in self._commands.values():
kws.extend(command_list)
readline.set_completer(AdapterCompleter(kws).complete)
readline.parse_and_bind('tab: complete')
except:
pass
while True:
try:
kw = raw_input(">").strip()
if kw in self._commands["exit"]:
return
elif kw == "":
continue
kw_split = kw.split(" ")
if kw_split[0] in self._commands["kws"]:
if len(kw_split) > 1 and kw_split[1]=="full" and " ".join(kw_split[0:2] )in self._commands["kws"]:
if len(kw_split) == 3:
char = kw_split[2]
else:
char = "#"
self.listKeywords(full=True,header=char)
else:
self.listKeywords(full=False)
elif kw_split[0] in self._commands["info"] and len(kw_split) == 2:
self.keywordInfo(kw_split[1])
elif not self._handleSpecialCommands(kw):
self.__executeKeyword(kw)
except EOFError:
break
def runFromServer(self, address, port, username = None, protocol= None ):
"""
Runs a test from server.
@type address: string
@param address: Address of the server
@type port: integer
@param port: Port of the server
@type username: string
@param username: Username is required when using http or https protocol
@type protocol: string
@param protocol: Protocol that is used in the connection. Options are http and https.
Plain socketis used if parameter not given.
"""
toolProtocol = None
#while True:
        if address is not None and port is not None:
if(protocol):
base,path = address.split("/",1)
toolProtocol = ToolProtocolHTTP()
toolProtocol.init(base,path,port,username,protocol)
else:
toolProtocol = ToolProtocol()
toolProtocol.init(address,port)
        if not toolProtocol.hasConnection():
#print "Connection to the MBT server failed, reconnecting..."
print "Connection to the MBT server failed."
# time.sleep(5)
return
#else:
# break
while True:
kw = ""
#if passive:
# kw = toolProtocol.receiveKeyword()
#else:
kw = toolProtocol.getKeyword()
            if kw == '' or kw == '\n' or kw == "ERROR":
return
result = self.__executeKeyword(kw)
if(result == "ERROR"):
toolProtocol.putResult(False)
toolProtocol.bye()
return
toolProtocol.putResult(result)
def runFromFile(self, fileName ):
"""
Runs a test from file.
@type fileName: string
@param fileName: path to the file that contains the test
"""
try:
with open(fileName,'r') as inputFile:
for line in inputFile:
kw = line.strip()
if not kw:
break
result = self.__executeKeyword(kw)
if(result == "ERROR"):
break
except IOError:
print "Error when reading file: %s" % fileName
| tema-mbt/tema-adapterlib | adapterlib/testrunner.py | Python | mit | 15,926 |
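TestRunner only assumes three things of a keyword object, as seen in __executeKeyword and the instantiation helpers above: initialize(attributes, target) returning a boolean, execute() returning the result, and a delay value consulted after execution. A minimal concrete keyword could therefore look like the following sketch; the class name "Wait" and its behaviour are illustrative assumptions, and discovery is automatic because _getKeywords() walks Keyword.__subclasses__() (the name must not end in "Keyword", which the runner filters out as a base class).
# Sketch of a hypothetical keyword, not part of the repo: "Wait <seconds>"
# pauses execution. Interface inferred from TestRunner above.
import time
from adapterlib.keyword import Keyword

class Wait(Keyword):

    def initialize(self, attributes, target):
        # attributes is the raw string following the keyword name
        try:
            self._seconds = float(attributes)
        except ValueError:
            return False   # TestRunner reports "Invalid parameters"
        return True

    def execute(self):
        time.sleep(self._seconds)
        return True   # False or "ERROR" would mark the keyword as failed

In interactive mode such a keyword would then be invoked as "wait 2"; matching against the class name is case-insensitive.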