repo_name (string, 6 to 61 chars) | path (string, 4 to 230 chars) | copies (string, 1 to 3 chars) | size (string, 4 to 6 chars) | text (string, 1.01k to 850k chars) | license (string, 15 classes) | hash (int64, -9,220,477,234,079,998,000 to 9,219,060,020B) | line_mean (float64, 11.6 to 96.6) | line_max (int64, 32 to 939) | alpha_frac (float64, 0.26 to 0.9) | autogenerated (bool, 1 class) | ratio (float64, 1.62 to 6.1) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
stone5495/NewsBlur | vendor/feedvalidator/demo/src/rdflib/exceptions.py | 18 | 2379 |
class Error(Exception):
"""Base class for rdflib exceptions."""
def __init__(self, msg=None):
Exception.__init__(self, msg)
self.msg = msg
class TypeCheckError(Error):
"""Parts of assertions are subject to type checks."""
def __init__(self, node):
Error.__init__(self, node)
self.type = type(node)
self.node = node
class SubjectTypeError(TypeCheckError):
"""Subject of an assertion must be an instance of URIRef."""
def __init__(self, node):
TypeCheckError.__init__(self, node)
self.msg = "Subject must be instance of URIRef or BNode: %s(%s)" \
% (self.node, self.type)
class PredicateTypeError(TypeCheckError):
"""Predicate of an assertion must be an instance of URIRef."""
def __init__(self, node):
TypeCheckError.__init__(self, node)
self.msg = "Predicate must be a URIRef instance: %s(%s)" \
% (self.node, self.type)
class ObjectTypeError(TypeCheckError):
"""Object of an assertion must be an instance of URIRef, Literal,
or BNode."""
def __init__(self, node):
TypeCheckError.__init__(self, node)
self.msg = "Object must be instance of URIRef, Literal, or BNode: %s(%s)" % \
(self.node, self.type)
class ContextTypeError(TypeCheckError):
"""Context of an assertion must be an instance of URIRef."""
def __init__(self, node):
TypeCheckError.__init__(self, node)
self.msg = "Context must be instance of URIRef or BNode: %s(%s)" \
% (self.node, self.type)
class ParserError(Error):
"""RDF Parser error."""
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
class SerializerDispatchNameError(Error):
"""No name set..."""
def __init__(self, msg):
Error.__init__(self)
self.msg = msg
class SerializerDispatchNameClashError(Error):
"""Name clash..."""
def __init__(self, msg):
Error.__init__(self)
self.msg = msg
class ParserDispatchNameError(Error):
"""No name set..."""
def __init__(self, msg):
Error.__init__(self)
self.msg = msg
class ParserDispatchNameClashError(Error):
"""Name clash..."""
def __init__(self, msg):
Error.__init__(self)
self.msg = msg
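# Hedged usage sketch (not part of the original module): how calling code
# might raise and catch one of these type-check errors. The _check_subject
# helper and its allowed_types argument are invented for illustration.
def _check_subject(node, allowed_types):
    if not isinstance(node, allowed_types):
        raise SubjectTypeError(node)

if __name__ == "__main__":
    try:
        _check_subject(42, (str,))  # deliberately the wrong type
    except SubjectTypeError as e:
        print(e.msg)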
| mit | -3,535,963,390,637,901,000 | 29.113924 | 85 | 0.577554 | false | 3.874593 | false | false | false |
sirex/Misago | misago/users/views/admin/warnings.py | 8 | 2683 | from django.contrib import messages
from django.utils.translation import ugettext_lazy as _
from misago.admin.views import generic
from misago.users.models import WarningLevel
from misago.users.forms.admin import WarningLevelForm
class WarningsAdmin(generic.AdminBaseMixin):
root_link = 'misago:admin:users:warnings:index'
Model = WarningLevel
Form = WarningLevelForm
templates_dir = 'misago/admin/warnings'
message_404 = _("Requested warning level does not exist.")
class WarningsList(WarningsAdmin, generic.ListView):
ordering = (('level', None),)
class NewWarning(WarningsAdmin, generic.ModelFormView):
message_submit = _('New warning level "%(name)s" has been saved.')
class EditWarning(WarningsAdmin, generic.ModelFormView):
message_submit = _('Warning level "%(name)s" has been edited.')
class DeleteWarning(WarningsAdmin, generic.ButtonView):
def button_action(self, request, target):
target.delete()
message = _('Warning level "%(name)s" has been deleted.')
messages.success(request, message % {'name': target.name})
class MoveDownWarning(WarningsAdmin, generic.ButtonView):
def button_action(self, request, target):
try:
other_target = WarningLevel.objects.filter(level__gt=target.level)
other_target = other_target.earliest('level')
except WarningLevel.DoesNotExist:
other_target = None
if other_target:
other_target.level, target.level = target.level, other_target.level
other_target.save(update_fields=['level'])
target.save(update_fields=['level'])
message = _('Warning level "%(name)s" has '
'been moved below "%(other)s".')
targets_names = {'name': target.name, 'other': other_target.name}
messages.success(request, message % targets_names)
class MoveUpWarning(WarningsAdmin, generic.ButtonView):
def button_action(self, request, target):
try:
other_target = WarningLevel.objects.filter(level__lt=target.level)
other_target = other_target.latest('level')
except WarningLevel.DoesNotExist:
other_target = None
if other_target:
other_target.level, target.level = target.level, other_target.level
other_target.save(update_fields=['level'])
target.save(update_fields=['level'])
message = _('Warning level "%(name)s" has '
'been moved above "%(other)s".')
targets_names = {'name': target.name, 'other': other_target.name}
messages.success(request, message % targets_names)
| gpl-2.0 | -7,912,987,130,653,455,000 | 36.263889 | 79 | 0.654864 | false | 4.179128 | false | false | false |
Driste/Freyr | validate.py | 1 | 1956 | import re
import regex
class Rule(dict):
''' breaks the rule into its parts along with validating it '''
def __init__(self, rule):
''' self['valid'] will be changed at any point to show whether the rule is valid or not. Error will tell you where.'''
self['valid'] = True
self['error'] = None
self['rawRule'] = rule
self.header()
def __getattr__(self, i):
''' Get any value from the dict '''
return self[i]
def __str__(self):
# Return the original rule.
return self['rawRule']
def __len__(self):
# Return the amount of options minus [error, valid, rawRule]
return len(self.keys()) - 3
def header(self):
''' maps the header options to self'''
if re.match(regex.rule_header, self['rawRule']):
header = re.match(regex.rule_header, self['rawRule']).groupdict()
for option in header:
self[option] = header[option]
else:
self['valid'] = False
self['error'] = "header"
def generalOptions(self):
pass
def payloadDetection(self):
pass
def nonpayloadDetection(self):
pass
def postDetection(self):
pass
def checkOptions(self):
''' Make sure all the options are valid '''
pass
def checkGutters(self):
''' Check between all the options to make sure there is nothing unknown '''
pass
if __name__ == "__main__":
myFile = open("rules/community.rules")
rule = 'alert tcp 192.168.100.40 65535 -> $HOME_NET !45:56 (content:"|00 01 86 a5|"; msg:"This is the test rule.";)'
r = Rule(rule)
print r.srcaddress
'''
i = 0
rule = {}
for line in myFile:
rule[i] = Rule(line)
if not rule[i].valid:
print rule[i].rawRule
i += 1
'''
| bsd-2-clause | 3,603,745,722,846,058,500 | 25.093333 | 126 | 0.529652 | false | 4.188437 | false | false | false |
Akasurde/ansible | lib/ansible/plugins/doc_fragments/files.py | 23 | 4100 | # -*- coding: utf-8 -*-
# Copyright: (c) 2014, Matt Martz <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
class ModuleDocFragment(object):
# Standard files documentation fragment
# Note: mode is overridden by the copy and template modules so if you change the description
# here, you should also change it there.
DOCUMENTATION = r'''
options:
mode:
description:
- The permissions the resulting file or directory should have.
- For those used to I(/usr/bin/chmod) remember that modes are actually octal numbers.
You must either add a leading zero so that Ansible's YAML parser knows it is an octal number
(like C(0644) or C(01777)) or quote it (like C('644') or C('1777')) so Ansible receives
a string and can do its own conversion from string into number.
- Giving Ansible a number without following one of these rules will end up with a decimal
number which will have unexpected results.
- As of Ansible 1.8, the mode may be specified as a symbolic mode (for example, C(u+rwx) or
C(u=rw,g=r,o=r)).
- If C(mode) is not specified and the destination file B(does not) exist, the default C(umask) on the system will be used
when setting the mode for the newly created file.
- If C(mode) is not specified and the destination file B(does) exist, the mode of the existing file will be used.
- Specifying C(mode) is the best way to ensure files are created with the correct permissions.
See CVE-2020-1736 for further details.
type: raw
owner:
description:
- Name of the user that should own the file/directory, as would be fed to I(chown).
type: str
group:
description:
- Name of the group that should own the file/directory, as would be fed to I(chown).
type: str
seuser:
description:
- The user part of the SELinux file context.
- By default it uses the C(system) policy, where applicable.
- When set to C(_default), it will use the C(user) portion of the policy if available.
type: str
serole:
description:
- The role part of the SELinux file context.
- When set to C(_default), it will use the C(role) portion of the policy if available.
type: str
setype:
description:
- The type part of the SELinux file context.
- When set to C(_default), it will use the C(type) portion of the policy if available.
type: str
selevel:
description:
- The level part of the SELinux file context.
- This is the MLS/MCS attribute, sometimes known as the C(range).
- When set to C(_default), it will use the C(level) portion of the policy if available.
type: str
unsafe_writes:
description:
- Influence when to use atomic operation to prevent data corruption or inconsistent reads from the target file.
- By default this module uses atomic operations to prevent data corruption or inconsistent reads from the target files,
but sometimes systems are configured or just broken in ways that prevent this. One example is docker mounted files,
which cannot be updated atomically from inside the container and can only be written in an unsafe manner.
- This option allows Ansible to fall back to unsafe methods of updating files when atomic operations fail
(however, it doesn't force Ansible to perform unsafe writes).
- IMPORTANT! Unsafe writes are subject to race conditions and can lead to data corruption.
type: bool
default: no
version_added: '2.2'
attributes:
description:
- The attributes the resulting file or directory should have.
- To get supported flags look at the man page for I(chattr) on the target system.
- This string should contain the attributes in the same order as the one displayed by I(lsattr).
- The C(=) operator is assumed as default, otherwise C(+) or C(-) operators need to be included in the string.
type: str
aliases: [ attr ]
version_added: '2.3'
'''
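# Hedged illustration (not part of the fragment itself): a module pulls these
# shared options in through ``extends_documentation_fragment``. The module
# name and short_description below are invented.
EXAMPLE_CONSUMER_DOCUMENTATION = r'''
---
module: example_file_module
short_description: Example consumer of the shared files options
extends_documentation_fragment:
- files
'''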
| gpl-3.0 | 5,775,242,545,966,777,000 | 47.235294 | 125 | 0.707317 | false | 4.047384 | false | false | false |
kirillsimin/ling720csce500 | dt-stats.py | 1 | 3474 | # -*- coding: utf-8 -*-
"""
Created on Tue Oct 27 23:52:51 2015
@author: Kirill
"""
import sys
#from nltk.parse import stanford
from collections import defaultdict
import essayclasses.allessaysfile
import essayclasses.anessay
import essayclasses.asentence
exportedDB = 'data/all-essays.csv'
allEssaysFile = essayclasses.allessaysfile.AllEssaysFile(exportedDB)
allEssays = allEssaysFile.essaysList()
arDTtotal = 0
arAtotal = 0
arTheTotal = 0
arSentTotal = 0
narDTtotal = 0
narAtotal = 0
narTheTotal = 0
narSentTotal = 0
arDTdict = defaultdict(int)
narDTdict = defaultdict(int)
count = 0
for anEssay in allEssays:
if count == 10 : break
arDT = 0
arA = 0
arThe = 0
arSent = 0
narA = 0
narThe = 0
narDT = 0
narSent = 0
thisEssay = essayclasses.anessay.AnEssay(anEssay)
essayText = thisEssay.getText()
print('')
if thisEssay.isArabic():
print('ARABIC ESSAY:')
else:
print('NON-ARABIC ESSAY:')
for aSentence in thisEssay.getSentences():
thisSentence = essayclasses.asentence.ASentence(aSentence)
indefArt = 0
defArt = 0
dts = thisSentence.getDTs()
for dt in dts:
dt = dt[0].lower()
if dt == 'a' or dt == 'an':
indefArt += 1
if dt == 'the':
defArt += 1
if thisEssay.isArabic():
arDTdict[dt] += 1
else:
narDTdict[dt] += 1
if thisEssay.isArabic():
arSent += 1
arA += indefArt
arThe += defArt
arDT += thisSentence.countDTs()
#print ('Sentences: ', arSent)
#print ('Verbs: ', arDT)
else:
narSent += 1
narA += indefArt
narThe += defArt
narDT += thisSentence.countDTs()
#print ('Sentences: ', narSent)
#print ('Verbs: ', narDT)
if thisEssay.isArabic():
print('Sentences: ',arSent)
print('DTs: ',arDT)
print('A\'s: ', arA)
print('The\'s: ', arThe)
arAtotal += arA
arTheTotal += arThe
arDTtotal += arDT
arSentTotal += arSent
else:
print('Sentences: ',narSent)
print('DTs: ', narDT)
print('As: ', narA)
print('The\'s: ', narThe)
narAtotal += narA
narTheTotal += narThe
narDTtotal += narDT
narSentTotal += narSent
count += 1
arDTList = []
tuplesDTsCount = arDTdict.items()
for i in tuplesDTsCount:
if i[1] >= 10:
arDTList.append(i[::-1])
narDTList = []
tuplesDTsCount = narDTdict.items()
for i in tuplesDTsCount:
if i[1] >= 10:
narDTList.append(i[::-1])
print('')
print('Arabic DT\'s:')
print(sorted(arDTList, reverse = True))
print('')
print('Non-Arabic DT\'s:')
print(sorted(narDTList, reverse = True))
print('')
print ('Arabic Sentences: ',arSentTotal)
print ('Arabic DTs per Sentence: ', arDTtotal/arSentTotal)
print ('Arabic A\'s per Sentence: ', arAtotal/arSentTotal)
print ('Arabic The\'s per Sentence: ', arTheTotal/arSentTotal)
print('')
print ('Non-Arabic Sentences: ',narSentTotal)
print ('Non-Arabic DTs per Sentence: ', narDTtotal/narSentTotal)
print ('Non-Arabic As per Sentence: ', narAtotal/narSentTotal)
print ('Non-Arabic The\'s per Sentence: ', narTheTotal/narSentTotal) | gpl-2.0 | 8,683,535,123,587,791,000 | 21.712418 | 69 | 0.570524 | false | 3.079787 | false | false | false |
DedMemez/ODS-August-2017 | parties/DistributedPartyDanceActivity.py | 1 | 1307 | # Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: toontown.parties.DistributedPartyDanceActivity
from toontown.parties import PartyGlobals
from toontown.parties.DistributedPartyDanceActivityBase import DistributedPartyDanceActivityBase
from toontown.toonbase import TTLocalizer
class DistributedPartyDanceActivity(DistributedPartyDanceActivityBase):
notify = directNotify.newCategory('DistributedPartyDanceActivity')
def __init__(self, cr):
DistributedPartyDanceActivityBase.__init__(self, cr, PartyGlobals.ActivityIds.PartyDance, PartyGlobals.DancePatternToAnims)
def getInstructions(self):
return TTLocalizer.PartyDanceActivityInstructions
def getTitle(self):
return TTLocalizer.PartyDanceActivityTitle
def load(self):
DistributedPartyDanceActivityBase.load(self)
parentGroup = self.danceFloor.find('**/discoBall_mesh')
correctBall = self.danceFloor.find('**/discoBall_10')
origBall = self.danceFloor.find('**/discoBall_mesh_orig')
if not correctBall.isEmpty():
numChildren = parentGroup.getNumChildren()
for i in xrange(numChildren):
child = parentGroup.getChild(i)
if child != correctBall:
child.hide() | apache-2.0 | -7,339,450,117,566,262,000 | 44.75 | 131 | 0.714614 | false | 3.889881 | false | false | false |
kr41/ggrc-core | test/integration/ggrc/services/test_search.py | 7 | 2305 | # Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Test /search REST API
"""
from ggrc.models import Control
from integration.ggrc import TestCase
from integration.ggrc.api_helper import Api
from integration.ggrc.generator import ObjectGenerator
class TestResource(TestCase):
"""
Test /search REST API
"""
def setUp(self):
TestCase.setUp(self)
self.api = Api()
self.object_generator = ObjectGenerator()
self.create_objects()
def create_objects(self):
"""Create objects to be searched.
Creates five controls and makes relationships.
0 1 2 3 4
|---| |---| |
|-------|-------|
"""
self.objects = [
self.object_generator.generate_object(Control)[1].id
for _ in xrange(5)
]
self.objects = Control.eager_query().filter(
Control.id.in_(self.objects)
).all()
for src, dst in [(0, 1), (0, 2), (2, 3), (2, 4)]:
self.object_generator.generate_relationship(
self.objects[src], self.objects[dst]
)
def search(self, *args, **kwargs):
res, _ = self.api.search(*args, **kwargs)
return res.json["results"]["entries"]
def test_search_all(self):
"""Test search for all objects of a type."""
res, _ = self.api.search("Control")
self.assertEqual(len(res.json["results"]["entries"]), 5)
def test_search_query(self):
"""Test search with query by title."""
entries = self.search("Control", q=self.objects[0].title)
self.assertEqual({entry["id"] for entry in entries},
{self.objects[0].id})
def test_search_relevant(self):
"""Test search with 'relevant to' single object."""
relevant_objects = "Control:{}".format(self.objects[0].id)
entries = self.search("Control", relevant_objects=relevant_objects)
self.assertEqual({entry["id"] for entry in entries},
{self.objects[i].id for i in [1, 2]})
def test_search_relevant_multi(self):
"""Test search with 'relevant to' multiple objects."""
ids = ",".join("Control:{}".format(self.objects[i].id) for i in (0, 3))
entries = self.search("Control", relevant_objects=ids)
self.assertEqual({entry["id"] for entry in entries},
{self.objects[2].id})
| apache-2.0 | -8,425,788,867,269,844,000 | 31.013889 | 78 | 0.62256 | false | 3.629921 | true | false | false |
domenicosolazzo/jroc | tests/jroc/nlp/input/cleaning/test_datacleaner.py | 1 | 2640 | from . import DataCleaner
import unittest
import os
class DataCleanerTestCase(unittest.TestCase):
dataCleaner = None
currentDirectory = "%s" % (os.path.dirname(os.path.realpath(__file__)), )
testTextsDirectory = "%s/../../../data/text/" % (currentDirectory, )
def setUp(self):
self.dataCleaner = DataCleaner()
def tearDown(self):
self.dataCleaner = None
def test_datacleaner_with_empty_character_list(self):
"""
Check that the data cleaner returns the same text if an empty
list of characters has been given in input
"""
text = "This is a text"
expected = "This is a text"
actual = self.dataCleaner.filterCharacters(characters=[], text=text)
self.assertEqual(expected, actual)
def test_datacleaner_with_character_list(self):
text = "This is a text -"
expected = "This is a text "
actual = self.dataCleaner.filterCharacters(characters=["-"], text=text)
self.assertEqual(expected, actual)
def test_datacleaner_with_default_character_list(self):
text = "This is a text -"
expected = "This is a text "
actual = self.dataCleaner.filterCharacters(text=text)
self.assertEqual(expected, actual)
def test_datacleaner_exception_if_characters_is_not_list(self):
characters = "String"
self.assertRaises(AssertionError, self.dataCleaner.filterCharacters, characters)
def test_datacleaner_replacementcharacter(self):
text = "This is a text -"
replacementCharacter = ""
expected = "This is a text "
actual = self.dataCleaner.filterCharacters(replacement_character=replacementCharacter, text=text)
self.assertEqual(expected, actual)
def test_datacleaner_replacemenent_character_is_not_string(self):
text = "This is a text -"
replacemenentCharacter = 1
expected = "This is a text "
actual = self.dataCleaner.filterCharacters(replacement_character=replacemenentCharacter, text=text)
self.assertEqual(expected, actual)
def test_datacleaner_text_is_not_string(self):
text = 1234
self.assertRaises(AssertionError, self.dataCleaner.filterCharacters, [], "", text)
def helper_readFilename(self, filename=''):
stopwords = []
if not filename:
raise Exception("The file is empty")
fileToRead = "%s%s" % (self.testTextsDirectory, filename)
f = open(fileToRead, 'r')
text = f.read()
f.close()
return text
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -6,571,025,352,266,535,000 | 33.285714 | 107 | 0.647727 | false | 4 | true | false | false |
UManPychron/pychron | pychron/options/define_equilibration.py | 2 | 1394 | # ===============================================================================
# Copyright 2018 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
from traits.api import Int, Bool
from pychron.options.aux_plot import AuxPlot
from pychron.options.fit import FitOptions
from pychron.options.views.define_equilibration_views import VIEWS
from pychron.pychron_constants import MAIN, DISPLAY
class DefineEquilibrationAuxPlot(AuxPlot):
equilibration_time = Int(100)
class DefineEquilibrationOptions(FitOptions):
aux_plot_klass = DefineEquilibrationAuxPlot
show_statistics = Bool(False)
ncols = Int
def initialize(self):
self.subview_names = [MAIN, DISPLAY]
def _get_subview(self, name):
return VIEWS[name]
# ============= EOF =============================================
| apache-2.0 | -7,801,087,472,446,347,000 | 35.684211 | 81 | 0.638451 | false | 4.237082 | false | false | false |
otaviobp/eyeD3 | src/eyeD3/binfuncs.py | 6 | 3558 | ################################################################################
#
# Copyright (C) 2002-2005 Travis Shirk <[email protected]>
# Copyright (C) 2001 Ryan Finne <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
################################################################################
# Accepts a string of bytes (chars) and returns an array of bits
# representing the bytes in big endian byte (Most significant byte/bit first)
# order. Each byte can have its higher bits ignored by passing an sz arg.
def bytes2bin(bytes, sz = 8):
if sz < 1 or sz > 8:
raise ValueError("Invalid sz value: " + str(sz));
retVal = [];
for b in bytes:
bits = [];
b = ord(b);
while b > 0:
bits.append(b & 1);
b >>= 1;
if len(bits) < sz:
bits.extend([0] * (sz - len(bits)));
elif len(bits) > sz:
bits = bits[:sz];
# Big endian byte order.
bits.reverse();
retVal.extend(bits);
if len(retVal) == 0:
retVal = [0];
return retVal;
# Convert an array of bits (MSB first) into a string of characters.
def bin2bytes(x):
bits = [];
bits.extend(x);
bits.reverse();
i = 0;
out = '';
multi = 1;
ttl = 0;
for b in bits:
i += 1;
ttl += b * multi;
multi *= 2;
if i == 8:
i = 0;
out += chr(ttl);
multi = 1;
ttl = 0;
if multi > 1:
out += chr(ttl);
out = list(out);
out.reverse();
out = ''.join(out);
return out;
# Convert an array of "bits" (MSB first) to its decimal value.
def bin2dec(x):
bits = [];
bits.extend(x);
bits.reverse();
multi = 1;
value = long(0);
for b in bits:
value += b * multi;
multi *= 2;
return value;
def bytes2dec(bytes, sz = 8):
return bin2dec(bytes2bin(bytes, sz));
# Convert a decimal value to an array of bits (MSB first), optionally
# padding the overall size to p bits.
def dec2bin(n, p = 0):
assert(n >= 0)
retVal = [];
while n > 0:
retVal.append(n & 1);
n >>= 1;
if p > 0:
retVal.extend([0] * (p - len(retVal)));
retVal.reverse();
return retVal;
def dec2bytes(n, p = 0):
return bin2bytes(dec2bin(n, p));
# Convert a list of bits (MSB first) to a synch safe list of bits (section 6.2
# of the ID3 2.4 spec).
def bin2synchsafe(x):
if len(x) > 32 or bin2dec(x) > 268435456: # 2^28
raise ValueError("Invalid value");
elif len(x) < 8:
return x;
n = bin2dec(x);
bites = "";
bites += chr((n >> 21) & 0x7f);
bites += chr((n >> 14) & 0x7f);
bites += chr((n >> 7) & 0x7f);
bites += chr((n >> 0) & 0x7f);
bits = bytes2bin(bites);
if len(bits) < 32:
bits = ([0] * (32 - len(x))) + bits;
return bits;
def bytes2str(bytes):
s = ""
for b in bytes:
s += ("\\x%02x" % ord(b))
return s
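# A few worked examples (hedged sketch, not part of the original module;
# assumes the same Python 2 string-of-bytes conventions used above):
if __name__ == "__main__":
    assert bytes2bin("\x0b") == [0, 0, 0, 0, 1, 0, 1, 1]   # 0x0b, MSB first
    assert dec2bin(5) == [1, 0, 1]
    assert bin2dec([1, 0, 1]) == 5
    assert bytes2dec("\x01\x00") == 256
    # 255 spread across 7-bit groups per the ID3v2.4 synch safe scheme:
    assert bin2dec(bin2synchsafe(dec2bin(255, 32))) == 383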
| gpl-2.0 | 6,598,213,499,006,175,000 | 25.161765 | 80 | 0.55593 | false | 3.279263 | false | false | false |
tensorflow/tpu | models/official/detection/projects/fashionpedia/dataloader/tf_example_decoder.py | 1 | 7709 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tensorflow Example proto decoder for Attribute-Mask R-CNN.
A decoder to decode string tensors containing serialized tensorflow.Example
protos for Attribute-Mask R-CNN model.
"""
import tensorflow.compat.v1 as tf
def _get_source_id_from_encoded_image(parsed_tensors):
return tf.strings.as_string(
tf.strings.to_hash_bucket_fast(parsed_tensors['image/encoded'],
2**63 - 1))
class TfExampleDecoder(object):
"""Tensorflow Example proto decoder."""
def __init__(self, include_mask=False, regenerate_source_id=False):
self._include_mask = include_mask
self._regenerate_source_id = regenerate_source_id
self._keys_to_features = {
'image/encoded': tf.FixedLenFeature((), tf.string),
'image/source_id': tf.FixedLenFeature((), tf.string, ''),
'image/height': tf.FixedLenFeature((), tf.int64, -1),
'image/width': tf.FixedLenFeature((), tf.int64, -1),
'image/object/bbox/xmin': tf.VarLenFeature(tf.float32),
'image/object/bbox/xmax': tf.VarLenFeature(tf.float32),
'image/object/bbox/ymin': tf.VarLenFeature(tf.float32),
'image/object/bbox/ymax': tf.VarLenFeature(tf.float32),
'image/object/class/label': tf.VarLenFeature(tf.int64),
'image/object/attribute/label': tf.VarLenFeature(tf.int64),
'image/object/area': tf.VarLenFeature(tf.float32),
'image/object/is_crowd': tf.VarLenFeature(tf.int64),
}
if include_mask:
self._keys_to_features.update({
'image/object/mask':
tf.VarLenFeature(tf.string),
})
def _decode_image(self, parsed_tensors):
"""Decodes the image and set its static shape."""
image = tf.io.decode_image(parsed_tensors['image/encoded'], channels=3)
image.set_shape([None, None, 3])
return image
def _decode_boxes(self, parsed_tensors):
"""Concat box coordinates in the format of [ymin, xmin, ymax, xmax]."""
xmin = parsed_tensors['image/object/bbox/xmin']
xmax = parsed_tensors['image/object/bbox/xmax']
ymin = parsed_tensors['image/object/bbox/ymin']
ymax = parsed_tensors['image/object/bbox/ymax']
return tf.stack([ymin, xmin, ymax, xmax], axis=-1)
def _decode_masks(self, parsed_tensors):
"""Decode a set of PNG masks to the tf.float32 tensors."""
def _decode_png_mask(png_bytes):
mask = tf.squeeze(
tf.io.decode_png(png_bytes, channels=1, dtype=tf.uint8), axis=-1)
mask = tf.cast(mask, dtype=tf.float32)
mask.set_shape([None, None])
return mask
height = parsed_tensors['image/height']
width = parsed_tensors['image/width']
masks = parsed_tensors['image/object/mask']
return tf.cond(
tf.greater(tf.size(masks), 0),
lambda: tf.map_fn(_decode_png_mask, masks, dtype=tf.float32),
lambda: tf.zeros([0, height, width], dtype=tf.float32))
def _decode_areas(self, parsed_tensors):
xmin = parsed_tensors['image/object/bbox/xmin']
xmax = parsed_tensors['image/object/bbox/xmax']
ymin = parsed_tensors['image/object/bbox/ymin']
ymax = parsed_tensors['image/object/bbox/ymax']
height = tf.cast(parsed_tensors['image/height'], dtype=tf.float32)
width = tf.cast(parsed_tensors['image/width'], dtype=tf.float32)
return tf.cond(
tf.greater(tf.shape(parsed_tensors['image/object/area'])[0], 0),
lambda: parsed_tensors['image/object/area'],
lambda: (xmax - xmin) * (ymax - ymin) * height * width)
def decode(self, serialized_example):
"""Decode the serialized example.
Args:
serialized_example: a single serialized tf.Example string.
Returns:
decoded_tensors: a dictionary of tensors with the following fields:
- image: a uint8 tensor of shape [None, None, 3].
- source_id: a string scalar tensor.
- height: an integer scalar tensor.
- width: an integer scalar tensor.
- groundtruth_classes: an int64 tensor of shape [None].
- groundtruth_attributes: an int64 tensor of shape [None].
- groundtruth_is_crowd: a bool tensor of shape [None].
- groundtruth_area: a float32 tensor of shape [None].
- groundtruth_boxes: a float32 tensor of shape [None, 4].
- groundtruth_instance_masks: a float32 tensor of shape
[None, None, None].
- groundtruth_instance_masks_png: a string tensor of shape [None].
"""
parsed_tensors = tf.io.parse_single_example(
serialized_example, self._keys_to_features)
for k in parsed_tensors:
if isinstance(parsed_tensors[k], tf.SparseTensor):
if parsed_tensors[k].dtype == tf.string:
parsed_tensors[k] = tf.sparse_tensor_to_dense(
parsed_tensors[k], default_value='')
else:
parsed_tensors[k] = tf.sparse_tensor_to_dense(
parsed_tensors[k], default_value=0)
image = self._decode_image(parsed_tensors)
boxes = self._decode_boxes(parsed_tensors)
areas = self._decode_areas(parsed_tensors)
decode_image_shape = tf.logical_or(
tf.equal(parsed_tensors['image/height'], -1),
tf.equal(parsed_tensors['image/width'], -1))
image_shape = tf.cast(tf.shape(image), dtype=tf.int64)
parsed_tensors['image/height'] = tf.where(decode_image_shape,
image_shape[0],
parsed_tensors['image/height'])
parsed_tensors['image/width'] = tf.where(decode_image_shape, image_shape[1],
parsed_tensors['image/width'])
is_crowds = tf.cond(
tf.greater(tf.shape(parsed_tensors['image/object/is_crowd'])[0], 0),
lambda: tf.cast(parsed_tensors['image/object/is_crowd'], dtype=tf.bool),
lambda: tf.zeros_like(parsed_tensors['image/object/class/label'], dtype=tf.bool)) # pylint: disable=line-too-long
if self._regenerate_source_id:
source_id = _get_source_id_from_encoded_image(parsed_tensors)
else:
source_id = tf.cond(
tf.greater(tf.strings.length(parsed_tensors['image/source_id']),
0), lambda: parsed_tensors['image/source_id'],
lambda: _get_source_id_from_encoded_image(parsed_tensors))
if self._include_mask:
masks = self._decode_masks(parsed_tensors)
decoded_tensors = {
'image': image,
'source_id': source_id,
'height': parsed_tensors['image/height'],
'width': parsed_tensors['image/width'],
'groundtruth_classes': parsed_tensors['image/object/class/label'],
'groundtruth_attributes':
parsed_tensors['image/object/attribute/label'],
'groundtruth_is_crowd': is_crowds,
'groundtruth_area': areas,
'groundtruth_boxes': boxes,
}
if self._include_mask:
decoded_tensors.update({
'groundtruth_instance_masks': masks,
'groundtruth_instance_masks_png': parsed_tensors['image/object/mask'],
})
return decoded_tensors
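# Hedged usage sketch (not part of the original module): wiring the decoder
# into a tf.data input pipeline. The file pattern and interleave settings
# below are invented for illustration.
def _build_parsed_dataset(file_pattern='/tmp/train-*.tfrecord', include_mask=True):
  decoder = TfExampleDecoder(include_mask=include_mask)
  files = tf.data.Dataset.list_files(file_pattern)
  dataset = files.interleave(tf.data.TFRecordDataset, cycle_length=4)
  # Each serialized tf.Example becomes the dictionary documented in decode():
  # image, source_id, height, width, groundtruth_* tensors, and masks.
  return dataset.map(decoder.decode, num_parallel_calls=tf.data.experimental.AUTOTUNE)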
| apache-2.0 | -6,247,439,793,533,594,000 | 42.553672 | 122 | 0.636918 | false | 3.679714 | false | false | false |
MCLConsortium/mcl-site | src/jpl.mcl.site.knowledge/src/jpl/mcl/site/knowledge/tests/test_setup.py | 1 | 1263 | # encoding: utf-8
u'''MCL Site Knowledge — setup tests'''
from jpl.mcl.site.knowledge.testing import JPL_MCL_SITE_KNOWLEDGE_INTEGRATION_TESTING
import unittest, plone.api
class SetupTest(unittest.TestCase):
layer = JPL_MCL_SITE_KNOWLEDGE_INTEGRATION_TESTING
def setUp(self):
super(SetupTest, self).setUp()
self.portal = self.layer['portal']
def testCatalogIndexes(self):
u'''Ensure the catalog has our custom indexes'''
catalog = plone.api.portal.get_tool('portal_catalog')
indexes = catalog.indexes()
for index in ('subjectURI', 'phone', 'homepage', 'dcbflag', 'dcpflag'):
self.assertTrue(index in indexes, u'"{}" index not installed'.format(index))
def testCatalogMetadata(self):
u'''Check that the catalog has our custom metadata columns'''
catalog = plone.api.portal.get_tool('portal_catalog')
columns = catalog.schema()
for column in ('subjectURI', 'phone', 'homepage', 'dcbflag', 'dcpflag'):
self.assertTrue(column in columns, u'"{}" column not installed'.format(column))
def test_suite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
| apache-2.0 | -8,871,059,834,431,624,000 | 37.212121 | 91 | 0.666931 | false | 3.687135 | true | false | false |
eustislab/horton | doc/update_hf_dft_examples.py | 1 | 1727 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2015 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
#--
#!/usr/bin/env python
'''Update the table with HF/DFT examples based on files in data/examples/hf_dft.'''
from glob import glob
from cStringIO import StringIO
import json, os
from common import write_rst_table, write_if_changed
cases = []
for fn_py in sorted(glob('../data/examples/hf_dft/*.py')):
with open(fn_py) as f:
s = ''
for line in f:
if line.startswith('#JSON'):
s += line[5:]
if len(s) == 0:
raise RuntimeError('Could not find JSON line in HF/DFT example script.')
meta = json.loads(s)
case = [meta['difficulty'], os.path.basename(fn_py), meta['lot'],
meta['scf'], meta['description']]
cases.append(case)
cases.sort()
table = [['File name', 'LOT', 'SCF', 'Description']] + [case[1:] for case in cases]
f = StringIO()
write_rst_table(f, table)
write_if_changed('hf_dft_examples.rst.inc', f.getvalue())
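# Hedged illustration (not part of the script): the "#JSON" header this loop
# expects near the top of each example script. Key names come from the code
# above; the values shown here are invented.
#
#   #JSON {"difficulty": 1, "lot": "RHF/cc-pvdz", "scf": "PlainSCFSolver",
#   #JSON  "description": "Minimal closed-shell Hartree-Fock example"}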
| gpl-3.0 | 7,490,750,100,571,261,000 | 34.244898 | 83 | 0.68095 | false | 3.474849 | false | false | false |
DigitalPublishingToolkit/Society-of-the-Query-Reader | scripts/index_documents.py | 1 | 1267 | import os
import html5lib
from xml.etree import ElementTree as etree
from whoosh.index import open_dir
ix = open_dir("index")
with ix.writer() as writer:
for root, dirs, files in os.walk('essays'):
# print root, files
if 'index.html' in files:
fp = os.path.join(root, 'index.html')
print fp
src = open(fp).read().decode("utf-8")
if type(fp) != unicode:
fp = fp.decode("utf-8")
tree = html5lib.parse(src, namespaceHTMLElements=False)
paragraphs = tree.findall(".//p")
try:
title = paragraphs[0].text.strip()
except AttributeError:
title = u""
authors = u""
# if paragraphs[1].text:
# authors = paragraphs[1].text.strip()
text = etree.tostring(tree, method="text", encoding="utf-8").decode("utf-8")
# lines = text.splitlines()
# for i, x in enumerate(lines[:10]):
# print i, x
# print
# print u"Indexing {0}".format(title).encode("utf-8")
writer.add_document(title=title, type=u"essay", authors=authors, path=fp, content=text, ncontent=title+u" "+text)
# writer.commit()
| gpl-3.0 | -1,969,048,108,103,752,200 | 36.264706 | 125 | 0.53749 | false | 3.839394 | false | false | false |
barisumog/pyllage | pyllage/__init__.py | 1 | 1917 | #
# -*- coding: utf-8 -*-
#
# pyllage
#
# Copyright (C) 2013 barisumog at gmail.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from .http import get
from .parser import parse
from .selectors import choose, relative, rip_data, rip_index, between
from .utils import stack_to_file, get_stack
__title__ = "pyllage"
__version__ = "0.1.2"
__author__ = "barisumog at gmail.com"
__copyright__ = "copyright 2013 barisumog"
__license__ = "GPLv3"
__doc__ = """Please see the README.rst for full documentation.
Quick Start
-----------
Here's a few quick examples illustrating *some* of the functions::
import pyllage
stack = pyllage.get_stack("http://somesite.com/etcetera")
# get all links, print the href=... parts
links = pyllage.choose(stack, tag="a")
for key in links:
print(links[key]["attrs"])
# get all text data except scripts and print it
texts = pyllage.choose(stack, tag="script", select=False)
data = pyllage.rip_data(texts)
print("\n".join(data))
# get all spans and divs with class=help (but not class=helpmore)
helps = pyllage.choose(stack, tag="span div", attrs="class=help", exact=True)
# get all divs containing the word pyllage in their text part
pylls = pyllage.choose(stack, tag="div", data="pyllage")
""" | gpl-3.0 | 4,734,646,366,030,803,000 | 29.444444 | 81 | 0.684924 | false | 3.454054 | false | false | false |
vhaupert/mitmproxy | mitmproxy/proxy/protocol/http2.py | 1 | 30035 | import threading
import time
import functools
from typing import Dict, Callable, Any, List, Optional # noqa
import h2.exceptions
from h2 import connection
from h2 import events
import queue
from mitmproxy import connections # noqa
from mitmproxy import exceptions
from mitmproxy import http
from mitmproxy.proxy.protocol import base
from mitmproxy.proxy.protocol import http as httpbase
import mitmproxy.net.http
from mitmproxy.net import tcp
from mitmproxy.coretypes import basethread
from mitmproxy.net.http import http2, headers, url
from mitmproxy.utils import human
class SafeH2Connection(connection.H2Connection):
def __init__(self, conn, *args, **kwargs):
super().__init__(*args, **kwargs)
self.conn = conn
self.lock = threading.RLock()
def safe_acknowledge_received_data(self, acknowledged_size: int, stream_id: int):
if acknowledged_size == 0:
return
with self.lock:
self.acknowledge_received_data(acknowledged_size, stream_id)
self.conn.send(self.data_to_send())
def safe_reset_stream(self, stream_id: int, error_code: int):
with self.lock:
try:
self.reset_stream(stream_id, error_code)
except h2.exceptions.StreamClosedError: # pragma: no cover
# stream is already closed - good
pass
self.conn.send(self.data_to_send())
def safe_update_settings(self, new_settings: Dict[int, Any]):
with self.lock:
self.update_settings(new_settings)
self.conn.send(self.data_to_send())
def safe_send_headers(self, raise_zombie: Callable, stream_id: int, headers: headers.Headers, **kwargs):
with self.lock:
raise_zombie()
self.send_headers(stream_id, headers.fields, **kwargs)
self.conn.send(self.data_to_send())
def safe_send_body(self, raise_zombie: Callable, stream_id: int, chunks: List[bytes], end_stream=True):
for chunk in chunks:
position = 0
while position < len(chunk):
self.lock.acquire()
raise_zombie(self.lock.release)
max_outbound_frame_size = self.max_outbound_frame_size
frame_chunk = chunk[position:position + max_outbound_frame_size]
if self.local_flow_control_window(stream_id) < len(frame_chunk): # pragma: no cover
self.lock.release()
time.sleep(0.1)
continue
self.send_data(stream_id, frame_chunk)
try:
self.conn.send(self.data_to_send())
except Exception as e: # pragma: no cover
raise e
finally:
self.lock.release()
position += max_outbound_frame_size
if end_stream:
with self.lock:
raise_zombie()
self.end_stream(stream_id)
self.conn.send(self.data_to_send())
class Http2Layer(base.Layer):
if False:
# mypy type hints
client_conn: connections.ClientConnection = None
class H2ConnLogger:
def __init__(self, name, log):
self.name = name
self.log = log
def debug(self, fmtstr, *args):
msg = "H2Conn {}: {}".format(self.name, fmtstr % args)
self.log(msg, "debug")
def trace(self, fmtstr, *args):
pass
def __init__(self, ctx, mode: str) -> None:
super().__init__(ctx)
self.mode = mode
self.streams: Dict[int, Http2SingleStreamLayer] = dict()
self.server_to_client_stream_ids: Dict[int, int] = dict([(0, 0)])
self.connections: Dict[object, SafeH2Connection] = {}
config = h2.config.H2Configuration(
client_side=False,
header_encoding=False,
validate_outbound_headers=False,
validate_inbound_headers=False,
logger=self.H2ConnLogger("client", self.log))
self.connections[self.client_conn] = SafeH2Connection(self.client_conn, config=config)
def _initiate_server_conn(self):
if self.server_conn.connected():
config = h2.config.H2Configuration(
client_side=True,
header_encoding=False,
validate_outbound_headers=False,
validate_inbound_headers=False,
logger=self.H2ConnLogger("server", self.log))
self.connections[self.server_conn] = SafeH2Connection(self.server_conn, config=config)
self.connections[self.server_conn].initiate_connection()
self.server_conn.send(self.connections[self.server_conn].data_to_send())
def _complete_handshake(self):
preamble = self.client_conn.rfile.read(24)
self.connections[self.client_conn].initiate_connection()
self.connections[self.client_conn].receive_data(preamble)
self.client_conn.send(self.connections[self.client_conn].data_to_send())
def next_layer(self): # pragma: no cover
# WebSocket over HTTP/2?
# CONNECT for proxying?
raise NotImplementedError()
def _handle_event(self, event, source_conn, other_conn, is_server):
self.log(
"HTTP2 Event from {}".format("server" if is_server else "client"),
"debug",
[repr(event)]
)
eid = None
if hasattr(event, 'stream_id'):
if is_server and event.stream_id % 2 == 1:
eid = self.server_to_client_stream_ids[event.stream_id]
else:
eid = event.stream_id
if isinstance(event, events.RequestReceived):
return self._handle_request_received(eid, event)
elif isinstance(event, events.ResponseReceived):
return self._handle_response_received(eid, event)
elif isinstance(event, events.DataReceived):
return self._handle_data_received(eid, event, source_conn)
elif isinstance(event, events.StreamEnded):
return self._handle_stream_ended(eid)
elif isinstance(event, events.StreamReset):
return self._handle_stream_reset(eid, event, is_server, other_conn)
elif isinstance(event, events.RemoteSettingsChanged):
return self._handle_remote_settings_changed(event, other_conn)
elif isinstance(event, events.ConnectionTerminated):
return self._handle_connection_terminated(event, is_server)
elif isinstance(event, events.PushedStreamReceived):
return self._handle_pushed_stream_received(event)
elif isinstance(event, events.PriorityUpdated):
return self._handle_priority_updated(eid, event)
elif isinstance(event, events.TrailersReceived):
return self._handle_trailers(eid, event, is_server, other_conn)
# fail-safe for unhandled events
return True
def _handle_request_received(self, eid, event):
headers = mitmproxy.net.http.Headers([[k, v] for k, v in event.headers])
self.streams[eid] = Http2SingleStreamLayer(self, self.connections[self.client_conn], eid, headers)
self.streams[eid].timestamp_start = time.time()
if event.priority_updated is not None:
self.streams[eid].priority_exclusive = event.priority_updated.exclusive
self.streams[eid].priority_depends_on = event.priority_updated.depends_on
self.streams[eid].priority_weight = event.priority_updated.weight
self.streams[eid].handled_priority_event = event.priority_updated
self.streams[eid].start()
self.streams[eid].request_message.arrived.set()
return True
def _handle_response_received(self, eid, event):
headers = mitmproxy.net.http.Headers([[k, v] for k, v in event.headers])
self.streams[eid].queued_data_length = 0
self.streams[eid].timestamp_start = time.time()
self.streams[eid].response_message.headers = headers
self.streams[eid].response_message.arrived.set()
return True
def _handle_data_received(self, eid, event, source_conn):
bsl = human.parse_size(self.config.options.body_size_limit)
if bsl and self.streams[eid].queued_data_length > bsl:
self.streams[eid].kill()
self.connections[source_conn].safe_reset_stream(
event.stream_id,
h2.errors.ErrorCodes.REFUSED_STREAM
)
self.log("HTTP body too large. Limit is {}.".format(bsl), "info")
else:
self.streams[eid].data_queue.put(event.data)
self.streams[eid].queued_data_length += len(event.data)
# always acknowledge received data with a WINDOW_UPDATE frame
self.connections[source_conn].safe_acknowledge_received_data(
event.flow_controlled_length,
event.stream_id
)
return True
def _handle_stream_ended(self, eid):
self.streams[eid].timestamp_end = time.time()
self.streams[eid].stream_ended.set()
return True
def _handle_stream_reset(self, eid, event, is_server, other_conn):
if eid in self.streams:
self.streams[eid].kill()
if is_server:
other_stream_id = self.streams[eid].client_stream_id
else:
other_stream_id = self.streams[eid].server_stream_id
if other_stream_id is not None:
self.connections[other_conn].safe_reset_stream(other_stream_id, event.error_code)
return True
def _handle_trailers(self, eid, event, is_server, other_conn):
trailers = mitmproxy.net.http.Headers([[k, v] for k, v in event.headers])
self.streams[eid].trailers = trailers
return True
def _handle_remote_settings_changed(self, event, other_conn):
new_settings = dict([(key, cs.new_value) for (key, cs) in event.changed_settings.items()])
self.connections[other_conn].safe_update_settings(new_settings)
return True
def _handle_connection_terminated(self, event, is_server):
self.log("HTTP/2 connection terminated by {}: error code: {}, last stream id: {}, additional data: {}".format(
"server" if is_server else "client",
event.error_code,
event.last_stream_id,
event.additional_data), "info")
if event.error_code != h2.errors.ErrorCodes.NO_ERROR:
# Something terrible has happened - kill everything!
self.connections[self.client_conn].close_connection(
error_code=event.error_code,
last_stream_id=event.last_stream_id,
additional_data=event.additional_data
)
self.client_conn.send(self.connections[self.client_conn].data_to_send())
self._kill_all_streams()
else:
"""
Do not immediately terminate the other connection.
Some streams might be still sending data to the client.
"""
return False
def _handle_pushed_stream_received(self, event):
# pushed stream ids should be unique and not dependent on race conditions
# only the parent stream id must be looked up first
parent_eid = self.server_to_client_stream_ids[event.parent_stream_id]
with self.connections[self.client_conn].lock:
self.connections[self.client_conn].push_stream(parent_eid, event.pushed_stream_id, event.headers)
self.client_conn.send(self.connections[self.client_conn].data_to_send())
headers = mitmproxy.net.http.Headers([[k, v] for k, v in event.headers])
layer = Http2SingleStreamLayer(self, self.connections[self.client_conn], event.pushed_stream_id, headers)
self.streams[event.pushed_stream_id] = layer
self.streams[event.pushed_stream_id].timestamp_start = time.time()
self.streams[event.pushed_stream_id].pushed = True
self.streams[event.pushed_stream_id].parent_stream_id = parent_eid
self.streams[event.pushed_stream_id].timestamp_end = time.time()
self.streams[event.pushed_stream_id].request_message.arrived.set()
self.streams[event.pushed_stream_id].request_message.stream_ended.set()
self.streams[event.pushed_stream_id].start()
return True
def _handle_priority_updated(self, eid, event):
if not self.config.options.http2_priority:
self.log("HTTP/2 PRIORITY frame suppressed. Use --http2-priority to enable forwarding.", "debug")
return True
if eid in self.streams and self.streams[eid].handled_priority_event is event:
# this event was already handled during stream creation
# HeadersFrame + Priority information as RequestReceived
return True
with self.connections[self.server_conn].lock:
mapped_stream_id = event.stream_id
if mapped_stream_id in self.streams and self.streams[mapped_stream_id].server_stream_id:
# if the stream is already up and running and was sent to the server,
# use the mapped server stream id to update priority information
mapped_stream_id = self.streams[mapped_stream_id].server_stream_id
if eid in self.streams:
self.streams[eid].priority_exclusive = event.exclusive
self.streams[eid].priority_depends_on = event.depends_on
self.streams[eid].priority_weight = event.weight
self.connections[self.server_conn].prioritize(
mapped_stream_id,
weight=event.weight,
depends_on=self._map_depends_on_stream_id(mapped_stream_id, event.depends_on),
exclusive=event.exclusive
)
self.server_conn.send(self.connections[self.server_conn].data_to_send())
return True
def _map_depends_on_stream_id(self, stream_id, depends_on):
mapped_depends_on = depends_on
if mapped_depends_on in self.streams and self.streams[mapped_depends_on].server_stream_id:
# if the depends-on-stream is already up and running and was sent to the server
# use the mapped server stream id to update priority information
mapped_depends_on = self.streams[mapped_depends_on].server_stream_id
if stream_id == mapped_depends_on:
# looks like one of the streams wasn't opened yet
# prevent self-dependent streams which result in ProtocolError
mapped_depends_on += 2
return mapped_depends_on
def _cleanup_streams(self):
death_time = time.time() - 10
zombie_streams = [(stream_id, stream) for stream_id, stream in list(self.streams.items()) if stream.zombie]
outdated_streams = [stream_id for stream_id, stream in zombie_streams if stream.zombie <= death_time]
for stream_id in outdated_streams: # pragma: no cover
self.streams.pop(stream_id, None)
def _kill_all_streams(self):
for stream in self.streams.values():
stream.kill()
def __call__(self):
self._initiate_server_conn()
self._complete_handshake()
conns = [c.connection for c in self.connections.keys()]
try:
while True:
r = tcp.ssl_read_select(conns, 0.1)
for conn in r:
source_conn = self.client_conn if conn == self.client_conn.connection else self.server_conn
other_conn = self.server_conn if conn == self.client_conn.connection else self.client_conn
is_server = (source_conn == self.server_conn)
with self.connections[source_conn].lock:
try:
raw_frame = b''.join(http2.read_raw_frame(source_conn.rfile))
except:
# read frame failed: connection closed
self._kill_all_streams()
return
if self.connections[source_conn].state_machine.state == h2.connection.ConnectionState.CLOSED:
self.log("HTTP/2 connection entered closed state already", "debug")
return
incoming_events = self.connections[source_conn].receive_data(raw_frame)
source_conn.send(self.connections[source_conn].data_to_send())
for event in incoming_events:
if not self._handle_event(event, source_conn, other_conn, is_server):
# connection terminated: GoAway
self._kill_all_streams()
return
self._cleanup_streams()
except Exception as e: # pragma: no cover
self.log(repr(e), "info")
self._kill_all_streams()
def detect_zombie_stream(func): # pragma: no cover
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
self.raise_zombie()
result = func(self, *args, **kwargs)
self.raise_zombie()
return result
return wrapper
class Http2SingleStreamLayer(httpbase._HttpTransmissionLayer, basethread.BaseThread):
class Message:
def __init__(self, headers=None):
self.headers: Optional[mitmproxy.net.http.Headers] = headers # headers are the first thing to be received on a new stream
self.data_queue: queue.Queue[bytes] = queue.Queue() # contains raw contents of DATA frames
self.queued_data_length = 0 # used to enforce mitmproxy's config.options.body_size_limit
self.trailers: Optional[mitmproxy.net.http.Headers] = None # trailers are received after stream_ended is set
self.arrived = threading.Event()  # indicates the HEADERS+CONTINUATION frames have been received
self.stream_ended = threading.Event()  # indicates that a frame with the END_STREAM flag has been received
def __init__(self, ctx, h2_connection, stream_id: int, request_headers: mitmproxy.net.http.Headers) -> None:
super().__init__(
ctx, name="Http2SingleStreamLayer-{}".format(stream_id)
)
self.h2_connection = h2_connection
self.zombie: Optional[float] = None
self.client_stream_id: int = stream_id
self.server_stream_id: Optional[int] = None
self.pushed = False
self.timestamp_start: Optional[float] = None
self.timestamp_end: Optional[float] = None
self.request_message = self.Message(request_headers)
self.response_message = self.Message()
self.priority_exclusive: bool
self.priority_depends_on: Optional[int] = None
self.priority_weight: Optional[int] = None
self.handled_priority_event: Any = None
def kill(self):
if not self.zombie:
self.zombie = time.time()
self.request_message.stream_ended.set()
self.request_message.arrived.set()
self.response_message.arrived.set()
self.response_message.stream_ended.set()
def connect(self): # pragma: no cover
raise exceptions.Http2ProtocolException("HTTP2 layer should already have a connection.")
def disconnect(self): # pragma: no cover
raise exceptions.Http2ProtocolException("Cannot dis- or reconnect in HTTP2 connections.")
def set_server(self, address): # pragma: no cover
raise exceptions.SetServerNotAllowedException(repr(address))
def check_close_connection(self, flow):
# This layer only handles a single stream.
# RFC 7540 8.1: An HTTP request/response exchange fully consumes a single stream.
return True
@property
def data_queue(self):
if self.response_message.arrived.is_set():
return self.response_message.data_queue
else:
return self.request_message.data_queue
@property
def queued_data_length(self):
if self.response_message.arrived.is_set():
return self.response_message.queued_data_length
else:
return self.request_message.queued_data_length
@queued_data_length.setter
def queued_data_length(self, v):
self.request_message.queued_data_length = v
@property
def stream_ended(self):
# This indicates that all message headers, the full message body, and all trailers have been received
# https://tools.ietf.org/html/rfc7540#section-8.1
if self.response_message.arrived.is_set():
return self.response_message.stream_ended
else:
return self.request_message.stream_ended
@property
def trailers(self):
if self.response_message.arrived.is_set():
return self.response_message.trailers
else:
return self.request_message.trailers
@trailers.setter
def trailers(self, v):
if self.response_message.arrived.is_set():
self.response_message.trailers = v
else:
self.request_message.trailers = v
def raise_zombie(self, pre_command=None): # pragma: no cover
connection_closed = self.h2_connection.state_machine.state == h2.connection.ConnectionState.CLOSED
if self.zombie is not None or connection_closed:
if pre_command is not None:
pre_command()
raise exceptions.Http2ZombieException("Connection or stream already dead: {}, {}".format(self.zombie, connection_closed))
@detect_zombie_stream
def read_request_headers(self, flow):
self.request_message.arrived.wait()
self.raise_zombie()
if self.pushed:
flow.metadata['h2-pushed-stream'] = True
# pseudo header must be present, see https://http2.github.io/http2-spec/#rfc.section.8.1.2.3
authority = self.request_message.headers.pop(':authority', "")
method = self.request_message.headers.pop(':method')
scheme = self.request_message.headers.pop(':scheme')
path = self.request_message.headers.pop(':path')
host, port = url.parse_authority(authority, check=True)
port = port or url.default_port(scheme) or 0
return http.HTTPRequest(
host,
port,
method.encode(),
scheme.encode(),
authority.encode(),
path.encode(),
b"HTTP/2.0",
self.request_message.headers,
None,
None,
self.timestamp_start,
self.timestamp_end,
)
@detect_zombie_stream
def read_request_body(self, request):
if not request.stream:
self.request_message.stream_ended.wait()
while True:
try:
yield self.request_message.data_queue.get(timeout=0.1)
except queue.Empty: # pragma: no cover
pass
if self.request_message.stream_ended.is_set():
self.raise_zombie()
while self.request_message.data_queue.qsize() > 0:
yield self.request_message.data_queue.get()
break
self.raise_zombie()
@detect_zombie_stream
def read_request_trailers(self, request):
return self.request_message.trailers
@detect_zombie_stream
def send_request_headers(self, request):
if self.pushed:
# nothing to do here
return
while True:
self.raise_zombie()
self.connections[self.server_conn].lock.acquire()
max_streams = self.connections[self.server_conn].remote_settings.max_concurrent_streams
if self.connections[self.server_conn].open_outbound_streams + 1 >= max_streams:
# wait until we get a free slot for a new outgoing stream
self.connections[self.server_conn].lock.release()
time.sleep(0.1)
continue
# keep the lock
break
# We must not assign a stream id if we are already a zombie.
self.raise_zombie()
self.server_stream_id = self.connections[self.server_conn].get_next_available_stream_id()
self.server_to_client_stream_ids[self.server_stream_id] = self.client_stream_id
headers = request.headers.copy()
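        # Re-insert the HTTP/2 pseudo headers; each insert(0, ...) prepends, so the
        # final order is :scheme, :method, :path, :authority (when present).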
if request.authority:
headers.insert(0, ":authority", request.authority)
headers.insert(0, ":path", request.path)
headers.insert(0, ":method", request.method)
headers.insert(0, ":scheme", request.scheme)
priority_exclusive = None
priority_depends_on = None
priority_weight = None
if self.handled_priority_event:
# only send priority information if they actually came with the original HeadersFrame
# and not if they got updated before/after with a PriorityFrame
if not self.config.options.http2_priority:
self.log("HTTP/2 PRIORITY information in HEADERS frame suppressed. Use --http2-priority to enable forwarding.", "debug")
else:
priority_exclusive = self.priority_exclusive
priority_depends_on = self._map_depends_on_stream_id(self.server_stream_id, self.priority_depends_on)
priority_weight = self.priority_weight
try:
self.connections[self.server_conn].safe_send_headers(
self.raise_zombie,
self.server_stream_id,
headers,
priority_exclusive=priority_exclusive,
priority_depends_on=priority_depends_on,
priority_weight=priority_weight,
)
except Exception as e: # pragma: no cover
raise e
finally:
self.raise_zombie()
self.connections[self.server_conn].lock.release()
@detect_zombie_stream
def send_request_body(self, request, chunks):
if self.pushed:
# nothing to do here
return
self.connections[self.server_conn].safe_send_body(
self.raise_zombie,
self.server_stream_id,
chunks,
end_stream=(request.trailers is None),
)
@detect_zombie_stream
def send_request_trailers(self, request):
self._send_trailers(self.server_conn, request.trailers)
@detect_zombie_stream
def send_request(self, request):
self.send_request_headers(request)
self.send_request_body(request, [request.content])
self.send_request_trailers(request)
@detect_zombie_stream
def read_response_headers(self):
self.response_message.arrived.wait()
self.raise_zombie()
status_code = int(self.response_message.headers.get(':status', 502))
headers = self.response_message.headers.copy()
headers.pop(":status", None)
return http.HTTPResponse(
http_version=b"HTTP/2.0",
status_code=status_code,
reason=b'',
headers=headers,
content=None,
trailers=None,
timestamp_start=self.timestamp_start,
timestamp_end=self.timestamp_end,
)
@detect_zombie_stream
def read_response_body(self, request, response):
while True:
try:
yield self.response_message.data_queue.get(timeout=0.1)
except queue.Empty: # pragma: no cover
pass
if self.response_message.stream_ended.is_set():
self.raise_zombie()
while self.response_message.data_queue.qsize() > 0:
yield self.response_message.data_queue.get()
break
self.raise_zombie()
@detect_zombie_stream
def read_response_trailers(self, request, response):
return self.response_message.trailers
@detect_zombie_stream
def send_response_headers(self, response):
headers = response.headers.copy()
headers.insert(0, ":status", str(response.status_code))
with self.connections[self.client_conn].lock:
self.connections[self.client_conn].safe_send_headers(
self.raise_zombie,
self.client_stream_id,
headers
)
@detect_zombie_stream
def send_response_body(self, response, chunks):
self.connections[self.client_conn].safe_send_body(
self.raise_zombie,
self.client_stream_id,
chunks,
end_stream=(response.trailers is None),
)
@detect_zombie_stream
def send_response_trailers(self, response):
self._send_trailers(self.client_conn, response.trailers)
def _send_trailers(self, conn, trailers):
if not trailers:
return
with self.connections[conn].lock:
self.connections[conn].safe_send_headers(
self.raise_zombie,
self.client_stream_id,
trailers,
end_stream=True
)
def __call__(self): # pragma: no cover
raise EnvironmentError('Http2SingleStreamLayer must be run as thread')
def run(self):
layer = httpbase.HttpLayer(self, self.mode)
try:
layer()
except exceptions.Http2ZombieException: # pragma: no cover
# zombies can be safely terminated - no need to kill them twice
return
except exceptions.ProtocolException as e: # pragma: no cover
self.log(repr(e), "info")
except exceptions.SetServerNotAllowedException as e: # pragma: no cover
self.log("Changing the Host server for HTTP/2 connections not allowed: {}".format(e), "info")
except exceptions.Kill: # pragma: no cover
self.log("Connection killed", "info")
self.kill()
| mit | 6,689,100,719,969,246,000 | 40.143836 | 136 | 0.610488 | false | 4.001999 | true | false | false |
yuxans/badgirl | src/fortune.py | 1 | 1513 | #!/usr/bin/env python
# Copyright (c) 2003 Daniel DiPaolo
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
handler_list=["fortune", "excuse"]
from moobot_module import MooBotModule
class fortune(MooBotModule):
def __init__(self):
self.regex="^fortune$"
def handler(self, **args):
"""Grabs a fortune and spits it out"""
import os
from irclib import Event
fortune_txt = os.popen("fortune -s").read()
return Event("privmsg", "", self.return_to_sender(args), [ fortune_txt ])
class excuse(MooBotModule):
def __init__(self):
self.regex="^excuse$"
def handler(self, **args):
"""Grabs an excuse from the bofh fortune file and spits it out"""
import os
from irclib import Event
fortune_txt = os.popen("fortune bofh-excuses|tail --lines=+2").read()
return Event("privmsg", "", self.return_to_sender(args), [ fortune_txt ])
| gpl-2.0 | -9,076,864,932,055,481,000 | 30.520833 | 77 | 0.716457 | false | 3.332599 | false | false | false |
ActiveState/code | recipes/Python/93025_Speech_recognitiWindows_using_MS_Speech/recipe-93025.py | 1 | 3325 | from win32com.client import constants
import win32com.client
import pythoncom
"""Sample code for using the Microsoft Speech SDK 5.1 via COM in Python.
Requires that the SDK be installed; it's a free download from
http://microsoft.com/speech
and that MakePy has been used on it (in PythonWin,
select Tools | COM MakePy Utility | Microsoft Speech Object Library 5.1).
After running this, saying "One", "Two", "Three" or "Four" should
display "You said One" etc. on the console. The recognition can be a bit
shaky at first until you've trained it (via the Speech entry in the Windows
Control Panel)."""
class SpeechRecognition:
""" Initialize the speech recognition with the passed in list of words """
def __init__(self, wordsToAdd):
# For text-to-speech
self.speaker = win32com.client.Dispatch("SAPI.SpVoice")
# For speech recognition - first create a listener
self.listener = win32com.client.Dispatch("SAPI.SpSharedRecognizer")
# Then a recognition context
self.context = self.listener.CreateRecoContext()
# which has an associated grammar
self.grammar = self.context.CreateGrammar()
# Do not allow free word recognition - only command and control
# recognizing the words in the grammar only
self.grammar.DictationSetState(0)
# Create a new rule for the grammar, that is top level (so it begins
# a recognition) and dynamic (ie we can change it at runtime)
self.wordsRule = self.grammar.Rules.Add("wordsRule",
constants.SRATopLevel + constants.SRADynamic, 0)
# Clear the rule (not necessary first time, but if we're changing it
# dynamically then it's useful)
self.wordsRule.Clear()
# And go through the list of words, adding each to the rule
[ self.wordsRule.InitialState.AddWordTransition(None, word) for word in wordsToAdd ]
# Set the wordsRule to be active
self.grammar.Rules.Commit()
self.grammar.CmdSetRuleState("wordsRule", 1)
# Commit the changes to the grammar
self.grammar.Rules.Commit()
# And add an event handler that's called back when recognition occurs
self.eventHandler = ContextEvents(self.context)
# Announce we've started using speech synthesis
self.say("Started successfully")
"""Speak a word or phrase"""
def say(self, phrase):
self.speaker.Speak(phrase)
"""The callback class that handles the events raised by the speech object.
See "Automation | SpSharedRecoContext (Events)" in the MS Speech SDK
online help for documentation of the other events supported. """
class ContextEvents(win32com.client.getevents("SAPI.SpSharedRecoContext")):
"""Called when a word/phrase is successfully recognized -
ie it is found in a currently open grammar with a sufficiently high
confidence"""
def OnRecognition(self, StreamNumber, StreamPosition, RecognitionType, Result):
newResult = win32com.client.Dispatch(Result)
print "You said: ",newResult.PhraseInfo.GetText()
if __name__=='__main__':
wordsToAdd = [ "One", "Two", "Three", "Four" ]
speechReco = SpeechRecognition(wordsToAdd)
while 1:
pythoncom.PumpWaitingMessages()
| mit | -1,125,464,779,408,658,200 | 48.626866 | 92 | 0.686015 | false | 4.074755 | false | false | false |
ThreeSixtyGiving/prototype-tools | scripts/gen-docs.py | 1 | 4111 | # This script generates an intermediate representation of the data model ready for translation into CSV
import json
import operator # Used in sorting
from sets import Set
from genmodel import generateModel, getName
# Change final parameter to False / True depending on whether you want roll-ups or not.
# Note to self: Use python gen-docs.py > ../website/standard/_includes/buildingblocks.html with rollups false for keeping documentation updated.
model = generateModel("http://joinedupdata.org/ontologies/philanthropy/Grant",1,{},False)
print "<ul>"
for table in sorted(model):
print "<li><a href='#"+table+"'>"+table +"</a></li>"
print "</ul>"
print "<p>Details on each of these building blocks can be found below.</p>"
for table in sorted(model):
print "<h4 class='activity' id='" + table + "'><span class='glyphicon glyphicon-th'></span> "+table+"</h4>"
print "<p>"+model[table]["_meta"]['description']+"</p>"
print "<p><strong>Types:</strong> "+ ", ".join(model[table]["_meta"]['types']) + "</p>"
print """
<div class="panel panel-primary">
<div class="panel-heading">
<h4 class="panel-title">
<a data-toggle="collapse" data-target="#%s">
Data properties
</a>
</h4>
</div>
<div id="%s" class="panel-collapse collapse out">
<div class="panel-body">
<table class="table">
<thead>
<tr>
<th>ID</th>
<th>Title (en)</th>
<th>Description</th>
<th>Values</th>
</tr>
</thead><tbody>
""" % ("table-"+table,"table-"+table)
c = 0
cols = []
#Dictionary sorting work-around
for col in model[table]:
if(not(col == '_meta')):
cols.append((col,model[table][col]["weight"]))
cols = sorted(cols,key=lambda x: x[1])
for col in cols:
print "<tr class='rowtype-"+str(model[table][col[0]]['values']).lower()+"'>"
print "<td>" + model[table][col[0]]['name'] + "</td>"
print "<td>" + model[table][col[0]]['title'] + "</td>"
try:
print "<td>" + model[table][col[0]]['description'] + "</td>"
except:
print "<td> No description </td>"
try:
print "<td>" + model[table][col[0]]['values'] + "</td>"
except:
print "<td> No values specified </td>"
print "</tr>"
c = c + 1
print """</tbody></table></div>
</div>
</div>"""
## Put together details of all the relationships
print """
<div class="panel panel-info">
<div class="panel-heading">
<h4 class="panel-title">
<a data-toggle="collapse" data-target="#%s">
Relationship properties
</a>
</h4>
</div>
<div id="%s" class="panel-collapse collapse out">
<div class="panel-body">
<table class="table">
<thead>
<tr>
<th>Relationship</th>
<th>Title</th>
<th>Description</th>
<th>Related to</th>
</tr>
</thead>
<tbody>
""" % ("related-"+table,"related-"+table)
#Dictionary sorting work-around
rcols = []
for col in model[table]['_meta']['related']:
rcols.append((col,model[table]['_meta']['related'][col]["topObject"]))
rcols = sorted(rcols,key=lambda x: x[1])
for related in rcols:
relatedItem = model[table]['_meta']['related'][related[0]]
print "<tr>"
print "<td>" + relatedItem['relationshipName'] + "</td>"
print "<td>" + relatedItem['title'] + "</td>"
print "<td>" + relatedItem['description'] + "</td>"
print "<td> <a href='#" + relatedItem['topObject'] + "'>" + relatedItem['objectName'] + " (" + relatedItem['topObject'] +")</a></td>"
print "</tr>"
print """</tbody></table></div>
</div>
</div>""" | mit | 4,512,297,391,196,105,700 | 35.070175 | 144 | 0.511068 | false | 3.849251 | false | false | false |
lvdongbing/bilean | bilean/engine/clients/os/nova.py | 1 | 10976 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import json
import six
from novaclient import client as nc
from novaclient import exceptions
from novaclient import shell as novashell
from bilean.common import exception
from bilean.common.i18n import _
from bilean.common.i18n import _LW
from bilean.engine.clients import client_plugin
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class NovaClientPlugin(client_plugin.ClientPlugin):
deferred_server_statuses = ['BUILD',
'HARD_REBOOT',
'PASSWORD',
'REBOOT',
'RESCUE',
'RESIZE',
'REVERT_RESIZE',
'SHUTOFF',
'SUSPENDED',
'VERIFY_RESIZE']
exceptions_module = exceptions
def _create(self):
computeshell = novashell.OpenStackComputeShell()
extensions = computeshell._discover_extensions("1.1")
endpoint_type = self._get_client_option('nova', 'endpoint_type')
args = {
'project_id': self.context.tenant,
'auth_url': self.context.auth_url,
'service_type': 'compute',
'username': None,
'api_key': None,
'extensions': extensions,
'endpoint_type': endpoint_type,
'http_log_debug': self._get_client_option('nova',
'http_log_debug'),
'cacert': self._get_client_option('nova', 'ca_file'),
'insecure': self._get_client_option('nova', 'insecure')
}
client = nc.Client(1.1, **args)
management_url = self.url_for(service_type='compute',
endpoint_type=endpoint_type)
client.client.auth_token = self.auth_token
client.client.management_url = management_url
return client
def is_not_found(self, ex):
return isinstance(ex, exceptions.NotFound)
def is_over_limit(self, ex):
return isinstance(ex, exceptions.OverLimit)
def is_bad_request(self, ex):
return isinstance(ex, exceptions.BadRequest)
def is_conflict(self, ex):
return isinstance(ex, exceptions.Conflict)
def is_unprocessable_entity(self, ex):
http_status = (getattr(ex, 'http_status', None) or
getattr(ex, 'code', None))
return (isinstance(ex, exceptions.ClientException) and
http_status == 422)
def refresh_server(self, server):
'''Refresh server's attributes.
Log warnings for non-critical API errors.
'''
try:
server.get()
except exceptions.OverLimit as exc:
LOG.warn(_LW("Server %(name)s (%(id)s) received an OverLimit "
"response during server.get(): %(exception)s"),
{'name': server.name,
'id': server.id,
'exception': exc})
except exceptions.ClientException as exc:
if ((getattr(exc, 'http_status', getattr(exc, 'code', None)) in
(500, 503))):
LOG.warn(_LW('Server "%(name)s" (%(id)s) received the '
'following exception during server.get(): '
'%(exception)s'),
{'name': server.name,
'id': server.id,
'exception': exc})
else:
raise
def get_ip(self, server, net_type, ip_version):
"""Return the server's IP of the given type and version."""
if net_type in server.addresses:
for ip in server.addresses[net_type]:
if ip['version'] == ip_version:
return ip['addr']
def get_status(self, server):
'''Return the server's status.
:param server: server object
:returns: status as a string
'''
# Some clouds append extra (STATUS) strings to the status, strip it
return server.status.split('(')[0]
def get_flavor_id(self, flavor):
'''Get the id for the specified flavor name.
If the specified value is flavor id, just return it.
:param flavor: the name of the flavor to find
:returns: the id of :flavor:
:raises: exception.FlavorMissing
'''
flavor_id = None
flavor_list = self.client().flavors.list()
for o in flavor_list:
if o.name == flavor:
flavor_id = o.id
break
if o.id == flavor:
flavor_id = o.id
break
if flavor_id is None:
raise exception.FlavorMissing(flavor_id=flavor)
return flavor_id
def get_keypair(self, key_name):
'''Get the public key specified by :key_name:
:param key_name: the name of the key to look for
:returns: the keypair (name, public_key) for :key_name:
:raises: exception.UserKeyPairMissing
'''
try:
return self.client().keypairs.get(key_name)
except exceptions.NotFound:
raise exception.UserKeyPairMissing(key_name=key_name)
def delete_server(self, server):
'''Deletes a server and waits for it to disappear from Nova.'''
if not server:
return
try:
server.delete()
except Exception as exc:
self.ignore_not_found(exc)
return
while True:
yield
try:
self.refresh_server(server)
except Exception as exc:
self.ignore_not_found(exc)
break
else:
# Some clouds append extra (STATUS) strings to the status
short_server_status = server.status.split('(')[0]
if short_server_status in ("DELETED", "SOFT_DELETED"):
break
if short_server_status == "ERROR":
fault = getattr(server, 'fault', {})
message = fault.get('message', 'Unknown')
code = fault.get('code')
errmsg = (_("Server %(name)s delete failed: (%(code)s) "
"%(message)s"))
raise exception.Error(errmsg % {"name": server.name,
"code": code,
"message": message})
def delete(self, server_id):
'''Delete a server by given server id'''
self.client().servers.delete(server_id)
def resize(self, server, flavor, flavor_id):
"""Resize the server and then call check_resize task to verify."""
server.resize(flavor_id)
yield self.check_resize(server, flavor, flavor_id)
def rename(self, server, name):
"""Update the name for a server."""
server.update(name)
def check_resize(self, server, flavor, flavor_id):
"""Verify that a resizing server is properly resized.
If that's the case, confirm the resize, if not raise an error.
"""
self.refresh_server(server)
while server.status == 'RESIZE':
yield
self.refresh_server(server)
if server.status == 'VERIFY_RESIZE':
server.confirm_resize()
else:
raise exception.Error(
_("Resizing to '%(flavor)s' failed, status '%(status)s'") %
dict(flavor=flavor, status=server.status))
def rebuild(self, server, image_id, preserve_ephemeral=False):
"""Rebuild the server and call check_rebuild to verify."""
server.rebuild(image_id, preserve_ephemeral=preserve_ephemeral)
yield self.check_rebuild(server, image_id)
def check_rebuild(self, server, image_id):
"""Verify that a rebuilding server is rebuilt.
Raise error if it ends up in an ERROR state.
"""
self.refresh_server(server)
while server.status == 'REBUILD':
yield
self.refresh_server(server)
if server.status == 'ERROR':
raise exception.Error(
_("Rebuilding server failed, status '%s'") % server.status)
def meta_serialize(self, metadata):
"""Serialize non-string metadata values before sending them to Nova."""
if not isinstance(metadata, collections.Mapping):
raise exception.StackValidationFailed(message=_(
"nova server metadata needs to be a Map."))
return dict((key, (value if isinstance(value,
six.string_types)
else json.dumps(value))
) for (key, value) in metadata.items())
def meta_update(self, server, metadata):
"""Delete/Add the metadata in nova as needed."""
metadata = self.meta_serialize(metadata)
current_md = server.metadata
to_del = [key for key in current_md.keys() if key not in metadata]
client = self.client()
if len(to_del) > 0:
client.servers.delete_meta(server, to_del)
client.servers.set_meta(server, metadata)
def server_to_ipaddress(self, server):
'''Return the server's IP address, fetching it from Nova.'''
try:
server = self.client().servers.get(server)
except exceptions.NotFound as ex:
LOG.warn(_LW('Instance (%(server)s) not found: %(ex)s'),
{'server': server, 'ex': ex})
else:
for n in server.networks:
if len(server.networks[n]) > 0:
return server.networks[n][0]
def get_server(self, server):
try:
return self.client().servers.get(server)
except exceptions.NotFound as ex:
LOG.warn(_LW('Server (%(server)s) not found: %(ex)s'),
{'server': server, 'ex': ex})
raise exception.ServerNotFound(server=server)
def absolute_limits(self):
"""Return the absolute limits as a dictionary."""
limits = self.client().limits.get()
return dict([(limit.name, limit.value)
for limit in list(limits.absolute)])
| apache-2.0 | -7,248,169,001,603,073,000 | 36.333333 | 79 | 0.545645 | false | 4.476346 | false | false | false |
openstack/python-magnumclient | magnumclient/osc/v1/stats.py | 1 | 1441 | # Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from magnumclient.common import cliutils as utils
from magnumclient.i18n import _
from osc_lib.command import command
class ListStats(command.Command):
_description = _("Show stats for the given project_id")
def get_parser(self, prog_name):
parser = super(ListStats, self).get_parser(prog_name)
parser.add_argument('project_id',
metavar='<project>',
help='Project ID')
return parser
def take_action(self, parsed_args):
mag_client = self.app.client_manager.container_infra
opts = {
'project_id': parsed_args.project_id
}
stats = mag_client.stats.list(**opts)
try:
utils.print_dict(stats._info)
except AttributeError:
return None
| apache-2.0 | -6,872,251,423,533,257,000 | 34.146341 | 78 | 0.653713 | false | 4.16474 | false | false | false |
intuition-io/intuition | tests/api/test_datafeed.py | 1 | 9643 | '''
Tests for intuition.api.datafeed
'''
import unittest
from nose.tools import raises, ok_, eq_, nottest
import random
import pytz
import datetime as dt
import pandas as pd
import intuition.api.datafeed as datafeed
from intuition.data.universe import Market
from intuition.errors import InvalidDatafeed
import dna.test_utils
class FakeBacktestDatasource(object):
def __init__(self, sids, properties):
pass
@property
def mapping(self):
return {
'backtest': (lambda x: True, 'sid'),
'dt': (lambda x: x, 'dt'),
'sid': (lambda x: x, 'sid'),
'price': (float, 'price'),
'volume': (int, 'volume'),
}
def get_data(self, sids, start, end):
index = pd.date_range(start, end, tz=pytz.utc)
return pd.DataFrame({sid: [random.random()] * len(index)
for sid in sids}, index=index)
class FakePanelBacktestDatasource(object):
def __init__(self, sids, properties):
pass
@property
def mapping(self):
return {
'backtest': (lambda x: True, 'sid'),
'dt': (lambda x: x, 'dt'),
'sid': (lambda x: x, 'sid'),
'price': (float, 'price'),
'low': (float, 'low'),
'high': (float, 'high'),
'volume': (int, 'volume'),
}
def get_data(self, sids, start, end):
index = pd.date_range(start, end, tz=pytz.utc)
fake_data = {}
for sid in sids:
fake_data[sid] = pd.DataFrame(
{field: [random.random()] * len(index)
for field in ['price', 'low', 'high', 'volume']}, index=index)
return pd.Panel(fake_data)
class FakePanelWithoutVolumeBacktestDatasource(object):
def __init__(self, sids, properties):
pass
def get_data(self, sids, start, end):
index = pd.date_range(start, end, tz=pytz.utc)
fake_data = {}
for sid in sids:
fake_data[sid] = pd.DataFrame(
{field: [random.random()] * len(index)
for field in ['price', 'low', 'high']}, index=index)
return pd.Panel(fake_data)
class FakeLiveDatasource(object):
def __init__(self, sids, properties):
pass
@property
def mapping(self):
return {
'live': True
}
def get_data(self, sids, start, end):
return pd.DataFrame()
class DatafeedUtilsTestCase(unittest.TestCase):
def setUp(self):
dna.test_utils.setup_logger(self)
self.fake_sid = 'fake_sid'
self.fake_one_sid_series = pd.Series(
{key: random.random() for key in ['low', 'close']})
self.fake_multiple_sids_series = pd.Series(
{key: random.random() for key in ['goog', 'fake_sid']})
self.fake_multiple_sids_df = pd.DataFrame(
{key: {'price': random.random(), 'close': 0.3}
for key in ['goog', 'fake_sid']})
self.fake_date = dt.datetime(2013, 1, 1)
def tearDown(self):
dna.test_utils.teardown_logger(self)
@nottest
def _check_event(self, event):
self.assertIsInstance(event, dict)
self.assertIn('volume', event)
self.assertIn('dt', event)
eq_(event['dt'], self.fake_date)
eq_(event['sid'], self.fake_sid)
def test_build_safe_event_without_volume(self):
partial_event = self.fake_one_sid_series.to_dict()
event = datafeed._build_safe_event(
partial_event, self.fake_date, self.fake_sid)
self._check_event(event)
for field in self.fake_one_sid_series.index:
self.assertIn(field, event.keys())
def test_build_safe_event_with_volume(self):
partial_event = self.fake_one_sid_series.to_dict()
partial_event.update({'volume': 12034})
event = datafeed._build_safe_event(
partial_event, self.fake_date, self.fake_sid)
self._check_event(event)
for field in self.fake_one_sid_series.index:
self.assertIn(field, event.keys())
@raises(AttributeError)
def test_wrong_data_type(self):
wrong_type = bool
datafeed._build_safe_event(wrong_type, self.fake_date, self.fake_sid)
def test_check_data_modules(self):
end = self.fake_date + pd.datetools.MonthBegin(6)
ok_(datafeed._check_data_modules(
'backtest.module', None, self.fake_date, end))
@raises(InvalidDatafeed)
def test_check_data_modules_all_nones(self):
end = self.fake_date + pd.datetools.MonthBegin(6)
datafeed._check_data_modules(None, None, self.fake_date, end)
class HybridDataFactoryTestCase(unittest.TestCase):
def setUp(self):
dna.test_utils.setup_logger(self)
self.test_index = pd.date_range(
'2012/01/01', '2012/01/7', tz=pytz.utc)
self.test_universe = 'forex,5'
self.market = Market()
self.market.parse_universe_description(self.test_universe)
self.test_sids = self.market.sids
def tearDown(self):
dna.test_utils.teardown_logger(self)
@nottest
def _check_datasource(self, source):
ok_((source.index == self.test_index).all())
eq_(source.start, self.test_index[0])
eq_(source.end, self.test_index[-1])
eq_(source.sids, self.test_sids)
self.assertIsNone(source._raw_data)
eq_(source.arg_string, source.instance_hash)
eq_(source.event_type, 4)
ok_(hasattr(source, 'log'))
self.assertFalse(source._is_live)
@raises(InvalidDatafeed)
def test_data_source_without_modules(self):
config = {
'sids': self.test_sids,
'index': self.test_index
}
datafeed.HybridDataFactory(**config)
@raises(InvalidDatafeed)
def test_data_source_invalid_index(self):
config = {
'sids': self.test_sids,
'index': bool
}
datafeed.HybridDataFactory(**config)
def test_minimal_data_source(self):
source = datafeed.HybridDataFactory(
universe=self.market,
index=self.test_index,
backtest=FakeBacktestDatasource)
self._check_datasource(source)
def test_hybrid_mapping(self):
source = datafeed.HybridDataFactory(
universe=self.market,
index=self.test_index,
backtest=FakeBacktestDatasource,
live=FakeLiveDatasource)
self.assertIn('backtest', source.mapping)
source._is_live = True
self.assertIn('live', source.mapping)
# TODO Test Live data sources
class SpecificMarketDataFactoryTestCase(unittest.TestCase):
def setUp(self):
dna.test_utils.setup_logger(self)
self.test_index = pd.date_range(
'2012/01/01', '2012/01/7', tz=pytz.utc)
def tearDown(self):
dna.test_utils.teardown_logger(self)
def test_dataframe_forex_backtest_data_generation(self):
test_universe = 'forex,5'
market = Market()
market.parse_universe_description(test_universe)
source = datafeed.HybridDataFactory(
universe=market,
index=self.test_index,
backtest=FakeBacktestDatasource)
total_rows = 0
for row in source.raw_data:
if not total_rows:
self.assertListEqual(
sorted(row.keys()),
sorted(['dt', 'price', 'sid', 'volume']))
total_rows += 1
eq_(total_rows, 2 * len(self.test_index) * len(market.sids))
def test_dataframe_cac40_backtest_data_generation(self):
test_universe = 'stocks:paris:cac40'
market = Market()
market.parse_universe_description(test_universe)
source = datafeed.HybridDataFactory(
universe=market,
index=self.test_index,
backtest=FakeBacktestDatasource)
total_rows = 0
for row in source.raw_data:
if not total_rows:
self.assertListEqual(
sorted(row.keys()),
sorted(['dt', 'price', 'sid', 'volume']))
total_rows += 1
eq_(total_rows, len(self.test_index) * len(market.sids))
def test_panel_cac40_backtest_data_generation(self):
test_universe = 'stocks:paris:cac40'
market = Market()
market.parse_universe_description(test_universe)
source = datafeed.HybridDataFactory(
universe=market,
index=self.test_index,
backtest=FakePanelBacktestDatasource)
total_rows = 0
for row in source.raw_data:
if not total_rows:
self.assertListEqual(
sorted(row.keys()),
sorted(['dt', 'price', 'low', 'high', 'sid', 'volume']))
total_rows += 1
eq_(total_rows, len(self.test_index) * len(market.sids))
def test_panel_without_volume_cac40_backtest_data_generation(self):
test_universe = 'stocks:paris:cac40,5'
market = Market()
market.parse_universe_description(test_universe)
source = datafeed.HybridDataFactory(
universe=market,
index=self.test_index,
backtest=FakePanelWithoutVolumeBacktestDatasource)
total_rows = 0
for row in source.raw_data:
if not total_rows:
self.assertListEqual(
sorted(row.keys()),
sorted(['dt', 'price', 'low', 'high', 'sid', 'volume']))
total_rows += 1
eq_(total_rows, len(self.test_index) * len(market.sids))
| apache-2.0 | 4,866,306,927,844,752,000 | 32.137457 | 79 | 0.579488 | false | 3.647126 | true | false | false |
apple/swift-lldb | packages/Python/lldbsuite/test/lang/cpp/virtual/TestVirtual.py | 5 | 3717 | """
Test C++ virtual function and virtual inheritance.
"""
from __future__ import print_function
import os
import re
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
def Msg(expr, val):
return "'expression %s' matches the output (from compiled code): %s" % (
expr, val)
class CppVirtualMadness(TestBase):
mydir = TestBase.compute_mydir(__file__)
# This is the pattern by design to match the "my_expr = 'value'" output from
# printf() stmts (see main.cpp).
pattern = re.compile("^([^=]*) = '([^=]*)'$")
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break for main.cpp.
self.source = 'main.cpp'
self.line = line_number(self.source, '// Set first breakpoint here.')
@expectedFailureAll(
compiler="icc",
bugnumber="llvm.org/pr16808 lldb does not call the correct virtual function with icc.")
@skipIfWindows # This test will hang on windows llvm.org/pr21753
@expectedFlakeyNetBSD
def test_virtual_madness(self):
"""Test that expression works correctly with virtual inheritance as well as virtual function."""
self.build()
# Bring the program to the point where we can issue a series of
# 'expression' command to compare against the golden output.
self.dbg.SetAsync(False)
# Create a target by the debugger.
target = self.dbg.CreateTarget(self.getBuildArtifact("a.out"))
self.assertTrue(target, VALID_TARGET)
# Create the breakpoint inside function 'main'.
breakpoint = target.BreakpointCreateByLocation(self.source, self.line)
self.assertTrue(breakpoint, VALID_BREAKPOINT)
# Now launch the process, and do not stop at entry point.
process = target.LaunchSimple(
None, None, self.get_process_working_directory())
self.assertTrue(process, PROCESS_IS_VALID)
self.assertTrue(process.GetState() == lldb.eStateStopped)
thread = lldbutil.get_stopped_thread(
process, lldb.eStopReasonBreakpoint)
self.assertTrue(
thread.IsValid(),
"There should be a thread stopped due to breakpoint condition")
# First, capture the golden output from the program itself.
golden = thread.GetFrameAtIndex(0).FindVariable("golden")
self.assertTrue(
golden.IsValid(),
"Encountered an error reading the process's golden variable")
error = lldb.SBError()
golden_str = process.ReadCStringFromMemory(
golden.AddressOf().GetValueAsUnsigned(), 4096, error)
self.assertTrue(error.Success())
self.assertTrue("c_as_C" in golden_str)
# This golden list contains a list of "my_expr = 'value' pairs extracted
# from the golden output.
gl = []
# Scan the golden output line by line, looking for the pattern:
#
# my_expr = 'value'
#
for line in golden_str.split(os.linesep):
match = self.pattern.search(line)
if match:
my_expr, val = match.group(1), match.group(2)
gl.append((my_expr, val))
#print("golden list:", gl)
# Now iterate through the golden list, comparing against the output from
# 'expression var'.
for my_expr, val in gl:
self.runCmd("expression %s" % my_expr)
output = self.res.GetOutput()
# The expression output must match the oracle.
self.expect(output, Msg(my_expr, val), exe=False,
substrs=[val])
| apache-2.0 | -5,776,541,678,099,727,000 | 35.087379 | 104 | 0.62685 | false | 4.062295 | true | false | false |
Jordan-Zhu/RoboVision | unsorted/part1.py | 1 | 5653 | # Import the necessary packages
import cv2
import scipy.io as sio
import numpy as np
import random as rand
import util as util
import edge_detect as ed
from lineseg import lineseg
from drawedgelist import drawedgelist
from python.Lseg_to_Lfeat_v4 import create_linefeatures
from python.merge_lines_v4 import merge_lines
from python.LabelLineCurveFeature_v4 import classify_curves
from python.LabelLineFeature_v1 import label_line_features
from python.line_match import line_match
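# Note: helpers referenced below (find_contours, swap_cols, normalize_depth,
# draw_convex, draw_contours) are not defined in this file; they are assumed to
# be provided by the local util / edge_detect modules.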
def initContours(img):
# edges = edge_detect(img)
    curve_disc, curve_con, depth_disc, depth_con, edges = ed.edge_detect(img)
seg_list = lineseg(edges, tol=2)
cntrs = find_contours(img)
# ADVANCED SLICING
for i in range(cntrs.shape[0]):
swap_cols(cntrs[i], 0, 1)
return seg_list, edges, cntrs
def draw_lfeat(line_feature, img):
# blank_image = normalize_depth(img, colormap=True)
height = img.shape[0]
width = img.shape[1]
blank_image = np.zeros((height, width, 3), np.uint8)
for i, e in enumerate(line_feature):
x1 = int(e[1])
y1 = int(e[0])
x2 = int(e[3])
y2 = int(e[2])
color = (rand.randint(0, 255), rand.randint(0, 255), rand.randint(0, 255))
cv2.line(blank_image, (x1, y1), (x2, y2), color, 1)
cv2.namedWindow('Line features', cv2.WINDOW_NORMAL)
cv2.imshow('Line features', blank_image)
cv2.waitKey(0)
cv2.destroyAllWindows()
def draw_listpair(list_pair, line_feature, img):
blank_image = normalize_depth(img, colormap=True)
for i, e in enumerate(list_pair):
color = (rand.randint(0, 255), rand.randint(0, 255), rand.randint(0, 255))
for j, e in enumerate(e):
line = line_feature[np.where(line_feature[:, 7] == e)[0]][0]
x1 = int(line[1])
y1 = int(line[0])
x2 = int(line[3])
y2 = int(line[2])
cv2.line(blank_image, (x1, y1), (x2, y2), color, 2)
cv2.namedWindow('Line features', cv2.WINDOW_NORMAL)
cv2.imshow('Line features', blank_image)
cv2.waitKey(0)
cv2.destroyAllWindows()
if __name__ == '__main__':
img = cv2.imread('../img/learn0.png', -1)
# img = img[50:, 50:480]
im_size = img.shape
height = img.shape[0]
width = img.shape[1]
P = sio.loadmat('Parameter.mat')
param = P['P']
# ******* SECTION 1 *******
# FIND DEPTH / CURVATURE DISCONTINUITIES.
curve_disc, curve_con, depth_disc, depth_con, dst = ed.edge_detect(img)
blank_image = np.zeros((height, width, 3), np.uint8)
# draw_contours(blank_image, dst)
# drawedgelist(dst)
# print(dst)
# Remove extra dimensions from data
res = lineseg(dst, tol=2)
seglist = []
for i in range(res.shape[0]):
# print('shape', res[i].shape)
if res[i].shape[0] > 2:
# print(res[i])
# print(res[i][0])
seglist.append(np.concatenate((res[i], [res[i][0]])))
else:
seglist.append(res[i])
seglist = np.array(seglist)
# print(seglist)
# seg_curve = lineseg(curve_con, tol=1)
# seg_disc = lineseg(depth_con, tol=1)
# seg_list = np.hstack((seg_curve, seg_disc))
# print(seg_disc)
# seg_list, edges, cntrs = initContours(img)
# print(dst.shape)
drawedgelist(seglist)
# drawedgelist(seg_curve)
# ******* SECTION 2 *******
# SEGMENT AND LABEL THE CURVATURE LINES (CONVEX/CONCAVE).
LineFeature, ListPoint = create_linefeatures(seglist, dst, im_size)
Line_new, ListPoint_new, line_merged = merge_lines(LineFeature, ListPoint, 10, im_size)
draw_lfeat(Line_new, img)
# print(line_merged)
line_newC = classify_curves(img, Line_new, ListPoint_new, 11)
draw_convex(line_newC, img)
# LineFeature_curve, ListPoint_curve = create_linefeatures(seg_curve, curve_con, im_size)
# Line_new, ListPoint_new, line_merged = merge_lines(LineFeature_curve, ListPoint_curve, 10, im_size)
# print('Line_new size:', Line_new.shape)
# draw_lfeat(Line_new, img)
# LineFeature_disc, ListPoint_disc = create_linefeatures(seg_disc, depth_con, im_size)
# Line_new, ListPoint_new, line_merged = merge_lines(LineFeature_disc, ListPoint_disc, 10, im_size)
# print('Line_new size:', Line_new.shape)
# draw_lfeat(Line_new, img)
# seg_list, edges, cntrs = initContours(img)
# LineFeature, ListPoint = create_linefeatures(seg_list, cntrs, im_size)
# Line_new, ListPoint_new, line_merged = merge_lines(LineFeature, ListPoint, 10, im_size)
# draw_lfeat(Line_new, img)
# line_newC = classify_curves(img, Line_new, ListPoint_new, 11)
# draw_convex(line_newC, img)
# Remove the 11th column for post-processing
line_newC = np.delete(line_newC, 10, axis=1)
    line_new_new = label_line_features(img, dst, line_newC, param)  # dst is the edge map from ed.edge_detect above
print('Line_new:', line_new_new.shape)
# Keep the lines that are curvature / discontinuities
relevant_lines = np.where(line_new_new[:, 10] != 0)[0]
line_interesting = line_new_new[relevant_lines]
# Fast sorting, done based on line angle
line_interesting = line_interesting[line_interesting[:, 6].argsort()]
print('Line interesting:', line_interesting.shape)
draw_lfeat(line_interesting, img)
# Match the lines into pairs
list_pair = line_match(line_interesting, param)
print('List pair:', list_pair)
draw_listpair(list_pair, line_interesting, img)
cv2.waitKey(0)
cv2.destroyAllWindows()
| gpl-3.0 | 1,339,291,607,766,705,700 | 31.850299 | 105 | 0.616487 | false | 3.126659 | false | false | false |
tileung/DrugsInCPGs | src/ngc/IncludedGLs_byCUI_byTargetPop.py | 1 | 1638 | ## Created: 09/29/15
## Adapted from IncludedGL_byCUI.pt
## Purpose: final list of included CPGs and their disease categories/CUIs
import os
import sys
import csv
## Set path
path = "C:\\Users\\tileung\\Dropbox\\Py Stuffs - Drugs in CPGs\\SeptCode\\"
## Initiate file to save results
fName = path + "IncludedGLs_byCUI_byPop.txt"
try:
os.remove(fName)
print("old file removed")
except OSError:
pass
results_file = "IncludedGLs_byCUI_byPop.tab"
results = open(results_file,"w")
## Identify files for comparison
byCUI_list = path + "unique_IncludedGLs_byCUI.txt"
byPop_list = path + "IncludedCPGs_popExclusions1018.txt"
f = open(byCUI_list, 'r')
freader = csv.reader(f, dialect = csv.excel_tab)
for frow in freader:
try:
GL_no = str(frow[0]) ##match to same key in byPop_list
GL_cui = str(frow[3])
GL_title = frow[1]
GL_link = frow[2]
GL_icd = str(frow[4])
GL_cat = frow[5]
GL_icdlong = frow[6]
g = open(byPop_list, 'r')
greader = csv.reader(g, dialect = csv.excel_tab)
for grow in greader:
GL_iNo = str(grow[0]) ## match to same key in byCUI_list
GL_iTitle = grow[1]
## GL_iPop = grow[2]
if GL_iNo == GL_no:
line = GL_no + '\t' + GL_title + '\t' + GL_iTitle + '\t' + GL_link + '\t' + GL_cui + '\t' + GL_icd + '\t' + GL_cat + '\t' + GL_icdlong + '\n'
## print line
## sys.exit(0)
print GL_no
results.write(line)
except IndexError as e:
continue
results.close()
sys.exit(0)
| gpl-2.0 | -8,420,446,462,738,579,000 | 25.852459 | 157 | 0.567155 | false | 2.838821 | false | false | false |
nwjs/chromium.src | tools/chrome_proxy/webdriver/variations_combinations.py | 2 | 4059 | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import io
import os
import platform
import sys
import time
import unittest
import common
sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
os.pardir, 'tools', 'variations'))
import fieldtrial_util
test_blacklist = [
# These tests set their own field trials and should be ignored.
'quic.Quic.testCheckPageWithQuicProxy',
'quic.Quic.testCheckPageWithQuicProxyTransaction',
'smoke.Smoke.testCheckPageWithHoldback',
]
def GetExperimentArgs():
"""Returns a list of arguments with all tested field trials.
  This function is a simple wrapper around the variation team's fieldtrial_util
  script that generates command line arguments to test Chromium field trials.
Returns:
an array of command line arguments to pass to chrome
"""
config_path = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
os.pardir, 'testing', 'variations', 'fieldtrial_testing_config.json')
my_platform = ''
if common.ParseFlags().android:
my_platform = 'android'
elif platform.system().lower() == 'linux':
my_platform = 'linux'
elif platform.system().lower() == 'windows':
my_platform = 'windows'
elif platform.system().lower() == 'darwin':
my_platform = 'mac'
else:
raise Exception('unknown platform!')
return fieldtrial_util.GenerateArgs(config_path, my_platform)
def GenerateTestSuites():
"""A generator function that yields non-blacklisted tests to run.
This function yields test suites each with a single test case whose id is not
blacklisted in the array at the top of this file.
Yields:
non-blacklisted test suites to run
"""
loader = unittest.TestLoader()
for test_suite in loader.discover(os.path.dirname(__file__), pattern='*.py'):
for test_case in test_suite:
for test_method in test_case:
if test_method.id() not in test_blacklist:
ts = unittest.TestSuite()
ts.addTest(test_method)
yield (ts, test_method.id())
def ParseFlagsWithExtraBrowserArgs(extra_args):
"""Generates a function to override common.ParseFlags.
The returned function will honor everything in the original ParseFlags(), but
adds on additional browser_args.
Args:
extra_args: The extra browser agruments to add.
Returns:
A function to override common.ParseFlags with additional browser_args.
"""
original_flags = common.ParseFlags()
def AddExtraBrowserArgs():
original_flags.browser_args = ((original_flags.browser_args if
original_flags.browser_args else '') + ' ' + extra_args)
return original_flags
return AddExtraBrowserArgs
def main():
"""Runs all non-blacklisted tests against Chromium field trials.
This script run all chrome proxy integration tests that haven't been
blacklisted against the field trial testing configuration used by Chromium
perf bots.
"""
flags = common.ParseFlags()
experiment_args = ' '.join(GetExperimentArgs())
common.ParseFlags = ParseFlagsWithExtraBrowserArgs(experiment_args)
# Each test is wrapped in its own test suite so results can be evaluated
# individually.
for test_suite, test_id in GenerateTestSuites():
buf = io.BytesIO()
sys.stdout.write('%s... ' % test_id)
sys.stdout.flush()
testRunner = unittest.runner.TextTestRunner(stream=buf, verbosity=2,
buffer=(not flags.disable_buffer))
result = testRunner.run(test_suite)
if result.wasSuccessful():
print('ok')
else:
print('failed')
print(buf.getvalue())
print('To repeat this test, run: ')
print("%s %s %s --test_filter=%s --browser_args='%s'" % (
sys.executable,
os.path.join(os.path.dirname(__file__), 'run_all_tests.py'), ' '.join(
sys.argv[1:]), '.'.join(test_id.split('.')[1:]), experiment_args))
if flags.failfast:
return
if __name__ == '__main__':
main()
| bsd-3-clause | -3,198,790,310,492,998,700 | 32.825 | 80 | 0.697216 | false | 3.832861 | true | false | false |
JamesShaeffer/QGIS | python/plugins/processing/ProcessingPlugin.py | 17 | 14208 | # -*- coding: utf-8 -*-
"""
***************************************************************************
ProcessingPlugin.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
import shutil
import os
import sys
from functools import partial
from qgis.core import (QgsApplication,
QgsProcessingUtils,
QgsProcessingModelAlgorithm,
QgsDataItemProvider,
QgsDataProvider,
QgsDataItem,
QgsMapLayerType,
QgsMimeDataUtils)
from qgis.gui import (QgsOptionsWidgetFactory,
QgsCustomDropHandler)
from qgis.PyQt.QtCore import Qt, QCoreApplication, QDir, QFileInfo
from qgis.PyQt.QtWidgets import QMenu, QAction
from qgis.PyQt.QtGui import QIcon, QKeySequence
from qgis.utils import iface
from processing.core.Processing import Processing
from processing.gui.AlgorithmDialog import AlgorithmDialog
from processing.gui.ProcessingToolbox import ProcessingToolbox
from processing.gui.HistoryDialog import HistoryDialog
from processing.gui.ConfigDialog import ConfigOptionsPage
from processing.gui.ResultsDock import ResultsDock
from processing.gui.AlgorithmLocatorFilter import (AlgorithmLocatorFilter,
InPlaceAlgorithmLocatorFilter)
from processing.modeler.ModelerDialog import ModelerDialog
from processing.tools.system import tempHelpFolder
from processing.gui.menus import removeMenus, initializeMenus, createMenus, createButtons, removeButtons
from processing.core.ProcessingResults import resultsList
pluginPath = os.path.dirname(__file__)
class ProcessingOptionsFactory(QgsOptionsWidgetFactory):
def __init__(self):
super(QgsOptionsWidgetFactory, self).__init__()
def icon(self):
return QgsApplication.getThemeIcon('/processingAlgorithm.svg')
def createWidget(self, parent):
return ConfigOptionsPage(parent)
class ProcessingDropHandler(QgsCustomDropHandler):
def handleFileDrop(self, file):
if not file.lower().endswith('.model3'):
return False
return self.runAlg(file)
@staticmethod
def runAlg(file):
alg = QgsProcessingModelAlgorithm()
if not alg.fromFile(file):
return False
alg.setProvider(QgsApplication.processingRegistry().providerById('model'))
dlg = AlgorithmDialog(alg, parent=iface.mainWindow())
dlg.show()
return True
def customUriProviderKey(self):
return 'processing'
def handleCustomUriDrop(self, uri):
path = uri.uri
self.runAlg(path)
class ProcessingModelItem(QgsDataItem):
def __init__(self, parent, name, path):
super(ProcessingModelItem, self).__init__(QgsDataItem.Custom, parent, name, path)
self.setState(QgsDataItem.Populated) # no children
self.setIconName(":/images/themes/default/processingModel.svg")
self.setToolTip(QDir.toNativeSeparators(path))
def hasDragEnabled(self):
return True
def handleDoubleClick(self):
self.runModel()
return True
def mimeUri(self):
u = QgsMimeDataUtils.Uri()
u.layerType = "custom"
u.providerKey = "processing"
u.name = self.name()
u.uri = self.path()
return u
def runModel(self):
ProcessingDropHandler.runAlg(self.path())
def editModel(self):
dlg = ModelerDialog.create()
dlg.loadModel(self.path())
dlg.show()
def actions(self, parent):
run_model_action = QAction(QCoreApplication.translate('ProcessingPlugin', '&Run Model…'), parent)
run_model_action.triggered.connect(self.runModel)
edit_model_action = QAction(QCoreApplication.translate('ProcessingPlugin', '&Edit Model…'), parent)
edit_model_action.triggered.connect(self.editModel)
return [run_model_action, edit_model_action]
class ProcessingDataItemProvider(QgsDataItemProvider):
def __init__(self):
super(ProcessingDataItemProvider, self).__init__()
def name(self):
return 'processing'
def capabilities(self):
return QgsDataProvider.File
def createDataItem(self, path, parentItem):
file_info = QFileInfo(path)
if file_info.suffix().lower() == 'model3':
alg = QgsProcessingModelAlgorithm()
if alg.fromFile(path):
return ProcessingModelItem(parentItem, alg.name(), path)
return None
class ProcessingPlugin:
def __init__(self, iface):
self.iface = iface
self.options_factory = None
self.drop_handler = None
self.item_provider = None
self.locator_filter = None
self.edit_features_locator_filter = None
self.initialized = False
self.initProcessing()
def initProcessing(self):
if not self.initialized:
self.initialized = True
Processing.initialize()
def initGui(self):
self.options_factory = ProcessingOptionsFactory()
self.options_factory.setTitle(self.tr('Processing'))
iface.registerOptionsWidgetFactory(self.options_factory)
self.drop_handler = ProcessingDropHandler()
iface.registerCustomDropHandler(self.drop_handler)
self.item_provider = ProcessingDataItemProvider()
QgsApplication.dataItemProviderRegistry().addProvider(self.item_provider)
self.locator_filter = AlgorithmLocatorFilter()
iface.registerLocatorFilter(self.locator_filter)
# Invalidate the locator filter for in-place when active layer changes
iface.currentLayerChanged.connect(lambda _: self.iface.invalidateLocatorResults())
self.edit_features_locator_filter = InPlaceAlgorithmLocatorFilter()
iface.registerLocatorFilter(self.edit_features_locator_filter)
self.toolbox = ProcessingToolbox()
self.iface.addDockWidget(Qt.RightDockWidgetArea, self.toolbox)
self.toolbox.hide()
self.toolbox.visibilityChanged.connect(self.toolboxVisibilityChanged)
self.resultsDock = ResultsDock()
self.iface.addDockWidget(Qt.RightDockWidgetArea, self.resultsDock)
self.resultsDock.hide()
self.menu = QMenu(self.iface.mainWindow().menuBar())
self.menu.setObjectName('processing')
self.menu.setTitle(self.tr('Pro&cessing'))
self.toolboxAction = QAction(self.tr('&Toolbox'), self.iface.mainWindow())
self.toolboxAction.setCheckable(True)
self.toolboxAction.setObjectName('toolboxAction')
self.toolboxAction.setIcon(
QgsApplication.getThemeIcon("/processingAlgorithm.svg"))
self.iface.registerMainWindowAction(self.toolboxAction,
QKeySequence('Ctrl+Alt+T').toString(QKeySequence.NativeText))
self.toolboxAction.toggled.connect(self.openToolbox)
self.iface.attributesToolBar().insertAction(self.iface.actionOpenStatisticalSummary(), self.toolboxAction)
self.menu.addAction(self.toolboxAction)
self.modelerAction = QAction(
QgsApplication.getThemeIcon("/processingModel.svg"),
QCoreApplication.translate('ProcessingPlugin', '&Graphical Modeler…'), self.iface.mainWindow())
self.modelerAction.setObjectName('modelerAction')
self.modelerAction.triggered.connect(self.openModeler)
self.iface.registerMainWindowAction(self.modelerAction,
QKeySequence('Ctrl+Alt+G').toString(QKeySequence.NativeText))
self.menu.addAction(self.modelerAction)
self.historyAction = QAction(
QgsApplication.getThemeIcon("/mIconHistory.svg"),
QCoreApplication.translate('ProcessingPlugin', '&History…'), self.iface.mainWindow())
self.historyAction.setObjectName('historyAction')
self.historyAction.triggered.connect(self.openHistory)
self.iface.registerMainWindowAction(self.historyAction,
QKeySequence('Ctrl+Alt+H').toString(QKeySequence.NativeText))
self.menu.addAction(self.historyAction)
self.toolbox.processingToolbar.addAction(self.historyAction)
self.resultsAction = QAction(
QgsApplication.getThemeIcon("/processingResult.svg"),
self.tr('&Results Viewer'), self.iface.mainWindow())
self.resultsAction.setObjectName('resultsViewer')
self.resultsAction.setCheckable(True)
self.iface.registerMainWindowAction(self.resultsAction,
QKeySequence('Ctrl+Alt+R').toString(QKeySequence.NativeText))
self.menu.addAction(self.resultsAction)
self.toolbox.processingToolbar.addAction(self.resultsAction)
self.resultsDock.visibilityChanged.connect(self.resultsAction.setChecked)
self.resultsAction.toggled.connect(self.resultsDock.setUserVisible)
self.toolbox.processingToolbar.addSeparator()
self.editInPlaceAction = QAction(
QgsApplication.getThemeIcon("/mActionProcessSelected.svg"),
self.tr('Edit Features In-Place'), self.iface.mainWindow())
self.editInPlaceAction.setObjectName('editInPlaceFeatures')
self.editInPlaceAction.setCheckable(True)
self.editInPlaceAction.toggled.connect(self.editSelected)
self.menu.addAction(self.editInPlaceAction)
self.toolbox.processingToolbar.addAction(self.editInPlaceAction)
self.toolbox.processingToolbar.addSeparator()
self.optionsAction = QAction(
QgsApplication.getThemeIcon("/mActionOptions.svg"),
self.tr('Options'), self.iface.mainWindow())
self.optionsAction.setObjectName('optionsAction')
self.optionsAction.triggered.connect(self.openProcessingOptions)
self.toolbox.processingToolbar.addAction(self.optionsAction)
menuBar = self.iface.mainWindow().menuBar()
menuBar.insertMenu(
self.iface.firstRightStandardMenu().menuAction(), self.menu)
self.menu.addSeparator()
initializeMenus()
createMenus()
createButtons()
# In-place editing button state sync
self.iface.currentLayerChanged.connect(self.sync_in_place_button_state)
self.iface.mapCanvas().selectionChanged.connect(self.sync_in_place_button_state)
self.iface.actionToggleEditing().triggered.connect(partial(self.sync_in_place_button_state, None))
self.sync_in_place_button_state()
def sync_in_place_button_state(self, layer=None):
"""Synchronise the button state with layer state"""
if layer is None:
layer = self.iface.activeLayer()
old_enabled_state = self.editInPlaceAction.isEnabled()
new_enabled_state = layer is not None and layer.type() == QgsMapLayerType.VectorLayer
self.editInPlaceAction.setEnabled(new_enabled_state)
if new_enabled_state != old_enabled_state:
self.toolbox.set_in_place_edit_mode(new_enabled_state and self.editInPlaceAction.isChecked())
def openProcessingOptions(self):
self.iface.showOptionsDialog(self.iface.mainWindow(), currentPage='processingOptions')
def unload(self):
self.toolbox.setVisible(False)
self.iface.removeDockWidget(self.toolbox)
self.iface.attributesToolBar().removeAction(self.toolboxAction)
self.resultsDock.setVisible(False)
self.iface.removeDockWidget(self.resultsDock)
self.toolbox.deleteLater()
self.menu.deleteLater()
# also delete temporary help files
folder = tempHelpFolder()
if QDir(folder).exists():
shutil.rmtree(folder, True)
self.iface.unregisterMainWindowAction(self.toolboxAction)
self.iface.unregisterMainWindowAction(self.modelerAction)
self.iface.unregisterMainWindowAction(self.historyAction)
self.iface.unregisterMainWindowAction(self.resultsAction)
self.iface.unregisterOptionsWidgetFactory(self.options_factory)
self.iface.deregisterLocatorFilter(self.locator_filter)
self.iface.deregisterLocatorFilter(self.edit_features_locator_filter)
self.iface.unregisterCustomDropHandler(self.drop_handler)
QgsApplication.dataItemProviderRegistry().removeProvider(self.item_provider)
removeButtons()
removeMenus()
Processing.deinitialize()
def openToolbox(self, show):
self.toolbox.setUserVisible(show)
def toolboxVisibilityChanged(self, visible):
self.toolboxAction.setChecked(visible)
def openModeler(self):
dlg = ModelerDialog.create()
dlg.update_model.connect(self.updateModel)
dlg.show()
def updateModel(self):
model_provider = QgsApplication.processingRegistry().providerById('model')
model_provider.refreshAlgorithms()
def openResults(self):
if self.resultsDock.isVisible():
self.resultsDock.hide()
else:
self.resultsDock.show()
def openHistory(self):
dlg = HistoryDialog()
dlg.exec_()
def tr(self, message):
return QCoreApplication.translate('ProcessingPlugin', message)
def editSelected(self, enabled):
self.toolbox.set_in_place_edit_mode(enabled)
| gpl-2.0 | 4,166,624,066,782,921,000 | 38.554318 | 114 | 0.661549 | false | 4.400372 | false | false | false |
apg/phizer | phizer/cache.py | 1 | 1726 | try:
from collections import OrderedDict
except ImportError:
from phizer.ordereddict import OrderedDict
import operator
import sys
from collections import namedtuple
cached_image = namedtuple('cached_image',
['body', 'content_type', 'size'])
class SizedLRUCache(object):
def __init__(self, max_size=None):
self._max_size = max_size
self._current_size = 0
self._cache = OrderedDict()
def get(self, key):
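        # Pop and re-insert the entry so it moves to the most-recently-used end
        # of the OrderedDict; raises KeyError if the key is not cached.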
value = self._cache.pop(key)
self._cache[key] = value
return value
    def put(self, key, value):
        # If the key is already cached, subtract the old entry's size first so
        # replacing a value does not double-count it.
        old_value = self._cache.pop(key, None)
        if old_value is not None:
            self._update_current_size(old_value, operator.sub)
        self._update_current_size(value)

        if self._max_size is not None and self._current_size > self._max_size:
            self._purge()
        self._cache[key] = value
    def delete(self, key):
        value = self._cache.pop(key)
        self._update_current_size(value, operator.sub)
def touch(self, key):
"""'uses' item at key, thereby making it recently used
"""
value = self._cache.pop(key)
self._cache[key] = value
@property
def size(self):
return self._current_size
def _update_current_size(self, value, f=operator.add):
self._current_size = f(self._current_size, sys.getsizeof(value))
def __len__(self):
"""Returns the number of items in the cache"""
return len(self._cache)
def _purge(self):
"""Purges least recently used items until less than `max_size`
"""
if self._max_size is None:
return
while self._current_size > self._max_size and len(self) > 0:
            # popitem(last=False) pops from the front of the OrderedDict, i.e. the
            # least recently used entry (get/put/touch all re-insert at the end).
            key, value = self._cache.popitem(last=False)
self._update_current_size(value, operator.sub)
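# ---------------------------------------------------------------------------
# Illustrative usage sketch (added for clarity; not part of the original
# module). The keys and payloads below are invented purely for demonstration.
if __name__ == "__main__":
    cache = SizedLRUCache(max_size=4096)
    cache.put("thumb-1", cached_image(body=b"x" * 512, content_type="image/jpeg", size=(32, 32)))
    cache.put("thumb-2", cached_image(body=b"y" * 512, content_type="image/jpeg", size=(32, 32)))
    cache.touch("thumb-1")  # mark "thumb-1" as recently used
    print(cache.get("thumb-2").content_type)
    print(cache.size)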
| gpl-3.0 | 3,875,298,408,586,498,600 | 25.553846 | 72 | 0.588065 | false | 3.861298 | false | false | false |
ethz-asl/segmatch | tools/remap_bag.py | 1 | 1995 | #!/usr/bin/python
import rospy
import rosbag
import os
import sys
import argparse
import yaml
def remove_tf(inbag,outbag,prefix):
print ' Processing input bagfile: ', inbag
print ' Writing to output bagfile: ', outbag
print ' Adding prefix: ', prefix
outbag = rosbag.Bag(outbag,'w')
for topic, msg, t in rosbag.Bag(inbag,'r').read_messages():
if topic == "/tf":
new_transforms = []
for transform in msg.transforms:
if transform.header.frame_id[0] == '/':
transform.header.frame_id = prefix + transform.header.frame_id
else:
transform.header.frame_id = prefix + '/' + transform.header.frame_id
if transform.child_frame_id[0] == '/':
transform.child_frame_id = prefix + transform.child_frame_id
else:
transform.child_frame_id = prefix + '/' + transform.child_frame_id
new_transforms.append(transform)
msg.transforms = new_transforms
else:
try:
if msg.header.frame_id[0] == '/':
msg.header.frame_id = prefix + msg.header.frame_id
else:
msg.header.frame_id = prefix + '/' + msg.header.frame_id
except:
pass
if topic[0] == '/':
topic = prefix + topic
else:
topic = prefix + '/' + topic
outbag.write(topic, msg, t)
print 'Closing output bagfile and exit...'
outbag.close();
if __name__ == "__main__":
parser = argparse.ArgumentParser(
      description='adds the given prefix to all topic names and frame_ids (including /tf transforms) in a bagfile.')
parser.add_argument('-i', metavar='INPUT_BAGFILE', required=True, help='input bagfile')
parser.add_argument('-o', metavar='OUTPUT_BAGFILE', required=True, help='output bagfile')
parser.add_argument('-p', metavar='PREFIX', required=True, help='prefix to add to the frame ids')
args = parser.parse_args()
try:
remove_tf(args.i,args.o,args.p)
except Exception, e:
import traceback
traceback.print_exc() | bsd-3-clause | -5,955,480,741,586,399,000 | 30.68254 | 136 | 0.637093 | false | 3.487762 | false | false | false |
AshwinChandlapur/robocomp | tools/rcmonitor/examples/rois.py | 5 | 2917 | # -*- coding: utf-8 -*-
# Copyright (C) 2010 by RoboLab - University of Extremadura
#
# This file is part of RoboComp
#
# RoboComp is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RoboComp is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with RoboComp. If not, see <http://www.gnu.org/licenses/>.
#
import Ice, sys, math, traceback
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.Qt import *
class C(QWidget):
def __init__(self, endpoint, modules):
QWidget.__init__(self)
self.ic = Ice.initialize(sys.argv)
self.mods = modules
self.prx = self.ic.stringToProxy(endpoint)
self.proxy = self.mods['RoboCompRoimant'].RoimantPrx.checkedCast(self.prx)
self.roiList = []
self.job()
def job(self):
# Remote procedure call
output = self.proxy.getROIList()
# Set class copies
self.roiList = output[0]
self.bState = output[1].bState
# Print number of ROIs
print len(self.roiList)
def paintEvent(self, event=None):
xOff = self.width()/2.
yOff = self.height()/2.
xPos = 0
yPos = 0
div = 20.
painter = QPainter(self)
painter.setRenderHint(QPainter.Antialiasing, True)
# Draw grid
for i in range(max(xOff,yOff)/30):
x = 30*i
painter.drawLine(xOff+x, 0, xOff+x, self.height())
painter.drawLine(xOff-x, 0, xOff-x, self.height())
painter.drawLine(0, yOff+x, self.width(), yOff+x)
painter.drawLine(0, yOff-x, self.width(), yOff-x)
# Draw ROIs
painter.setPen(Qt.red)
painter.setBrush(Qt.red)
for roi in self.roiList:
if not roi.casado: continue
try:
xPos = int((roi.z3D/div)+xOff-3)
yPos = int((roi.x3D/div)+yOff-3)
except:
pass
if type(xPos) == type(yPos) and type(xPos) == type(int()):
try:
painter.drawEllipse(int(xPos), int(yPos), 6, 6)
except:
print 'ROI :-(', int(xPos)
print type(xPos)
print type(int(xPos))
print roi.x3D, roi.x3D
traceback.print_stack()
# Draw base
painter.setPen(Qt.blue)
painter.setBrush(Qt.blue)
try:
xPos = int( (self.bState.z/div)+xOff-9)
yPos = int( (self.bState.x/div)+yOff-9)
start = int(((-self.bState.alfa*180/math.pi)-180-20)*16)
except:
pass
if type(xPos) == type(yPos) and type(xPos) == type(start) and type(xPos) == type(int()):
try:
painter.drawPie(xPos, yPos, 18, 18, start, 20*2*16)
except:
print 'BASE :-('
print type(xPos-7)
print self.bState.z, self.bState.x, self.bState.alfa
painter.end()
painter = None
| gpl-3.0 | -6,665,536,062,208,809,000 | 29.072165 | 90 | 0.66541 | false | 2.698427 | false | false | false |
willametteuniversity/webcirc2 | website/tests/registration_tests.py | 1 | 10731 | '''
This file contains tests that test the new user registration system.
'''
from os.path import abspath, dirname
import sys
project_dir = abspath(dirname(dirname(__file__)))
sys.path.insert(0, project_dir)
from django.core.urlresolvers import resolve
from django.test import TestCase
from django.test.client import Client
from django.http import HttpRequest
# Import all of our views for testing
from website.views.views import *
# Same for models
from website.models import *
from django.contrib.auth.models import User
class RegistrationFormTests(TestCase):
def test_register_new_user_url_resolves_to_new_user_view(self):
'''
This tests that the register new user URL resolves to the proper
function.
'''
found = resolve(u'/registerNewUser/')
self.assertEqual(found.func, registerNewUser)
def test_register_button_returns_correct_form(self):
'''
This tests that the register button returns a form containing
at least the proper form inputs.
'''
request = HttpRequest()
response = registerNewUser(request)
# Making sure the form title is there, and that it at least has all
# proper input fields and registration button.
self.assertIn(u'Operator Registration Form', response.content)
self.assertIn(u'inputUsername', response.content)
self.assertIn(u'inputEmail', response.content)
self.assertIn(u'inputPassword', response.content)
self.assertIn(u'inputConfirmPassword', response.content)
self.assertIn(u'submitRegistrationBtn', response.content)
def test_register_new_user(self):
'''
This runs some related tests in sequence because we need the created user from the first
function to be in the database to test the subsequent functions.
'''
self.submit_registration_creates_new_user()
self.register_new_operator_with_existing_username_fails()
self.register_new_operator_with_existing_username_but_different_case_fails()
self.register_new_operator_with_existing_email_fails()
def submit_registration_creates_new_user(self):
'''
This simulates a POST request to create a new user and checks that the URL is good
and that we get back expected responses.
'''
c = Client()
response = c.post(u'/registerNewUser/', {u'username': u'testuser',
u'email': u'[email protected]',
u'password': u'testpassword',
u'confirmPassword': u'testpassword'})
# Let's make sure it created the User in the database...
testUser = User.objects.get(username=u'testuser')
self.assertEqual(testUser.username, u'testuser')
self.assertEqual(testUser.email, u'[email protected]')
# Make sure the function returns a valid response
self.assertEqual(200, response.status_code)
# Now let's check that the server returned some HTML with a success message
self.assertIn(u'succeeded', response.content)
def register_new_operator_with_existing_username_fails(self):
'''
This attempts to register a user with the username of an already existing user, namely
the testuser from the test above. It should fail and provide an error message.
'''
c = Client()
response = c.post(u'/registerNewUser/', {u'username': u'testuser',
u'email': u'[email protected]',
u'password': u'testpassword',
u'confirmPassword': u'testpassword'})
self.assertIn(u'failed', response.content)
def register_new_operator_with_existing_username_but_different_case_fails(self):
'''
This attempts to register a user with the username of an already existing user, namely
the testuser from the test above, but with a different case. It should fail and provide
an error message.
'''
c = Client()
response = c.post(u'/registerNewUser/', {u'username': u'testUser',
u'email': u'[email protected]',
u'password': u'testpassword',
u'confirmPassword': u'testpassword'})
self.assertIn(u'failed', response.content)
def register_new_operator_with_existing_email_fails(self):
'''
This attempts to register a user with a valid username, but with an already existing
e-mail, namely from the test above. It should fail and provide an error message.
'''
c = Client()
response = c.post(u'/registerNewUser/', {u'username': u'testuser1',
u'email': u'[email protected]',
u'password': u'testpassword',
u'confirmPassword': u'testpassword'})
self.assertIn(u'failed', response.content)
def test_new_operator_with_mismatched_passwords_fails(self):
'''
This attempts to create a user with a password and confirm password that
do not match.
'''
c = Client()
response = c.post(u'/registerNewUser/', {u'username': u'testuser',
u'email': u'[email protected]',
u'password': u'testpassword',
u'confirmPassword': u'testpassword1'})
self.assertIn(u'failed', response.content)
def test_too_long_username_in_registration_fails(self):
'''
        This attempts to test what happens when the user tries to register a username that is too long.
'''
c = Client()
response = c.post(u'/registerNewUser/', {u'username': u'thisisaverylongusernamethatshouldnotbeallowed',
u'email': u'[email protected]',
u'password': u'testpassword',
u'confirmPassword': u'testpassword'})
self.assertIn(u'failed', response.content)
def test_username_with_invalid_characters_fails(self):
'''
This attempts to register a username with invalid characters. It should not let the user
register and provide an error message.
'''
# Cases to test:
# 1. Username contains one or more spaces
# 2. Username contains non-alphanumeric characters
c = Client()
response = c.post(u'/registerNewUser/', {u'username': u'testuser!',
u'email': u'[email protected]',
u'password': u'testpassword',
u'confirmPassword': u'testpassword'})
self.assertIn(u'failed', response.content)
response = c.post(u'/registerNewUser/', {u'username': u'testuser@',
u'email': u'testuser@@nothing.com',
u'password': u'testpassword',
u'confirmPassword': u'testpassword'})
self.assertIn(u'failed', response.content)
response = c.post(u'/registerNewUser/', {u'username': u'testuser$',
u'email': u'[email protected]',
u'password': u'testpassword',
u'confirmPassword': u'testpassword'})
self.assertIn(u'failed', response.content)
response = c.post(u'/registerNewUser/', {u'username': u'test user',
u'email': u'test [email protected]',
u'password': u'testpassword',
u'confirmPassword': u'testpassword'})
self.assertIn(u'failed', response.content)
def test_registration_without_username_fails(self):
'''
This attempts to register without a username.
'''
c = Client()
response = c.post(u'/registerNewUser/', {u'email': u'[email protected]',
u'password': u'testpassword',
u'confirmPassword': u'testpassword'})
self.assertIn(u'failed', response.content)
def test_registration_without_email_fails(self):
'''
This attempts to register without an e-mail.
'''
c = Client()
response = c.post(u'/registerNewUser/', {u'username': u'testuser',
u'password': u'testpassword',
u'confirmPassword': u'testpassword'})
self.assertIn(u'failed', response.content)
def test_registration_without_password_fails(self):
'''
This tests registration without sending a password
'''
c = Client()
response = c.post(u'/registerNewUser/', {u'username': u'testuser',
u'email': u'[email protected]',
u'confirmPassword': u'testpassword'})
self.assertIn(u'failed', response.content)
def test_registration_without_confirm_password_fails(self):
'''
This tests registration without sending a password confirmation.
'''
c = Client()
response = c.post(u'/registerNewUser/', {u'username': u'testuser',
u'email': u'[email protected]',
u'password': u'testpassword'})
self.assertIn(u'failed', response.content)
def test_registration_get_returns_501(self):
'''
This tests that making a GET request to /registerNewUser/ returns a proper 501 page/status
'''
c = Client()
response = c.get(u'/registerNewUser/', {u'username': u'testuser',
u'email': u'[email protected]',
u'password': u'testpassword'})
self.assertEqual(501, response.status_code)
| apache-2.0 | 774,319,112,822,900,600 | 44.088235 | 120 | 0.546641 | false | 4.803491 | true | false | false |
expfactory/expfactory | expfactory/utils.py | 1 | 8093 | """
utils.py: part of expfactory package
Copyright (c) 2017-2021, Vanessa Sochat
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import errno
from subprocess import Popen, PIPE, STDOUT
from expfactory.logger import bot
import shutil
import json
import tempfile
import sys
import os
import re
################################################################################
# io utils
################################################################################
def get_installdir():
return os.path.dirname(os.path.abspath(__file__))
def get_templatedir():
base = get_installdir()
return "%s/templates" % (base)
def get_viewsdir(base=None):
"""views might be written to a secondary expfactory install, which can
be specified with base"""
if base is None:
base = get_installdir()
return "%s/views" % (base)
def find_subdirectories(basepath):
"""
Return directories (and sub) starting from a base
"""
directories = []
for root, dirnames, filenames in os.walk(basepath):
new_directories = [d for d in dirnames if d not in directories]
directories = directories + new_directories
return directories
def find_directories(root, fullpath=True):
"""
Return directories at one level specified by user
(not recursive)
"""
directories = []
for item in os.listdir(root):
# Don't include hidden directories
if not re.match("^[.]", item):
if os.path.isdir(os.path.join(root, item)):
if fullpath:
directories.append(os.path.abspath(os.path.join(root, item)))
else:
directories.append(item)
return directories
def copy_directory(src, dest, force=False):
"""Copy an entire directory recursively"""
if os.path.exists(dest) and force is True:
shutil.rmtree(dest)
try:
shutil.copytree(src, dest)
except OSError as e:
# If the error was caused because the source wasn't a directory
if e.errno == errno.ENOTDIR:
shutil.copy(src, dest)
else:
bot.error("Directory not copied. Error: %s" % e)
sys.exit(1)
def mkdir_p(path):
"""mkdir_p attempts to get the same functionality as mkdir -p
:param path: the path to create.
"""
try:
os.makedirs(path)
except OSError as e:
if e.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
bot.error("Error creating path %s, exiting." % path)
sys.exit(1)
def clone(url, tmpdir=None):
"""clone a repository from Github"""
if tmpdir is None:
tmpdir = tempfile.mkdtemp()
name = os.path.basename(url).replace(".git", "")
dest = "%s/%s" % (tmpdir, name)
return_code = os.system("git clone %s %s" % (url, dest))
if return_code == 0:
return dest
bot.error("Error cloning repo.")
sys.exit(return_code)
def run_command(cmd):
"""run_command uses subprocess to send a command to the terminal.
:param cmd: the command to send, should be a list for subprocess
"""
output = Popen(cmd, stderr=STDOUT, stdout=PIPE)
t = output.communicate()[0], output.returncode
output = {"message": t[0], "return_code": t[1]}
return output
################################################################################
# templates
################################################################################
def get_template(name, base=None):
"""read in and return a template file"""
# If the file doesn't exist, assume relative to base
template_file = name
if not os.path.exists(template_file):
if base is None:
base = get_templatedir()
template_file = "%s/%s" % (base, name)
# Then try again, if it still doesn't exist, bad name
if os.path.exists(template_file):
with open(template_file, "r") as filey:
template = "".join(filey.readlines())
return template
bot.error("%s does not exist." % template_file)
def sub_template(template, template_tag, substitution):
"""make a substitution for a template_tag in a template"""
template = template.replace(template_tag, substitution)
return template
def save_template(output_file, snippet, mode="w", base=None):
if base is None:
base = get_templatedir()
with open(output_file, mode) as filey:
filey.writelines(snippet)
return output_file
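# Illustrative chain of the three template helpers above (the file names and the
# template tag are hypothetical, not taken from a real expfactory template):
#
#   template = get_template("experiment.html")
#   template = sub_template(template, "{{exp_id}}", "test-task")
#   save_template("index.html", template)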
################################################################################
# JSON
################################################################################
def read_json(filename, mode="r"):
with open(filename, mode) as filey:
data = json.load(filey)
return data
def write_json(json_obj, filename, mode="w"):
with open(filename, mode) as filey:
filey.write(
json.dumps(json_obj, sort_keys=True, indent=4, separators=(",", ": "))
)
return filename
def read_file(filename, mode="r"):
with open(filename, mode) as filey:
data = filey.read()
return data
def write_file(filename, content, mode="w"):
with open(filename, mode) as filey:
filey.writelines(content)
return filename
def get_post_fields(request):
"""parse through a request, and return fields from post in a dictionary"""
fields = dict()
for field, value in request.form.items():
fields[field] = value
return fields
################################################################################
# environment / options
################################################################################
def convert2boolean(arg):
"""convert2boolean is used for environmental variables
that must be returned as boolean"""
if not isinstance(arg, bool):
return arg.lower() in ("yes", "true", "t", "1", "y")
return arg
def getenv(variable_key, default=None, required=False, silent=True):
"""getenv will attempt to get an environment variable. If the variable
is not found, None is returned.
:param variable_key: the variable name
:param required: exit with error if not found
:param silent: Do not print debugging information for variable
"""
variable = os.environ.get(variable_key, default)
if variable is None and required:
bot.error("Cannot find environment variable %s, exiting." % variable_key)
sys.exit(1)
if not silent:
if variable is not None:
bot.verbose2("%s found as %s" % (variable_key, variable))
else:
bot.verbose2("%s not defined (None)" % variable_key)
return variable
| bsd-3-clause | -5,226,687,665,466,102,000 | 30.737255 | 82 | 0.612628 | false | 4.341738 | false | false | false |
oogles/django-goodies | djem/forms/bases.py | 2 | 1058 | from django import forms
class CommonInfoForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
user = kwargs.pop('user', None)
super(CommonInfoForm, self).__init__(*args, **kwargs)
# User is required for bound forms
if self.is_bound and not user:
raise TypeError('Bound {0} instances require a "user" argument.'.format(self.__class__.__name__))
# Set self.user regardless of whether or not the form is bound. Child
# forms may well require the user when the form is unbound as well.
self.user = user
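    # Illustrative usage with a hypothetical ModelForm subclass (not part of
    # this module):
    #
    #   class ArticleForm(CommonInfoForm):
    #       class Meta:
    #           model = Article
    #           fields = ['title']
    #
    #   form = ArticleForm(request.POST, user=request.user)  # bound forms require `user`
    #   if form.is_valid():
    #       article = form.save()  # internally calls instance.save(self.user)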
def save(self, commit=True):
# Call super.save() with commit=False so .save() can be called on the
# instance with the required user argument
instance = super(CommonInfoForm, self).save(commit=False)
if commit:
instance.save(self.user)
self.save_m2m()
del self.save_m2m # pretend commit=False was never used
return instance
| bsd-3-clause | 1,173,975,838,051,884,500 | 33.129032 | 109 | 0.584121 | false | 4.483051 | false | false | false |
showerst/openstates | scripts/setup_openstates_ubuntu.py | 1 | 3711 | '''
This script sets up a virtualenv with openstates on ubunt.
usage: python setup_openstates_ubuntu.py myvirtualenv [whenIputmycode]
If you don't specify a second argument, the code goes in the virtualenv.
'''
import sys
import os
from os import chdir as cd
from os.path import join, abspath
import subprocess
import logging
# Logging config
logger = logging.getLogger('[openstates-installer]')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
formatter = logging.Formatter('%(name)s %(asctime)s - %(message)s',
datefmt='%H:%M:%S')
ch.setFormatter(formatter)
logger.addHandler(ch)
packages = {
# The packages are required for use of lxml and git.
'core': '''
libxml2-dev
python-dev
libxslt1-dev
git'''.split(),
}
# ---------------------------------------------------------------------------
# Utility functions
def run(command, check=False):
logger.info('running "%s"' % command)
if check:
return subprocess.check_output(command, shell=True)
else:
subprocess.call(command, shell=True)
def run_each(*commands):
for c in commands:
run(c)
def package_install(package, update=False):
"""Installs the given package/list of package, optionnaly updating
the package database."""
if update:
run("sudo apt-get --yes update")
if type(package) in (list, tuple):
package = " ".join(package)
run("sudo apt-get --yes install %s" % (package))
def package_ensure(package):
"""Tests if the given package is installed, and installes it in
case it's not already there. Loosely stolen from cuisine."""
cmd = "dpkg-query -W -f='${Status}' %s ; true"
status = run(cmd % package, check=True)
if status.find("not-installed") != -1 or status.find("installed") == -1:
package_install(package)
return False
else:
return True
def create_virtualenv(ENV):
'Create the virtualenv.'
run_each(
('wget -nc http://pypi.python.org/packages/source/v/virtualenv'
'/virtualenv-1.7.tar.gz#md5=dcc105e5a3907a9dcaa978f813a4f526'),
'tar -zxvf virtualenv-1.7.tar.gz ',
'python virtualenv-1.7/virtualenv.py %s' % ENV,
)
def gitclone(repo, setup_arg='install'):
cd(CODE)
# Clone the code.
run('git clone %s' % repo)
# Install requirements.
_, folder = os.path.split(repo)
folder, _ = os.path.splitext(folder)
requirements = join(CODE, folder, 'requirements.txt')
try:
with open(requirements):
pass
except IOError:
pass
else:
run('%s install -r %s' % (pip, requirements))
# Setup.
cd(folder)
run('%s setup.py %s' % (python, setup_arg))
def setup_openstates():
for package in packages['core']:
package_ensure(package)
create_virtualenv(ENV)
# Get openstates.
gitclone('git://github.com/sunlightlabs/openstates.git')
# Uninstall billy.
run('%s uninstall billy' % pip)
# Clone billy, get requirements, and run setup.py develop
gitclone('git://github.com/sunlightlabs/billy.git', 'develop')
def setup_mysql():
package_ensure('mysql-server')
run("sudo apt-get build-dep python-mysqldb")
run("pip install MySQL-python")
if __name__ == "__main__":
try:
ENV, CODE = map(abspath, sys.argv[1:3])
except ValueError:
ENV = CODE = abspath(sys.argv[1])
for path in [ENV, CODE]:
try:
os.makedirs(ENV)
os.makedirs(CODE)
except OSError:
pass
pip = join(ENV, 'bin', 'pip')
python = join(ENV, 'bin', 'python')
setup_openstates()
| gpl-3.0 | 2,655,673,413,212,256,000 | 23.414474 | 77 | 0.606575 | false | 3.606414 | false | false | false |
lmazuel/azure-sdk-for-python | azure-mgmt-machinelearningcompute/azure/mgmt/machinelearningcompute/models/update_system_services_response.py | 2 | 1875 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class UpdateSystemServicesResponse(Model):
"""Response of the update system services API.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar update_status: Update status. Possible values include: 'Unknown',
'Updating', 'Creating', 'Deleting', 'Succeeded', 'Failed', 'Canceled'
:vartype update_status: str or
~azure.mgmt.machinelearningcompute.models.OperationStatus
:ivar update_started_on: The date and time when the last system services
update was started.
:vartype update_started_on: datetime
:ivar update_completed_on: The date and time when the last system services
update completed.
:vartype update_completed_on: datetime
"""
_validation = {
'update_status': {'readonly': True},
'update_started_on': {'readonly': True},
'update_completed_on': {'readonly': True},
}
_attribute_map = {
'update_status': {'key': 'updateStatus', 'type': 'str'},
'update_started_on': {'key': 'updateStartedOn', 'type': 'iso-8601'},
'update_completed_on': {'key': 'updateCompletedOn', 'type': 'iso-8601'},
}
def __init__(self):
super(UpdateSystemServicesResponse, self).__init__()
self.update_status = None
self.update_started_on = None
self.update_completed_on = None
| mit | 2,685,939,778,419,291,600 | 37.265306 | 80 | 0.620267 | false | 4.411765 | false | false | false |
ForTozs/py3radar | setup.py | 1 | 3447 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2012 - 2013
# Matías Herranz <[email protected]>
# Joaquín Tita <[email protected]>
#
# https://github.com/PyRadar/pyradar
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
#===============================================================================
# DOCS
#===============================================================================
"""This file is for distribute pyradar with setuptools
"""
#===============================================================================
# IMPORTS
#===============================================================================
import sys
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
import pyradar
#===============================================================================
# CONSTANTS
#===============================================================================
PYPI_REQUIRE = [
"Pillow",
"numpy",
"matplotlib",
"scipy"
]
MANUAL_REQUIRE = {
"gdal" : "http://gdal.org/",
}
# suggested but not important
SUGESTED = {
}
#===============================================================================
# WARNINGS FOR MANUAL REQUIRES AND SUGGESTED
#===============================================================================
def validate_modules(requires):
not_found = []
for name, url in list(requires.items()):
try:
__import__(name)
except ImportError:
not_found.append("{} requires '{}' ({})".format(pyradar.PRJ,
name, url))
return not_found
def print_not_found(not_found, msg):
limits = "=" * max(list(map(len, not_found)))
print(("\n{}\n{}\n{}\n{}\n".format(msg, limits, "\n".join(not_found), limits)))
not_found = validate_modules(MANUAL_REQUIRE)
if not_found:
print_not_found(not_found, "ERROR")
sys.exit(1)
not_found = validate_modules(SUGESTED)
if not_found:
print_not_found(not_found, "WARNING")
#===============================================================================
# FUNCTIONS
#===============================================================================
setup(
name=pyradar.PRJ.lower(),
version=pyradar.STR_VERSION,
description=pyradar.SHORT_DESCRIPTION,
author=pyradar.AUTHOR,
author_email=pyradar.EMAIL,
url=pyradar.URL,
license=pyradar.LICENSE,
keywords=pyradar.KEYWORDS,
classifiers=pyradar.CLASSIFIERS,
packages=[pkg for pkg in find_packages() if pkg.startswith("pyradar")],
include_package_data=True,
package_data={
'ExampleImages': ['pyradar/simulate/ExampleImages/*'],
'DemoSet' : ['pyradar/simulate/DemoSet/*'],
},
py_modules=["ez_setup"],
install_requires=PYPI_REQUIRE,
)
| lgpl-3.0 | -7,754,382,193,126,658,000 | 27.708333 | 83 | 0.509144 | false | 4.206349 | false | false | false |
partofthething/home-assistant | homeassistant/components/freebox/config_flow.py | 1 | 3656 | """Config flow to configure the Freebox integration."""
import logging
from freebox_api.exceptions import AuthorizationError, HttpRequestError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_HOST, CONF_PORT
from .const import DOMAIN # pylint: disable=unused-import
from .router import get_api
_LOGGER = logging.getLogger(__name__)
class FreeboxFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Initialize Freebox config flow."""
self._host = None
self._port = None
def _show_setup_form(self, user_input=None, errors=None):
"""Show the setup form to the user."""
if user_input is None:
user_input = {}
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_HOST, default=user_input.get(CONF_HOST, "")): str,
vol.Required(CONF_PORT, default=user_input.get(CONF_PORT, "")): int,
}
),
errors=errors or {},
)
async def async_step_user(self, user_input=None):
"""Handle a flow initiated by the user."""
errors = {}
if user_input is None:
return self._show_setup_form(user_input, errors)
self._host = user_input[CONF_HOST]
self._port = user_input[CONF_PORT]
# Check if already configured
await self.async_set_unique_id(self._host)
self._abort_if_unique_id_configured()
return await self.async_step_link()
async def async_step_link(self, user_input=None):
"""Attempt to link with the Freebox router.
Given a configured host, will ask the user to press the button
to connect to the router.
"""
if user_input is None:
return self.async_show_form(step_id="link")
errors = {}
fbx = await get_api(self.hass, self._host)
try:
# Open connection and check authentication
await fbx.open(self._host, self._port)
# Check permissions
await fbx.system.get_config()
await fbx.lan.get_hosts_list()
await self.hass.async_block_till_done()
# Close connection
await fbx.close()
return self.async_create_entry(
title=self._host,
data={CONF_HOST: self._host, CONF_PORT: self._port},
)
except AuthorizationError as error:
_LOGGER.error(error)
errors["base"] = "register_failed"
except HttpRequestError:
_LOGGER.error("Error connecting to the Freebox router at %s", self._host)
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
"Unknown error connecting with Freebox router at %s", self._host
)
errors["base"] = "unknown"
return self.async_show_form(step_id="link", errors=errors)
async def async_step_import(self, user_input=None):
"""Import a config entry."""
return await self.async_step_user(user_input)
async def async_step_zeroconf(self, discovery_info: dict):
"""Initialize flow from zeroconf."""
host = discovery_info["properties"]["api_domain"]
port = discovery_info["properties"]["https_port"]
return await self.async_step_user({CONF_HOST: host, CONF_PORT: port})
| mit | -216,270,256,415,174,750 | 31.353982 | 88 | 0.594365 | false | 4.14983 | true | false | false |
Sevenops/TurkAnime-Downloader | turkanime.py | 1 | 2485 | import os
import re
import httpx
import base64
from urllib.parse import unquote
import hashlib
cookies = {
"yew490": "1",
"_ga": "GA1.2.284686093.1564216182",
"_gid": "GA1.2.1256976049.1564216182",
"__PPU_SESSION_1_1683592_false": "1564216202929|1|1564216202929|1|1",
"_gat": "1",
}
headers = {
"Connection": "keep-alive",
"Cache-Control": "max-age=0",
"Origin": "http://www.turkanime.net",
"Upgrade-Insecure-Requests": "1",
"Content-Type": "application/x-www-form-urlencoded",
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
"Referer": "http://www.turkanime.net/",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "tr-TR,tr;q=0.9,en-US;q=0.8,en;q=0.7",
}
class TurkAnime:
url = "http://www.turkanime.net"
def anime_ara(self, ara):
data = {"arama": ara}
veri = httpx.post(
self.url + "/arama", headers=headers, cookies=cookies, data=data
).content.decode("utf-8")
liste = []
r = re.findall(
'<div class="panel-ust-ic"><div class="panel-title"><a href="\/\/www\.turkanime\.net\/anime\/(.*?)" (.*?)>(.*?)<\/a>',
veri,
)
for slug, _, title in r:
liste.append([title, slug])
if len(liste) == 0:
try:
slug = veri.split('window.location = "anime/')[1].split('"')[0]
liste.append([ara, slug])
except:
pass
return liste
def bolumler(self, slug):
veri = httpx.get(
self.url + "/anime/" + slug, headers=headers, cookies=cookies
).content.decode("utf8")
h = headers.copy()
h.update({"X-Requested-With": "XMLHttpRequest", "Accept": "*/*"})
animeId = veri.split("ajax/bolumler&animeId=")[1].split('"')[0]
liste = []
a = httpx.get(
f"http://www.turkanime.net/ajax/bolumler&animeId={animeId}",
headers=h,
cookies=cookies,
).content.decode("utf8")
r = re.findall(
'<a href="\/\/www\.turkanime\.net\/video\/(.*?)" (.*?)><span class="bolumAdi">(.*?)<\/span><\/a>',
a,
)
for slug, _, title in r:
liste.append([title, slug])
return liste | apache-2.0 | 995,729,462,240,999,400 | 31.285714 | 135 | 0.543662 | false | 3.008475 | false | false | false |
dpwe/dp_python | dp.py | 1 | 2187 | # -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
%matplotlib inline
# <codecell>
cd /Users/dpwe/projects/millionsong/python/midi-dataset
# <codecell>
import numpy as np
import matplotlib.pyplot as plt
import dpcore
# <codecell>
reload(dpcore)
# <codecell>
M = np.random.rand(50,50)
plt.imshow(M, interpolation='none', cmap='binary')
# <codecell>
%timeit DC, phiC = dpcore.dpcore(M, 0.2, True)
%timeit DP, phiP = dpcore.dpcore(M, 0.2, False)
# <codecell>
DC, phiC = dpcore.dpcore(M, 0.2, True)
DP, phiP = dpcore.dpcore(M, 0.2, False)
# <codecell>
plt.imshow(DC,interpolation='none')
# <codecell>
plt.imshow(DC-DP, interpolation='none')
print np.max(np.abs(DC-DP))
# <codecell>
plt.imshow(phiC-phiP, interpolation='none')
# <codecell>
MM = np.random.rand(5, 5)
pen = 0.2
gut = 0.3
p,q,C,phi = dpcore.dp(MM, pen, gut)
print p, q
print MM
print C
print "best cost =", C[p[-1],q[-1]], "=", np.sum(MM[p, q])+pen*(np.sum(phi[p, q]>0))
plt.imshow(MM, interpolation='none', cmap='binary')
plt.hold(True)
plt.plot(q,p,'-r')
plt.hold(False)
plt.show()
# <codecell>
M2 = np.copy(M)
M2[20:30,20:30] += np.random.rand(10,10)
M2[10:40,10:40] += np.random.rand(30,30)
plt.imshow(M2, interpolation='none', cmap='binary')
p,q,C,phi = dpcore.dp(M2,0.1,0.1)
plt.hold(True)
plt.plot(q,p,'-r')
plt.hold(False)
plt.show()
# <codecell>
import librosa
# <codecell>
# Mirror matlab example from http://www.ee.columbia.edu/ln/rosa/matlab/dtw/
d1, sr = librosa.load('/Users/dpwe/projects/dtw/sm1_cln.wav', sr=16000)
d2, sr = librosa.load('/Users/dpwe/projects/dtw/sm2_cln.wav', sr=16000)
D1 = librosa.stft(d1, n_fft=512, hop_length=128)
D2 = librosa.stft(d2, n_fft=512, hop_length=128)
librosa.display.specshow(20*np.log10(np.abs(D1)), sr=sr, hop_length=128)
# <codecell>
# Cosine similarity matrix (slow one-liner)
SM = np.array([[np.sum(a*b)/np.sqrt(np.sum(a**2)*np.sum(b**2)) for b in np.abs(D2.T)] for a in np.abs(D1.T)])
# <codecell>
plt.imshow(SM)
# <codecell>
p, q, C, phi = dpcore.dp(1-SM)
# <codecell>
plt.imshow(SM, interpolation='none', cmap='binary')
plt.hold(True)
plt.plot(q,p,'-r')
plt.hold(False)
plt.show()
# <codecell>
C[-1,-1]
# <codecell>
| mit | -7,173,430,211,191,840,000 | 17.533898 | 109 | 0.660265 | false | 2.163205 | false | false | false |
QuantCrimAtLeeds/PredictCode | open_cp/gui/run_analysis.py | 1 | 17795 | """
run_analysis
~~~~~~~~~~~~
Model / Controller for running the actual analysis.
TODO: The random `print` statements are to try to catch a rare deadlock
condition.
"""
import open_cp.gui.tk.run_analysis_view as run_analysis_view
import open_cp.gui.predictors as predictors
from open_cp.gui.common import CoordType
import open_cp.gui.predictors.predictor as predictor
import open_cp.pool as pool
import open_cp.gui.tk.threads as tk_threads
import open_cp.gui.locator as locator
import collections
import logging
import queue
import time
import datetime
class RunAnalysis():
"""Controller for performing the computational tasks of actually producing
a prediction. Using multi-processing.
:param parent: Parent `tk` widget
:param controller: The :class:`analyis.Analysis` model.
"""
def __init__(self, parent, controller):
self.view = run_analysis_view.RunAnalysisView(parent, self)
self.controller = controller
self._msg_logger = predictors.get_logger()
self._logger = logging.getLogger(__name__)
@property
def main_model(self):
"""The :class:`analysis.Model` instance"""
return self.controller.model
def run(self):
try:
self._model = RunAnalysisModel(self, self.main_model)
self._run_tasks()
except:
self._msg_logger.exception(run_analysis_view._text["genfail"])
self.view.done()
self.view.wait_window(self.view)
def cancel(self):
"""Called when we wish to cancel the running tasks"""
self._logger.warning("Analysis run being cancelled.")
self._msg_logger.warning(run_analysis_view._text["log10"])
if hasattr(self, "_off_thread"):
self._off_thread.cancel()
@staticmethod
def _chain_dict(dictionary):
for name, li in dictionary.items():
for x in li:
yield (name, x)
def _run_tasks(self):
tasks = []
for proj_name, proj in self._chain_dict(self._model.projectors):
for grid_name, grid in self._chain_dict(self._model.grids):
for pred_name, pred in self._chain_dict(self._model.grid_prediction_tasks):
task = _RunAnalysis_Task(
task = _RunAnalysis_Task._InnerTask(self.main_model, grid, proj, pred),
off_process = pred.off_process,
projection = proj_name,
grid = grid_name,
type = pred_name )
tasks.append(task)
total = len(tasks) * len(self._model.predict_tasks)
self._msg_logger.info(run_analysis_view._text["log7"], total)
self._off_thread = _RunnerThread(tasks, self._model.predict_tasks, self)
self._off_thread.force_gc()
locator.get("pool").submit(self._off_thread, self._finished)
def to_msg_logger(self, msg, *args, level=logging.DEBUG):
self._msg_logger.log(level, msg, *args)
def start_progress(self):
locator.get("pool").submit_gui_task(lambda : self.view.start_progress_bar())
def set_progress(self, done, out_of):
locator.get("pool").submit_gui_task(lambda : self.view.set_progress(done, out_of))
def end_progress(self):
locator.get("pool").submit_gui_task(lambda : self.view.stop_progress_bar())
def notify_model_message(self, msg, *args, level=logging.DEBUG):
self.to_msg_logger(msg, *args, level=level)
def _finished(self, out=None):
self.view.done()
if out is not None:
if isinstance(out, predictor.PredictionError):
self.view.alert(str(out))
self._msg_logger.error(run_analysis_view._text["warning1"].format(out))
elif isinstance(out, Exception):
self._msg_logger.error(run_analysis_view._text["log11"].format(out))
else:
self._msg_logger.error(run_analysis_view._text["log12"].format(out))
return
if self._off_thread.cancelled:
self.view.cancel()
else:
results = [PredictionResult(key, result) for (key, result) in self._off_thread.results]
result = RunAnalysisResult(results)
self.controller.new_run_analysis_result(result)
class _RunAnalysis_Task():
"""Pulled out to allow pickling"""
def __init__(self, task, off_process, projection, grid, type):
self.task = task
self.off_process = off_process
self.projection = projection
self.grid = grid
self.type = type
def __repr__(self):
return "_RunAnalysis_Task(task={}, off_process={}, projection={}, grid={}, type={})".format(
self.task, self.off_process, self.projection, self.grid, self.type)
class _InnerTask():
def __init__(self, main_model, grid, proj, pred):
# Make a copy of the :class:`DataModel` and not the extra baggage.
self.main_model = main_model.clone()
self.grid = grid
self.proj = proj
self.pred = pred
def __call__(self):
return self.pred(self.main_model, self.grid, self.proj)
class RunAnalysisResult():
def __init__(self, results):
self._results = results
self._time = datetime.datetime.now()
@property
def results(self):
"""List of :class:`PredictionResult` instances."""
return self._results
@property
def run_time(self):
""":class:`datetime` of when the result was completed."""
return self._time
def merge_all_results(results):
"""Merge an iterable of :class:`RunAnalysisResult` instances into a single
:class:`RunAnalysisResult` object."""
all_results = []
for result in results:
all_results.extend(result.results)
return RunAnalysisResult(all_results)
class PredictionResult():
"""The result of running the prediction, but not including any analysis
results.
:param key: Instance of :class:`TaskKey`
:param prediction: The result of the prediction. Slightly undefined, but
at present, should be an :class:`GridPrediction` instance.
"""
def __init__(self, key, prediction):
self._key = key
self._pred = prediction
@property
def key(self):
"""The :class:`TaskKey` describing the prediction."""
return self._key
@property
def prediction(self):
"""An instance of :class:`GridPrediction` (or most likely a subclass)
giving the actual prediction."""
return self._pred
def __repr__(self):
return "PredictionResult(key={}, prediction={}".format(self._key, self._pred)
class TaskKey():
"""Describes the prediction task which was run. We don't make any
assumptions about the components of the key (they are currently strings,
but in future may be richer objects) and don't implement custom hashing
or equality.
:param projection: The projection used.
:param grid: The grid used.
:param pred_type: The prediction algorithm (etc.) used.
:param pred_date: The prediction date.
:param pred_length: The length of the prediction.
"""
def __init__(self, projection, grid, pred_type, pred_date, pred_length):
self._projection = projection
self._grid = grid
self._pred_type = pred_type
self._pred_date = pred_date
self._pred_length = pred_length
@property
def projection(self):
return self._projection
@property
def grid(self):
return self._grid
@property
def prediction_type(self):
return self._pred_type
@property
def prediction_date(self):
return self._pred_date
@property
def prediction_length(self):
return self._pred_length
@staticmethod
def header():
"""Column representation for CSV file"""
return ["projection type", "grid type", "prediction type", "prediction date", "scoring length"]
def __iter__(self):
return iter((self.projection, self.grid, self.prediction_type,
self.prediction_date, self.prediction_length))
def __repr__(self):
return "projection: {}, grid: {}, prediction_type: {}, prediction_date: {}, prediction_length: {}".format(
self.projection, self.grid, self.prediction_type, self.prediction_date,
self.prediction_length)
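# Illustrative sketch (not part of the original module): TaskKey.header() and
# iteration over a key line up as CSV columns. All component values below are
# invented for the example; `writer` is assumed to be a csv.writer instance.
#
#   key = TaskKey(projection="EPSG:27700", grid="100m grid",
#                 pred_type="Retrospective hotspot",
#                 pred_date=datetime.datetime(2017, 1, 1),
#                 pred_length=datetime.timedelta(days=1))
#   writer.writerow(TaskKey.header())
#   writer.writerow(list(key))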
class RunAnalysisModel():
"""The model for running an analysis. Constructs lists/dicts:
- :attr:`projector_tasks` Tasks to project coordinates
- :attr:`grid_tasks` Tasks to lay a grid over the data
- :attr:`predict_tasks` Pairs `(start_date, score_length)`
- :attr:`grid_pred_tasks` Instances of :class:`GridPredictorTask`
:param controller: :class:`RunAnalysis` instance
:param view: :class:`RunAnalysisView` instance
:param main_model: :class:`analysis.Model` instance
"""
def __init__(self, controller, main_model):
self.controller = controller
self.main_model = main_model
self._build_projectors()
self._build_grids()
self._build_date_ranges()
self._build_grid_preds()
def _build_grid_preds(self):
self._grid_pred_tasks = dict()
for pred in self.predictors.predictors_of_type(predictors.predictor._TYPE_GRID_PREDICTOR):
self._grid_pred_tasks[pred.pprint()] = pred.make_tasks()
self.controller.notify_model_message(run_analysis_view._text["log1"],
sum( len(li) for li in self._grid_pred_tasks.values() ),
level=logging.INFO)
def _build_date_ranges(self):
self._predict_tasks = []
for top in self.comparators.comparators_of_type(predictors.comparitor.TYPE_TOP_LEVEL):
self._predict_tasks.extend(top.run())
self.controller.notify_model_message(run_analysis_view._text["log2"],
len(self.predict_tasks), level=logging.INFO)
if len(self.predict_tasks) > 0:
self.controller.notify_model_message(run_analysis_view._text["log3"],
self.predict_tasks[0][0].strftime(run_analysis_view._text["dtfmt"]),
level=logging.INFO)
self.controller.notify_model_message(run_analysis_view._text["log4"],
self.predict_tasks[-1][0].strftime(run_analysis_view._text["dtfmt"]),
level=logging.INFO)
@property
def predict_tasks(self):
"""List of pairs `(start_date, length)`"""
return self._predict_tasks
def _build_grids(self):
self._grid_tasks = dict()
for grid in self.predictors.predictors_of_type(predictors.predictor._TYPE_GRID):
tasks = grid.make_tasks()
self._grid_tasks[grid.pprint()] = tasks
self.controller.notify_model_message(run_analysis_view._text["log5"],
sum( len(li) for li in self._grid_tasks.values() ), level=logging.INFO )
def _build_projectors(self):
if self.main_model.coord_type == CoordType.XY:
projector = predictors.lonlat.PassThrough(self.main_model)
projectors = [projector]
else:
projectors = list(self.predictors.predictors_of_type(
predictors.predictor._TYPE_COORD_PROJ))
count = 0
self._projector_tasks = dict()
for projector in projectors:
tasks = projector.make_tasks()
self._projector_tasks[projector.pprint()] = tasks
count += len(tasks)
self.controller.notify_model_message(run_analysis_view._text["log6"],
count, level=logging.INFO)
@property
def grid_prediction_tasks(self):
"""Dictionary from string name to task(s)."""
return self._grid_pred_tasks
@property
def grids(self):
"""Dictionary from string name to task(s)."""
return self._grid_tasks
@property
def projectors(self):
"""Dictionary from string name to task(s)."""
return self._projector_tasks
@property
def predictors(self):
return self.main_model.analysis_tools_model
@property
def comparators(self):
return self.main_model.comparison_model
class BaseRunner():
"""Abstract base class which runs "tasks" and communicates with a
"controller" to show progress.
"""
def __init__(self, controller):
self._executor = pool.PoolExecutor()
self._results = []
self._controller = controller
self._cancel_queue = queue.Queue()
def __call__(self):
"""To be run off the main GUI thread"""
self._controller.start_progress()
self._controller.to_msg_logger(run_analysis_view._text["log9"])
self.executor.start()
try:
tasks = list(self.make_tasks())
self._controller.to_msg_logger(run_analysis_view._text["log13"])
futures = [ self.executor.submit(t) for t in tasks if t.off_process ]
on_thread_tasks = [t for t in tasks if not t.off_process]
done, out_of = 0, len(futures) + len(on_thread_tasks)
while len(futures) > 0 or len(on_thread_tasks) > 0:
if len(futures) > 0:
futures, count = self._process_futures(futures)
done += count
if len(on_thread_tasks) > 0:
task = on_thread_tasks.pop()
self._notify_result(task.key, task())
done += 1
else:
time.sleep(0.5)
self._controller.set_progress(done, out_of)
if self.cancelled:
print("Exiting...")
break
finally:
# Context management would call `shutdown` but we definitely want
# to call terminate.
print("Terminating...")
self.executor.terminate()
print("Done")
print("Ending progress...")
self._controller.end_progress()
def force_gc(self):
"""Fixes, or at least mitigates, issue #6. Call on the main GUI thread
prior to invoking `__call__`."""
import gc
gc.collect()
@property
def executor(self):
return self._executor
def _process_futures(self, futures):
results, futures = pool.check_finished(futures)
done = 0
for key, result in results:
self._notify_result(key, result)
done += 1
return futures, done
def _notify_result(self, key, result):
self._results.append( (key, result) )
self._controller.to_msg_logger(run_analysis_view._text["log8"], key)
def cancel(self):
self._cancel_queue.put("stop")
@property
def cancelled(self):
return not self._cancel_queue.empty()
@property
def results(self):
return self._results
def make_tasks(self):
"""To be over-riden in a sub-class. Should return a list of
:class:`RunPredTask` instances."""
raise NotImplementedError()
class RunPredTask(pool.Task):
"""Wraps a `key` and `task`. The task should have an attribute
:attr:`off_process` which is `True` if and only if we should run in
another process.
"""
def __init__(self, key, task):
super().__init__(key)
if task is None:
raise ValueError()
self._task = task
@property
def off_process(self):
return self._task.off_process
def __call__(self):
return self._task()
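# Illustrative subclass sketch (not part of the original module): a concrete
# runner only needs to supply make_tasks(), returning RunPredTask instances
# whose keys identify the work. `my_pairs` below is a hypothetical iterable of
# (key, callable) pairs.
#
#   class MyRunner(BaseRunner):
#       def make_tasks(self):
#           return [self.RunPredTask(key, task) for key, task in my_pairs]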
class _RunnerThread(BaseRunner):
"""Constructs the tasks to run. Essentially forms the cartesian product
of the prediction tasks with the date ranges.
The `result` will be :class:`PredictionResult` instances.
:param grid_prediction_tasks: Iterable giving callables which when run
return instances of :class:`SingleGridPredictor`.
:param predict_tasks: Iterable of pairs `(start_time, score_length)`
:param controller: The :class:`RunAnalysis` instance
"""
def __init__(self, grid_prediction_tasks, predict_tasks, controller):
super().__init__(controller)
self._tasks = list(grid_prediction_tasks)
self._date_ranges = list(predict_tasks)
def make_tasks(self):
tasks = []
futures = []
for task in self._tasks:
if task.off_process:
#raise NotImplementedError("This currently does not work due to pickling issues.")
task = self.RunPredTask(task, task.task)
futures.append(self.executor.submit(task))
else:
tasks.extend( self._make_new_task(task, task.task()) )
if len(futures) > 0:
for key, result in pool.yield_task_results(futures):
tasks.extend( self._make_new_task(key, result) )
return tasks
def _make_new_task(self, key, task):
for dr in self._date_ranges:
new_task = self.StartLengthTask(task=task, start=dr[0], length=dr[1])
k = TaskKey(projection=key.projection, grid=key.grid,
pred_type=key.type, pred_date=dr[0], pred_length=dr[1] )
yield self.RunPredTask(k, new_task)
class StartLengthTask():
def __init__(self, task, start, length):
self.start = start
self.length = length
if task is None:
raise ValueError()
self.task = task
def __call__(self):
return self.task(self.start, self.length)
@property
def off_process(self):
return self.task.off_process
| artistic-2.0 | -4,478,037,561,749,926,400 | 34.661323 | 114 | 0.599494 | false | 4.040645 | false | false | false |
geoenvo/opendims | opendims/automaticweathersystem/migrations/0004_auto_20160529_2145.py | 1 | 2278 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-05-29 14:45
from __future__ import unicode_literals
from decimal import Decimal
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('automaticweathersystem', '0003_auto_20160509_0842'),
]
operations = [
migrations.AlterField(
model_name='awsreport',
name='awsstation',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='automaticweathersystem.AWSStation', verbose_name='AWS station'),
),
migrations.AlterField(
model_name='awsreport',
name='day_rain',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Day rain'),
),
migrations.AlterField(
model_name='awsreport',
name='rain_rate',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=4, null=True, validators=[django.core.validators.MinValueValidator(Decimal('0'))], verbose_name='Rain rate'),
),
migrations.AlterField(
model_name='awsreport',
name='solar_radiation',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=4, null=True, validators=[django.core.validators.MinValueValidator(Decimal('0'))], verbose_name='Solar radiation'),
),
migrations.AlterField(
model_name='awsreport',
name='uv_index',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=4, null=True, validators=[django.core.validators.MinValueValidator(Decimal('0'))], verbose_name='UV index'),
),
migrations.AlterField(
model_name='awsreport',
name='wind_direction',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Wind direction'),
),
migrations.AlterField(
model_name='awsreport',
name='wind_speed',
field=models.DecimalField(blank=True, decimal_places=2, max_digits=4, null=True, validators=[django.core.validators.MinValueValidator(Decimal('0'))], verbose_name='Wind speed'),
),
]
| gpl-3.0 | -5,753,898,316,951,615,000 | 41.981132 | 194 | 0.638718 | false | 4.119349 | false | false | false |
decvalts/landlab | landlab/components/soil_moisture/soil_moisture_field.py | 1 | 8877 | #################################################################
##
## 'Field' concept is implemented for Soil Moisture component.
##
## Sai Nudurupati and Erkan Istanbulluoglu - 15May2014
#################################################################
from landlab import Component
import numpy as np
_VALID_METHODS = set(['Grid'])
def assert_method_is_valid(method):
if method not in _VALID_METHODS:
raise ValueError('%s: Invalid method name' % method)
class SoilMoisture( Component ):
"""
This component calculates and updates soil moisture after each storm. Soil
moisture is represented as a single bucket. Rainfall depth fills the bucket
and the soil moisture decays as a result of leakage and ET following the
analytical solution of Laio et al., (2001). This component can operate on
any raster grid. Input file is named soilmoisture_input.txt and is
temporarily placed under landlab.components.
Storms are considered to be instantaneous events.
Storm duration, depth and interstorm duration are obtained as input
Storm depth is obtained in mm and storm duration and interstorm duration
are obtained in hours
>>> from landlab import RasterModelGrid
>>> from landlab.components.radiation.radiation_field import Radiation
>>> from landlab.components.soil_moisture.soil_moisture_field import SoilMoisture
>>> import numpy as np
>>> grid = RasterModelGrid( 5, 4, 0.2 )
>>> grid['node']['Elevation'] = np.random.rand( grid.number_of_nodes ) * 1000
>>> rad = Radiation( grid )
>>> rad.name
'Radiation'
>>> current_time = 0.5
>>> rad.update( current_time )
>>>
"""
_name = 'Soil Moisture'
_input_var_names = set([
'VegetationCover',
'LiveLeafAreaIndex',
        'PotentialEvapotranspiration',
])
_output_var_names = set([
'WaterStress',
'SaturationFraction',
'Drainage',
'Runoff',
'ActualEvapotranspiration',
])
_var_units = {
'VegetationCover' : 'None',
'LiveLeafAreaIndex': 'None',
        'PotentialEvapotranspiration' : 'mm',
'WaterStress' : 'Pa',
'SaturationFraction' : 'None',
'Drainage' : 'mm',
'Runoff' : 'mm',
'ActualEvapotranspiration' : 'mm',
}
def __init__( self, grid, **kwds ):
self._method = kwds.pop('method', 'Grid')
self._interception_cap = kwds.pop('INTERCEPT_CAP', 1.)
self._zr = kwds.pop('ZR', 0.3)
self._runon = kwds.pop('RUNON', 0.)
self._fbare = kwds.pop('F_BARE', 0.7)
self._soil_Ib = kwds.pop('I_B', 12)
self._soil_Iv = kwds.pop('I_V', 36)
self._soil_Ew = kwds.pop('EW', 0.1)
self._soil_pc = kwds.pop('PC', 0.43)
self._soil_fc = kwds.pop('FC', 0.56)
self._soil_sc = kwds.pop('SC', 0.31)
self._soil_wp = kwds.pop('WP', 0.17)
self._soil_hgw = kwds.pop('HGW', 0.1)
self._soil_beta = kwds.pop('BETA', 12.7)
assert_method_is_valid(self._method)
super(SoilMoisture, self).__init__(grid, **kwds)
for name in self._input_var_names:
if not name in self.grid.at_cell:
self.grid.add_zeros('cell', name, units=self._var_units[name])
for name in self._output_var_names:
if not name in self.grid.at_cell:
self.grid.add_zeros('cell', name, units=self._var_units[name])
self._nodal_values = self.grid['node']
if not 'InitialSaturationFraction' in self.grid.at_cell:
self.grid.add_zeros('cell', 'InitialSaturationFraction', units='None' )
self._cell_values = self.grid['cell']
def update( self, current_time, **kwds ):
#DEBUGG = 0
P = kwds.pop('P', 5.)
Tb = kwds.pop('Tb', 24.)
Tr = kwds.pop('Tr', 0.0)
self._PET = self._cell_values['PotentialEvapotranspiration']
self._SO = self._cell_values['InitialSaturationFraction']
self._vegcover = self._cell_values['VegetationCover']
self._water_stress = self._cell_values['WaterStress']
self._S = self._cell_values['SaturationFraction']
self._D = self._cell_values['Drainage']
self._ETA = self._cell_values['ActualEvapotranspiration']
self._fr = self._cell_values['LiveLeafAreaIndex']/1.44
fbare = self._fbare
ZR = self._zr
pc = self._soil_pc
fc = self._soil_fc
sc = self._soil_sc
wp = self._soil_wp
hgw = self._soil_hgw
beta = self._soil_beta
for cell in range(0,self.grid.number_of_cells):
s = self._SO[cell]
Inf_cap = self._soil_Ib*(1-self._vegcover[cell]) + self._soil_Iv*self._vegcover[cell] # Infiltration capacity
Int_cap = min(self._vegcover[cell]*self._interception_cap, P) # Interception capacity
Peff = max(P-Int_cap, 0.0) # Effective precipitation depth
mu = (Inf_cap/1000.0)/(pc*ZR*(np.exp(beta*(1-fc))-1))
Ep = max((self._PET[cell]*self._fr[cell]+fbare*self._PET[cell]*(1-self._fr[cell])) - Int_cap, 0.001) #
nu = ((Ep/24.0)/1000.0)/(pc*ZR) # Loss function parameter
nuw = ((Ep*0.1/24)/1000.0)/(pc*ZR) # Loss function parameter
sini = self._SO[cell] + ((Peff+self._runon)/(pc*ZR*1000.0))
if sini>1:
self._runoff = (sini-1)*pc*ZR*1000
#print 'Runoff =', self._runoff
sini = 1
else:
self._runoff = 0
#self._runon = runoff
if sini>=fc:
tfc = (1.0/(beta*(mu-nu)))*(beta*(fc-sini)+ \
np.log((nu-mu+mu*np.exp(beta*(sini-fc)))/nu))
tsc = ((fc-sc)/nu)+tfc
twp = ((sc-wp)/(nu-nuw))*np.log(nu/nuw)+tsc
if Tb<tfc:
s = abs(sini-(1/beta)*np.log(((nu-mu+mu* \
np.exp(beta*(sini-fc)))*np.exp(beta*(nu-mu)*Tb) \
-mu*np.exp(beta*(sini-fc)))/(nu-mu)))
self._D[cell] = ((pc*ZR*1000)*(sini-s))-(Tb*(Ep/24))
self._ETA[cell] = (Tb*(Ep/24))
elif Tb>=tfc and Tb<tsc:
s = fc-(nu*(Tb-tfc))
self._D[cell] = ((pc*ZR*1000)*(sini-fc))-((tfc)*(Ep/24))
self._ETA[cell] = (Tb*(Ep/24))
elif Tb>=tsc and Tb<twp:
s = wp+(sc-wp)*((nu/(nu-nuw))*np.exp((-1)*((nu-nuw)/(sc-wp))*(Tb-tsc))-(nuw/(nu-nuw)))
self._D[cell] = ((pc*ZR*1000)*(sini-fc))-(tfc*Ep/24)
self._ETA[cell] = (1000*ZR*pc*(sini-s))-self._D[cell]
else:
s = hgw+(wp-hgw)*np.exp((-1)*(nuw/(wp-hgw))*max(Tb-twp,0))
self._D[cell] = ((pc*ZR*1000)*(sini-fc))-(tfc*Ep/24)
self._ETA[cell] = (1000*ZR*pc*(sini-s))-self._D[cell]
elif sini<fc and sini>=sc:
tfc = 0
tsc = (sini-sc)/nu
twp = ((sc-wp)/(nu-nuw))*np.log(nu/nuw)+tsc
if Tb<tsc:
s = sini - nu*Tb
self._D[cell] = 0
self._ETA[cell] = 1000*ZR*pc*(sini-s)
elif Tb>=tsc and Tb<twp:
s = wp+(sc-wp)*((nu/(nu-nuw))*np.exp((-1)*((nu-nuw)/(sc-wp))*(Tb-tsc))-(nuw/(nu-nuw)))
self._D[cell] = 0
self._ETA[cell] = (1000*ZR*pc*(sini-s))
else:
s = hgw+(wp-hgw)*np.exp((-1)*(nuw/(wp-hgw))*(Tb-twp))
self._D[cell] = 0
self._ETA[cell] = (1000*ZR*pc*(sini-s))
elif sini<sc and sini>=wp:
tfc = 0
tsc = 0
twp = ((sc-wp)/(nu-nuw))*np.log(1+(nu-nuw)*(sini-wp)/(nuw*(sc-wp)))
if Tb<twp:
s = wp+((sc-wp)/(nu-nuw))*((np.exp((-1)*((nu-nuw)/(sc-wp))*Tb))*(nuw+((nu-nuw)/(sc-wp))*(sini-wp))-nuw)
self._D[cell] = 0
self._ETA[cell] = (1000*ZR*pc*(sini-s))
else:
s = hgw+(wp-hgw)*np.exp((-1)*(nuw/(wp-hgw))*(Tb-twp))
self._D[cell] = 0
self._ETA[cell] = (1000*ZR*pc*(sini-s))
else:
tfc = 0
tsc = 0
twp = 0
s = hgw+(sini-hgw)*np.exp((-1)*(nuw/(wp-hgw))*Tb)
self._D[cell] = 0
self._ETA[cell] = (1000*ZR*pc*(sini-s))
self._water_stress[cell] = min(max((((sc - (s+sini)/2.) / (sc - wp))**4.),0.001),1.0)
self._S[cell] = s
self._SO[cell] = s
current_time += (Tb+Tr)/(24.*365.25)
return( current_time )
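# ---------------------------------------------------------------------------
# A minimal, hedged usage sketch (not part of the original component).
# The storm parameters and cell values below are illustrative numbers only;
# they are not taken from any calibrated data set.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    from landlab import RasterModelGrid

    grid = RasterModelGrid(5, 4, 0.2)
    grid['node']['Elevation'] = np.random.rand(grid.number_of_nodes) * 1000.
    SM = SoilMoisture(grid)

    # The component reads these cell fields; fill them with plausible values.
    grid['cell']['VegetationCover'][:] = 0.5
    grid['cell']['LiveLeafAreaIndex'][:] = 1.0
    grid['cell']['PotentialEvapotranspiration'][:] = 6.0   # mm/day
    grid['cell']['InitialSaturationFraction'][:] = 0.4

    # Drive three storms: P = storm depth (mm), Tr = storm duration (h),
    # Tb = interstorm duration (h).
    current_time = 0.
    for storm in range(3):
        current_time = SM.update(current_time, P=10., Tr=2., Tb=100.)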
| mit | 1,385,547,615,883,129,000 | 36.774468 | 123 | 0.494086 | false | 3.130113 | false | false | false |
ggiuffre/DBNsim | DBNsite/DBNsite/settings.py | 1 | 3304 | """
Django settings for DBNsite project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# For deploying to Heroku
STATIC_ROOT = os.path.join(BASE_DIR, 'DBNtrain', 'static')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'm7*slgwq#tit7*-f&s$09u39-@5!w+a_^*jlgqsbm$o*+c-g&-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['localhost', '127.0.0.1', '[::1]', '*'] # '*' accepts requests from any host, not only the local network
# Application definition
INSTALLED_APPS = [
'DBNtrain.apps.DbntrainConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'DBNsite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'DBNsite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'CET'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/DBNtrain/static/'
| mit | 4,501,313,335,069,155,000 | 25.645161 | 93 | 0.684927 | false | 3.434511 | false | false | false |
COMU/calibre-cloud-plugin | gui/error.py | 1 | 2052 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'untitled.ui'
#
# Created by: PyQt5 UI code generator 5.7
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(400, 239)
Dialog.setMinimumSize(QtCore.QSize(400, 239))
Dialog.setMaximumSize(QtCore.QSize(400, 239))
self.manuel_button = QtWidgets.QPushButton(Dialog)
self.manuel_button.setGeometry(QtCore.QRect(230, 190, 85, 30))
self.manuel_button.setObjectName("manuel_button")
self.splitter = QtWidgets.QSplitter(Dialog)
self.splitter.setGeometry(QtCore.QRect(20, 20, 361, 161))
self.splitter.setOrientation(QtCore.Qt.Vertical)
self.splitter.setObjectName("splitter")
self.label_2 = QtWidgets.QLabel(self.splitter)
self.label_2.setObjectName("label_2")
self.label = QtWidgets.QLabel(self.splitter)
self.label.setObjectName("label")
self.retranslateUi(Dialog)
self.manuel_button.clicked.connect(Dialog.close)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Error"))
self.manuel_button.setText(_translate("Dialog", "Okey"))
self.label_2.setText(_translate("Dialog", "Cloud Sync için gerekli olan bağımlılıklar yüklü değildir."))
self.label.setText(_translate("Dialog", "-Pydrive\n"
"$pip install pydrive --user\n"
"\n"
"-Webdavclient-\n"
"$sudo apt-get install libxml2-dev libxslt-dev python-dev\n"
"$sudo apt-get install libcurl4-openssl-dev python-pycurl\n"
"$pip install webdavclient --user"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
Dialog = QtWidgets.QDialog()
ui = Ui_Dialog()
ui.setupUi(Dialog)
Dialog.show()
sys.exit(app.exec_())
| gpl-3.0 | 272,182,076,008,607,520 | 36.163636 | 112 | 0.681996 | false | 3.518072 | false | false | false |
IMSGlobal/openbadges-validator-core | openbadges/verifier/tasks/crypto.py | 2 | 5183 | from Crypto.PublicKey import RSA
from jose import jwk, jws, exceptions as joseexceptions
import json
import six
from ..actions.graph import patch_node
from ..actions.tasks import add_task
from ..actions.validation_report import set_validation_subject
from ..exceptions import TaskPrerequisitesError
from ..state import get_node_by_id, get_node_by_path
from ..utils import list_of, make_string_from_bytes
from .utils import task_result
from .task_types import (ISSUER_PROPERTY_DEPENDENCIES, INTAKE_JSON, SIGNING_KEY_FETCHED, VERIFY_JWS,
VERIFY_KEY_OWNERSHIP, VALIDATE_PROPERTY, VALIDATE_REVOCATIONLIST_ENTRIES,
VERIFY_SIGNED_ASSERTION_NOT_REVOKED)
from .validation import OBClasses, ValueTypes
def process_jws_input(state, task_meta, **options):
try:
data = task_meta['data']
except KeyError:
raise TaskPrerequisitesError()
node_json = jws.get_unverified_claims(data).decode('utf-8')
node_data = json.loads(node_json)
node_id = task_meta.get('node_id', node_data.get('id'))
actions = [
add_task(INTAKE_JSON, data=node_json, node_id=node_id),
add_task(VERIFY_JWS, node_id=node_id, data=data, prerequisites=SIGNING_KEY_FETCHED)
]
if node_id:
actions.append(set_validation_subject(node_id))
return task_result(True, "Processed JWS-signed data and queued signature verification task", actions)
def verify_jws_signature(state, task_meta, **options):
try:
data = task_meta['data']
node_id = task_meta['node_id']
key_node = get_node_by_path(state, [node_id, 'verification', 'creator'])
public_pem = key_node['publicKeyPem']
except (KeyError, IndexError,):
raise TaskPrerequisitesError()
actions = [
add_task(VERIFY_KEY_OWNERSHIP, node_id=node_id),
add_task(
VALIDATE_PROPERTY, node_path=[node_id, 'badge', 'issuer'], prop_name='revocationList',
prop_type=ValueTypes.ID, expected_class=OBClasses.RevocationList, fetch=True, required=False,
prerequisites=[ISSUER_PROPERTY_DEPENDENCIES]
),
]
key = RSA.import_key(public_pem)
jwkkey = jwk.construct(key, 'RS256').to_dict()
try:
jws.verify(data, jwkkey, None)
except (joseexceptions.JWSError, joseexceptions.JWSSignatureError,) as e:
return task_result(
False, "Signature for node {} failed verification".format(node_id) + " :: " + str(e), actions)
return task_result(
True, "Signature for node {} passed verification".format(node_id), actions)
def verify_key_ownership(state, task_meta, **options):
try:
node_id = task_meta['node_id']
issuer_node = get_node_by_path(state, [node_id, 'badge', 'issuer'])
key_node = get_node_by_path(state, [node_id, 'verification', 'creator'])
key_id = key_node['id']
except (KeyError, IndexError,):
raise TaskPrerequisitesError()
actions = []
if issuer_node.get('revocationList'):
actions.append(add_task(
VERIFY_SIGNED_ASSERTION_NOT_REVOKED, node_id=node_id, prerequisites=[VALIDATE_REVOCATIONLIST_ENTRIES]
))
issuer_keys = list_of(issuer_node.get('publicKey'))
if key_id not in issuer_keys:
return task_result(
False,
"Assertion signed by a key {} other than those authorized by issuer profile".format(key_id),
actions)
return task_result(
True, "Assertion signing key {} is properly declared in issuer profile".format(key_id), actions)
def verify_signed_assertion_not_revoked(state, task_meta, **options):
try:
assertion_id = task_meta['node_id']
issuer = get_node_by_path(state, [assertion_id, 'badge', 'issuer'])
except (IndexError, KeyError, TypeError,):
raise TaskPrerequisitesError()
if not issuer.get('revocationList'):
return task_result(True, 'Assertion {} is not revoked. Issuer {} has no revocation list'.format(
assertion_id, issuer.get('id')
))
revocation_list = get_node_by_id(state, issuer['revocationList'])
revoked_assertions = revocation_list['revokedAssertions']
def _is_match(term, container):
if isinstance(container, six.string_types):
return term == container
return container.get('id') == term
revoked_match = [a for a in revoked_assertions if _is_match(assertion_id, a)]
actions = [patch_node(revocation_list['id'], {'revokedAssertions': revoked_match})]
if len(revoked_match):
assertion_records = [i for i in state['graph'] if i.get('id') == assertion_id]
msg = ''
for a in revoked_match:
try:
msg = ' with reason: ' + a['revocationReason']
except (KeyError, TypeError,):
continue
return task_result(False, "Assertion {} has been revoked in RevocationList {}{}".format(
assertion_id, issuer['revocationList'], msg
), actions)
return task_result(True, "Assertion {} is not marked as revoked in RevocationList {}".format(
assertion_id, issuer['revocationList']
), actions)
| apache-2.0 | -191,745,458,210,994,750 | 37.392593 | 113 | 0.64808 | false | 3.562199 | false | false | false |
lgray/HGCanalysis | scripts/submitLocalAnalysis.py | 3 | 2483 | #!/usr/bin/env python
import os,sys
import optparse
import commands
import time
cmsswBase=os.environ['CMSSW_BASE']
cmsswVersion=os.environ['CMSSW_VERSION']
usage = 'usage: %prog [options]'
parser = optparse.OptionParser(usage)
parser.add_option('-q', '--queue' , dest='queue' , help='batch queue' , default='local')
parser.add_option('-t', '--tag' , dest='tag' , help='tag' , default='Single13_%s_v2'%cmsswVersion)
parser.add_option('-s', '--step' , dest='step' , help='step' , default=1, type=int)
parser.add_option('-o', '--out' , dest='output' , help='output directory' , default='/store/cmst3/group/hgcal/CMSSW/ntuples/')
parser.add_option('-c', '--cfg' , dest='cfg' , help='cfg file' , default='test/runHGCSimHitsAnalyzer_cfg.py')
(opt, args) = parser.parse_args()
#prepare output
os.system('cmsMkdir %s'%opt.output)
jobsDir='%s/src/FARM%s'%(cmsswBase,time.time())
os.system('mkdir -p %s'%jobsDir)
from UserCode.HGCanalysis.storeTools_cff import fillFromStore
allFiles=fillFromStore('/store/cmst3/group/hgcal/CMSSW/'+opt.tag)
if opt.step<=0 : opt.step=len(allFiles)
outputTag=opt.tag.replace('/','_')
print outputTag
for ffile in xrange(0,len(allFiles),opt.step):
#create a wrapper for standalone cmssw job
scriptFile = open('%s/runJob_%s_%d.sh'%(jobsDir,outputTag,ffile), 'w')
scriptFile.write('#!/bin/bash\n')
scriptFile.write('cd %s/src\n'%cmsswBase)
scriptFile.write('eval `scram r -sh`\n')
scriptFile.write('cd %s\n'%jobsDir)
scriptFile.write('cmsRun %s/src/UserCode/HGCanalysis/%s %s %d %d\n'%(cmsswBase,opt.cfg,opt.tag,ffile,opt.step))
scriptFile.write('cmsStage -f /tmp/psilva/%s_SimHits_%d.root %s\n'%(outputTag,ffile,opt.output))
scriptFile.write('rm /tmp/psilva/%s_SimHits_%d.root\n'%(outputTag,ffile))
scriptFile.write('echo "All done for this job\"\n')
scriptFile.close()
os.system('chmod u+rwx %s/runJob_%s_%d.sh'%(jobsDir,outputTag,ffile))
#submit it to the batch or run it locally
if opt.queue=='local':
os.system('sh %s/runJob_%s_%d.sh'%(jobsDir,outputTag,ffile))
else:
os.system("bsub -q %s \'%s/runJob_%s_%d.sh\'"%(opt.queue,jobsDir,outputTag,ffile))
| gpl-3.0 | 8,585,814,696,060,641,000 | 49.673469 | 182 | 0.593637 | false | 3.050369 | false | false | false |
espeed/lightsocket | lightsocket/resources/example.py | 1 | 1880 | # -*- coding: utf-8 -*-
#
# Copyright 2011 James Thornton (http://jamesthornton.com)
# BSD License (see LICENSE for details)
#
import random
from lightsocket.server import Resource, Response, Router
class ExampleProxy(Resource):
# This example class is used in composition of the primary resource (below).
# Each resource has the same structure and required methods.
def __init__(self):
self.router = Router(self)
self.method_map = dict(yep=self.yep)
# This is the default request handler.
# A request handler is required in each class.
def handle_request(self,request):
method = self.get_method(request)
return method(request)
# This method is public because it has been added to method_map.
def yep(self,request):
data = "yep: " + str(random.random())
return Response(200,data)
class Example(Resource):
def __init__(self):
# The router is required in each object.
        # It takes one arg for the container, which will always be self.
self.router = Router(self)
# Add any objects you want to include in this resource
# the name will be the path segment
# e.g. /example/proxy
self.router.add("proxy",ExampleProxy())
# Add this object's public methods
self.method_map = dict(test=self.test)
# This is the default request handler.
# A request handler is required in each class.
def handle_request(self,request):
method = self.get_method(request)
return method(request)
# This method is not public because it's not in method_map.
def shutdown(self,params):
return Response(200,None)
# This method is public because it has been added to method_map.
def test(self,request):
data = "test: " + str(random.random())
return Response(200,data)
| bsd-3-clause | 4,494,054,666,551,457,000 | 29.322581 | 81 | 0.651064 | false | 4.025696 | false | false | false |
Erikun/elogy | elogy/api/attachments.py | 2 | 1693 | from datetime import datetime
import json
from flask import jsonify, url_for
from flask_restful import Resource, reqparse
from werkzeug import FileStorage
from ..attachments import save_attachment
from ..utils import get_utc_datetime
attachments_parser = reqparse.RequestParser()
attachments_parser.add_argument(
"attachment", type=FileStorage, action="append",
location='files', required=True)
attachments_parser.add_argument("timestamp", type=str)
attachments_parser.add_argument("metadata", type=str)
attachments_parser.add_argument("embedded", type=bool,
default=False)
class AttachmentsResource(Resource):
def post(self, logbook_id, entry_id):
"Upload attachments to an entry"
args = attachments_parser.parse_args()
if args.get("timestamp"):
timestamp = get_utc_datetime(args["timestamp"])
else:
timestamp = datetime.utcnow()
if args.get("metadata"):
metadata = json.loads(args["metadata"])
else:
metadata = None
for attachment in args["attachment"]:
print(attachment)
attachment = save_attachment(attachment, timestamp,
entry_id, metadata,
embedded=args["embedded"])
attachment.save()
return jsonify(id=attachment.id,
location=url_for("get_attachment",
path=attachment.path),
content_type=attachment.content_type,
filename=attachment.filename,
metadata=attachment.metadata)
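# ---------------------------------------------------------------------------
# Hedged client-side sketch (not part of the original module). It assumes the
# resource is routed at /api/logbooks/<logbook_id>/entries/<entry_id>/attachments;
# the actual URL rule is registered elsewhere in elogy and may differ.
#
#   import json, requests
#   requests.post(
#       "http://localhost:8000/api/logbooks/1/entries/17/attachments",
#       files={"attachment": open("plot.png", "rb")},
#       data={"metadata": json.dumps({"caption": "test plot"}),
#             "embedded": False})
# ---------------------------------------------------------------------------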
| gpl-3.0 | 1,923,343,569,552,397,800 | 35.804348 | 67 | 0.600118 | false | 4.93586 | false | false | false |
MLAB-project/sensor-ui | src/SensorServer/scripts/ROSpymlabServer.py | 1 | 4252 | #!/usr/bin/env python
import rospy
import pymlab
from pymlab import config
import sys
import sensor_server
from std_msgs.msg import String
from std_msgs.msg import Float32
import std_msgs
from sensor_server.srv import *
from sensor_server.msg import *
def server(req):
print req
print "Returning [%s + %s]"%(req.name, req.data)
return GetSensValResponse( 10 )
class pymlab_server():
def __init__(self):
        self.pymlab_read = False # guards pymlab against two simultaneous read attempts ...
def init(self, cfg=None):
self.status = False
self.init = cfg
self.cfg_i2c = eval(cfg.i2c)
self.cfg_bus = eval(cfg.bus)
self.devices = {}
Local_devices = {}
rospy.loginfo("configuracni soubor: %s" %str(cfg))
i2c = {
"port": 1,
}
bus = [
{
"name": "lts01",
"type": "lts01",
},
]
self.pymlab_config = config.Config(i2c = self.cfg_i2c, bus = self.cfg_bus)
self.pymlab_config.initialize()
#print self.cfg_bus
for x in self.cfg_bus:
#print "init >> ", x, x['name'], x['type']
self.devices[x['name']] = self.pymlab_config.get_device(x['name'])
rospy.set_param("devices", str(self.devices))
#print "self.devices>>", self.devices
rospy.loginfo("self.device: %s" %str(self.devices))
return True
def getvalue(self, cfg=None):
#print "getval>>"
#print cfg
val = int(float(self.lts_sen.get_temp()))
#print "value je tohle:", val
return GetSensValResponse(val)
def status(self, cfg=None):
#print "status>>",
#print cfg
self.rate = 1
        try: # used when invoked as a GetSensVal service request,
ecfg = eval(cfg.data)
        except Exception, e: # this handles the 'pymlab_server' message
ecfg = eval(cfg)
print ecfg
if 'rate' in ecfg:
self.rate = ecfg['rate']
#print "Vlastni frekvence", self.rate
rospy.set_param("rate", float(self.rate))
if 'AutoInputs' in ecfg:
self.AutoInputs = ecfg['AutoInputs']
rospy.set_param("AutoInputs", str(self.AutoInputs))
if "start" in ecfg:
self.status = True
rate = rospy.Rate(self.rate)
AutoInputs = self.AutoInputs
devices = self.devices
sender = rospy.Publisher('pymlab_data', SensorValues, queue_size=20)
values = {}
for x in AutoInputs:
for y in AutoInputs[x]:
#print "AutoInputs >>", x, y,
#print str(x)+"/"+str(y), str(x)+"/"+str(y)
values[str(x)+"/"+str(y)] = str(x)+"/"+str(y)
rospy.set_param("values", values)
# print "\n run \n\n"
while True:
print "\r",
for x in AutoInputs:
for y in AutoInputs[x]:
while self.pymlab_read: pass
self.pymlab_read = True
data = getattr(self.devices[devices[x].name], y)()
self.pymlab_read = False
print x, "%.3f"%data, "||",
sender.publish(name=str(devices[x].name)+"/"+str(y), value=data)
#senderTest.publish(data)
print "\r",
rate.sleep()
return True
def drive(self, cfg):
#print cfg
raval = "---"
reval = getattr(self.devices[cfg.device], cfg.method)(*eval(cfg.parameters))
return str(reval)
def main():
ps = pymlab_server()
rospy.init_node('pymlab_node')
rospy.Subscriber("pymlab_server", PymlabServerStatusM, ps.status)
s1 = rospy.Service('pymlab_init', PymlabInit, ps.init)
s2 = rospy.Service('pymlab_server', PymlabServerStatus, ps.status)
s3 = rospy.Service('pymlab_drive', PymlabDrive, ps.drive)
rospy.loginfo("Ready to get work.")
rospy.spin()
if __name__ == "__main__":
main() | gpl-2.0 | 7,482,300,904,354,415,000 | 33.024 | 96 | 0.516463 | false | 3.558159 | true | false | false |
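# ---------------------------------------------------------------------------
# Hedged usage note (not part of the original node): the configuration string
# parsed by pymlab_server.status() is an eval()-ed Python dict. Based on the
# parsing above it might look like this (device and method names are
# illustrative and must match the pymlab bus configuration):
#
#   "{'start': True, 'rate': 2, 'AutoInputs': {'lts01': ['get_temp']}}"
#
# 'rate' is the publishing frequency in Hz and 'AutoInputs' maps each device
# name to the list of methods to poll and publish on the 'pymlab_data' topic.
# ---------------------------------------------------------------------------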
arcivanov/pybuilder | src/unittest/python/plugins/python/install_dependencies_plugin_tests.py | 3 | 8170 | # -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2020 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from pybuilder.core import (Project,
Logger)
from pybuilder.pip_utils import PIP_MODULE_STANZA
from pybuilder.plugins.python.install_dependencies_plugin import (initialize_install_dependencies_plugin,
install_runtime_dependencies,
install_build_dependencies,
install_dependencies)
from test_utils import Mock, ANY, patch, call
__author__ = "Alexander Metzner, Arcadiy Ivanov"
class InstallRuntimeDependenciesTest(unittest.TestCase):
def setUp(self):
self.project = Project("unittest", ".")
self.project.set_property("install_env", "whatever")
self.project.set_property("dir_install_logs", "any_directory")
self.project.set_property("dir_target", "/any_target_directory")
self.logger = Mock(Logger)
self.reactor = Mock()
self.pyb_env = Mock()
self.pyb_env.executable = ["a/b"]
self.pyb_env.env_dir = "a"
self.pyb_env.execute_command.return_value = 0
self.reactor.python_env_registry = {"whatever": self.pyb_env}
initialize_install_dependencies_plugin(self.project)
@patch("pybuilder.install_utils.tail_log")
@patch("pybuilder.install_utils.open")
@patch("pybuilder.install_utils.create_constraint_file")
@patch("pybuilder.install_utils.get_packages_info", return_value={})
def test_should_install_multiple_dependencies(self,
*_):
self.project.depends_on("spam")
self.project.depends_on("eggs")
self.project.depends_on_requirements("requirements.txt")
install_runtime_dependencies(self.logger, self.project, self.reactor)
exec_cmd = self.pyb_env.execute_command
call_stanza = self.pyb_env.executable + PIP_MODULE_STANZA + ["install", "-c", ANY]
exec_cmd.assert_called_with(call_stanza +
["eggs", "spam", "-r", "requirements.txt"],
outfile_name=ANY,
error_file_name=ANY,
env=ANY, cwd=None, shell=False, no_path_search=True)
@patch("pybuilder.install_utils.tail_log")
@patch("pybuilder.install_utils.open")
@patch("pybuilder.install_utils.create_constraint_file")
@patch("pybuilder.install_utils.get_packages_info", return_value={})
def test_should_install_multiple_dependencies_locally(self,
*_):
self.project.depends_on("spam")
self.project.depends_on("eggs")
self.project.depends_on("foo")
self.project.set_property("install_dependencies_local_mapping", {
"spam": "any-dir",
"eggs": "any-other-dir"
})
install_runtime_dependencies(self.logger, self.project, self.reactor)
exec_cmd = self.pyb_env.execute_command
call_stanza = self.pyb_env.executable + PIP_MODULE_STANZA + ["install", "-c", ANY]
exec_cmd.assert_has_calls([call(call_stanza + ["-t", "any-other-dir", "eggs"],
outfile_name=ANY,
error_file_name=ANY,
env=ANY, cwd=None, shell=False, no_path_search=True),
call(call_stanza + ["-t", "any-dir", "spam"],
outfile_name=ANY,
error_file_name=ANY,
env=ANY, cwd=None, shell=False, no_path_search=True),
call(call_stanza + ["foo"],
outfile_name=ANY,
error_file_name=ANY,
env=ANY, cwd=None, shell=False, no_path_search=True)
], any_order=True)
class InstallBuildDependenciesTest(unittest.TestCase):
def setUp(self):
self.project = Project("unittest", ".")
self.project.set_property("install_env", "whatever")
self.project.set_property("dir_install_logs", "any_directory")
self.project.set_property("dir_target", "/any_target_directory")
self.logger = Mock(Logger)
self.reactor = Mock()
self.pyb_env = Mock()
self.pyb_env.executable = ["a/b"]
self.pyb_env.env_dir = "a"
self.pyb_env.execute_command.return_value = 0
self.reactor.python_env_registry = {"whatever": self.pyb_env}
initialize_install_dependencies_plugin(self.project)
@patch("pybuilder.install_utils.tail_log")
@patch("pybuilder.install_utils.open")
@patch("pybuilder.install_utils.create_constraint_file")
@patch("pybuilder.install_utils.get_packages_info", return_value={})
def test_should_install_multiple_dependencies(self,
*_):
self.project.build_depends_on("spam")
self.project.build_depends_on("eggs")
self.project.build_depends_on_requirements("requirements-dev.txt")
install_build_dependencies(self.logger, self.project, self.reactor)
exec_cmd = self.pyb_env.execute_command
call_stanza = self.pyb_env.executable + PIP_MODULE_STANZA + ["install", "-c", ANY]
exec_cmd.assert_called_with(call_stanza +
["eggs", "spam", "-r", "requirements-dev.txt"],
outfile_name=ANY,
error_file_name=ANY,
env=ANY, cwd=None, shell=False, no_path_search=True)
class InstallDependenciesTest(unittest.TestCase):
def setUp(self):
self.project = Project("unittest", ".")
self.project.set_property("install_env", "whatever")
self.project.set_property("dir_install_logs", "any_directory")
self.project.set_property("dir_target", "/any_target_directory")
self.logger = Mock(Logger)
self.reactor = Mock()
self.pyb_env = Mock()
self.pyb_env.executable = ["a/b"]
self.pyb_env.env_dir = "a"
self.pyb_env.execute_command.return_value = 0
self.reactor.python_env_registry = {"whatever": self.pyb_env}
initialize_install_dependencies_plugin(self.project)
@patch("pybuilder.install_utils.tail_log")
@patch("pybuilder.install_utils.open")
@patch("pybuilder.install_utils.create_constraint_file")
@patch("pybuilder.install_utils.get_packages_info", return_value={})
def test_should_install_single_dependency_without_version(self,
*_):
self.project.depends_on("spam")
self.project.build_depends_on("eggs")
install_dependencies(self.logger, self.project, self.reactor)
exec_cmd = self.pyb_env.execute_command
call_stanza = self.pyb_env.executable + PIP_MODULE_STANZA + ["install", "-c", ANY]
exec_cmd.assert_called_with(call_stanza +
["eggs", "spam"],
outfile_name=ANY,
error_file_name=ANY,
env=ANY, cwd=None, shell=False, no_path_search=True)
| apache-2.0 | 2,477,966,878,217,443,300 | 44.642458 | 105 | 0.564994 | false | 4.0667 | true | false | false |
sheagcraig/python-jss | tests/test_jamf_software_server.py | 1 | 1302 | from __future__ import absolute_import
import pytest
import os.path
import jss
from jss import JSS, QuerySet
from xml.etree import ElementTree
from jss.exceptions import GetError
def mock_expanduser(path):
return path
class TestJSS(object):
def test_construct_without_jssprefs(self, jss_prefs_dict):
j = JSS(url=jss_prefs_dict['jss_url'], user=jss_prefs_dict['jss_user'], password=jss_prefs_dict['jss_password'])
assert j is not None
def test_construct_with_jssprefs(self, jss_prefs, monkeypatch, tmpdir):
def mock_expanduser(path):
return tmpdir.join(path.replace('~', 'HOME'))
monkeypatch.setattr(os.path, 'expanduser', mock_expanduser)
# monkeypatch.setattr(os.path, 'startswith', lambda p: False)
j = JSS(jss_prefs=jss_prefs)
assert j is not None
def test_trailing_slash_removed(self, jss_prefs_dict):
j = JSS(url=jss_prefs_dict['jss_url']+'/')
assert j.base_url[-1] != '/'
def test_get_packages(self, j):
result = j.Package()
assert result is not None
assert isinstance(result, QuerySet)
def test_scrape(self, j):
#scrape_url = '/'
scrape_url = 'legacy/packages.html?id=-1&o=c'
r = j.scrape(scrape_url)
assert r is not None | gpl-3.0 | 7,726,659,739,758,076,000 | 30.02381 | 120 | 0.645161 | false | 3.364341 | true | false | false |
RudoCris/horizon | openstack_dashboard/dashboards/router/nexus1000v/tables.py | 30 | 3496 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import exceptions
from horizon import tables
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateNetworkProfile(tables.LinkAction):
name = "create"
verbose_name = _("Create Network Profile")
url = "horizon:router:nexus1000v:create_network_profile"
classes = ("ajax-modal",)
icon = "plus"
class DeleteNetworkProfile(tables.DeleteAction):
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Network Profile",
u"Delete Network Profiles",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Deleted Network Profile",
u"Deleted Network Profiles",
count
)
def delete(self, request, obj_id):
try:
api.neutron.profile_delete(request, obj_id)
except Exception:
msg = _('Failed to delete network profile (%s).') % obj_id
LOG.info(msg)
redirect = reverse('horizon:router:nexus1000v:index')
exceptions.handle(request, msg, redirect=redirect)
class EditNetworkProfile(tables.LinkAction):
name = "update"
verbose_name = _("Edit Network Profile")
url = "horizon:router:nexus1000v:update_network_profile"
classes = ("ajax-modal",)
icon = "pencil"
class NetworkProfile(tables.DataTable):
id = tables.Column("id", hidden=True)
name = tables.Column("name", verbose_name=_("Network Profile"), )
project = tables.Column("project_name", verbose_name=_("Project"))
segment_type = tables.Column("segment_type",
verbose_name=_("Segment Type"))
sub_type = tables.Column("sub_type",
verbose_name=_("Sub Type"))
segment_range = tables.Column("segment_range",
verbose_name=_("Segment Range"))
multicast_ip_range = tables.Column("multicast_ip_range",
verbose_name=_("Multicast IP Range"))
physical_network = tables.Column("physical_network",
verbose_name=_("Physical Network Name"))
class Meta(object):
name = "network_profile"
verbose_name = _("Network Profile")
table_actions = (CreateNetworkProfile, DeleteNetworkProfile,)
row_actions = (EditNetworkProfile, DeleteNetworkProfile,)
class PolicyProfile(tables.DataTable):
id = tables.Column("id", hidden=True)
name = tables.Column("name", verbose_name=_("Policy Profile"), )
project = tables.Column("project_name", verbose_name=_("Project"))
class Meta(object):
name = "policy_profile"
verbose_name = _("Policy Profile")
| apache-2.0 | 4,554,167,554,533,875,700 | 34.313131 | 78 | 0.639874 | false | 4.397484 | false | false | false |
geodynamics/specfem3d | utils/dynamic_rupture/curved_fault_mesh/create_semisphere_mesh.py | 1 | 14359 | #!python
# Create 3D mesh files.
# Huihui Weng (Geoazur, 2018)
#
# ======================================================================
from __future__ import print_function
import numpy
import os
import sys
# Please set up the path for CUBIT (or Trelis) and GEOCUBIT in your system.
# Alternatively, you can set the path in your ~/.bashrc
sys.path.append('/opt/linux64/Trelis-14.0/bin/')
sys.path.append('/opt/linux64/specfem3d/CUBIT_GEOCUBIT/')
import cubit
print("Init CUBIT...")
try:
# print all the information to the screen.
cubit.init([""])
    # stop all the output information and warnings to the screen.
#cubit.init(["-noecho","-nojournal","-information=off","-warning=off"])
except:
pass
from geocubitlib import absorbing_boundary
from geocubitlib import save_fault_nodes_elements
from geocubitlib import cubit2specfem3d
#=====================================
# Set up parameters ===
#=====================================
# Please set up the mesh parameters in this section
# If DEBUG is True, this script only creates the CUBIT script; otherwise it creates both the CUBIT script and the mesh file.
# It is recommended to debug this script in the CUBIT GUI before creating the Specfem3D mesh.
#DEBUG = True
DEBUG = False
# The radius of the semi-sphere (km)
R_model = 100
# The radius of the cylinder that cuts through both the free surface and the fault (km)
R_cylinder = 10
work_dir = os.getcwd()
# If Interface is False, then use planar fault (given by the strike, dip, and dep). Otherwise run the scripts in ./Interface and give the path of the created interface (in the directory ./output)
# If Topography is False, then use a planar surface. Otherwise run the scripts in ./Surface and give the path of the created surface (in the directory ./output)
Interface = False
Topography = False
Int_name = work_dir + "/output/interface_sigma_1_inc_12.sat"
Top_name = work_dir + "/output/surface_sigma_1_inc_12.sat"
Strike = 230
Dip = 70
Dep = -5.7
# Uniform material properties.
vp = 5770 # P wave speed (m/s)
vs = 3330 # S wave speed (m/s)
rho = 2705 # density (g/m^3)
Q = 13
# The mesh size (km). Smaller grid size can better sample curved geometries.
fine_size = 4
coarse_size = 8
# The mesh scheme: thex
# Thex: first create a tetrahedral unstructured mesh, then convert it into a hexahedral mesh (reducing the grid size by half). This mesh scheme has good flexibility for curved geometries.
#    Note that the final mesh is a hexahedral mesh
mesh_scheme = "thex"
# The element type for hexahedral mesh: HEX8 or HEX27 (supported by Specfem3D)
# Higher-order nodes can be moved onto the curved geometry by default, if Node Constraint is set ON.
element_type = "HEX8"
#element_type = "HEX27"
# Set up the lower depth of the seismogenic zone. The rupture can propagate to the free surface here.
Lower_cutoff = -30
# The name of the CUBIT script. One can run this script under the CUBIT GUI for debugging. This python code will run the script without the GUI.
journalFile = "./output/Kumamoto.jou"
# The name (prefix name) of created mesh directory for Specfem3D. The full name is the prefix name + features of fault and free surface.
mesh_name = "Kumamoto"
#=====================================
#==============================
# Main code ===
#==============================
# There is no need to change anything below. If you need to change something,
# please send me an email. I will try to make it more automatic.
#
print("Initial check...")
# Initial check
if(not os.path.isfile(Int_name) and Interface):
print("The interface data does not exis!!! Please create it in ./Interface.")
exit()
elif(os.path.isfile(Int_name) and Interface):
print("Using interface slab: ", Int_name)
else:
print("Using planar fault with strike: ", Strike, " dip: ", Dip, " depth(reference point): ", Dep)
if(not os.path.isfile(Top_name) and Topography):
print("The topography data does not exis!!! Please create it in ./Surface.")
elif(os.path.isfile(Top_name) and Topography):
print("Using topography: ", Top_name)
else:
print("Using planar topography.")
# The name of output mesh file
if(Interface and Topography):
output_mesh = mesh_name + "_semisphere_curvedfault_curvedtopo"
elif(not Interface and Topography):
output_mesh = mesh_name + "_semisphere_planarfault" + "_strike_" + str(Strike) + "_dip_" + str(Dip) + "_depth_" + str(Dep) + "_curvedtopo"
elif(Interface and not Topography):
output_mesh = mesh_name + "_semisphere_curvedfault_planarsur"
else:
output_mesh = mesh_name + "_semisphere_planarfault" + "_strike_" + str(Strike) + "_dip_" + str(Dip) + "_depth_" + str(Dep) + "_planarsur"
# Add the info of mesh scheme
output_mesh = output_mesh + "_" + str(fine_size) + "_" + str(coarse_size) + "_" + element_type
# Vertical length of cylinder
L_cylinder = abs(2 * Lower_cutoff)
# Create the journal file for debugging
print("Create journal file...")
j = open(journalFile, 'w')
j.write("# Journal file formatting, etc.\n" + \
"# ----------------------------------------------------------------------\n" + \
"# Set units to SI.\n" + \
"# ----------------------------------------------------------------------\n" \
"${Units('si')}\n" + \
"# Reset geometry.\n" + \
"# ----------------------------------------------------------------------\n" \
"reset\n")
j.write("# ----------------------------------------------------------------------\n" + \
"# Create a cylinder.\n" + \
"# ----------------------------------------------------------------------\n")
j.write("create cylinder height {0} radius {1}\n".format("{"+str(L_cylinder)+"*km}",\
"{"+str(R_cylinder)+"*km}"))
j.write("${idVol1=Id('volume')}\n")
if(Interface):
j.write("# ----------------------------------------------------------------------\n" + \
"# Import interface data.\n" + \
"# ----------------------------------------------------------------------\n")
j.write("import Acis '%s'\n" % Int_name)
j.write("${idInt=Id('surface')}\n")
else:
j.write("# ----------------------------------------------------------------------\n" + \
"# Create planar interface.\n" + \
"# ----------------------------------------------------------------------\n")
j.write("create planar surface zplane\n")
j.write("${idInt=Id('surface')}\n")
j.write("rotate surface {idInt} about Y angle %f\n" % Dip)
if(Strike != 0):
j.write("rotate surface {idInt} about Z angle %f\n" % -Strike)
j.write("surface {idInt} move z {%f*km}\n" % Dep)
if(Topography):
j.write("# ----------------------------------------------------------------------\n" + \
"# Import topography data\n" + \
"# ----------------------------------------------------------------------\n")
j.write("import Acis '%s'\n" % Top_name)
j.write("${idSur=Id('surface')}\n")
else:
j.write("# ----------------------------------------------------------------------\n" + \
"# Create planar free surface.\n" + \
"# ----------------------------------------------------------------------\n")
j.write("create planar surface zplane\n")
j.write("${idSur=Id('surface')}\n")
j.write("# ----------------------------------------------------------------------\n" + \
"# Webcut blocks.\n" + \
"# ----------------------------------------------------------------------\n")
j.write("webcut volume {idVol1} with sheet extended from surface {idSur}\n")
j.write("${idVol2=Id('volume')}\n")
j.write("webcut volume {idVol2} with sheet extended from surface {idInt}\n")
j.write("${idVol3=Id('volume')}\n")
j.write("# ----------------------------------------------------------------------\n" + \
"# Find and name the fault surface.\n" + \
"# ----------------------------------------------------------------------\n")
j.write("find surface overlap volume {idVol2} {idVol3}\n")
j.write("${idF1=GroupMemberId('surf_overlap','surface',0)}\n")
j.write("${idF2=GroupMemberId('surf_overlap','surface',1)}\n")
j.write("surface {idF1} name 'fault1'\n")
j.write("# ----------------------------------------------------------------------\n" + \
"# Create semi-sphere\n" + \
"# ----------------------------------------------------------------------\n")
j.write("create sphere radius {%f *km}\n" % R_model)
j.write("${idVol4=Id('volume')}\n")
j.write("webcut volume {idVol4} with sheet extended from surface {idSur}\n")
j.write("${idVol5=Id('volume')}\n")
j.write("${idround=Id('surface')}\n")
j.write("surface {idround} name 'spheresurf'\n")
j.write("# ----------------------------------------------------------------------\n" + \
"# Substract the semi-spehere from the blocks that contain the fault\n" + \
"# ----------------------------------------------------------------------\n")
j.write("subtract volume {idVol2} {idVol3} from volume {idVol5} keep\n")
j.write("${idVol6=Id('volume')}\n")
j.write("# ----------------------------------------------------------------------\n" + \
"# Delete unused blocks and surfaces.\n" + \
"# ----------------------------------------------------------------------\n")
j.write("delete surface all\n")
j.write("delete volume {idVol1} {idVol4} {idVol5} \n")
j.write("# ----------------------------------------------------------------------\n" + \
"# imprint and merge.\n" + \
"# ----------------------------------------------------------------------\n")
j.write("imprint all\n" + \
"merge all\n")
j.write("# ----------------------------------------------------------------------\n" + \
"# Generate the mesh.\n" + \
"# ----------------------------------------------------------------------\n")
if(mesh_scheme == "thex"):
j.write("volume all scheme TetMesh\n")
j.write("volume {idVol2} {idVol3} size {%f*km}\n" % fine_size)
j.write("mesh volume {idVol2} \n")
j.write("mesh volume {idVol3}\n")
j.write("volume {idVol6} size {%f*km}\n" % coarse_size)
j.write("mesh volume {idVol6} \n")
j.write("THex Volume all\n")
else:
print("Error mesh scheme!")
exit()
j.write("# ----------------------------------------------------------------------\n" + \
"# Smooth mesh to improve quality.\n" + \
"# ----------------------------------------------------------------------\n")
j.write("volume all smooth scheme condition number beta 2.0 cpu 4\n" + \
"smooth volume all\n")
j.write("set unmerge Duplicate_mesh on\n")
j.write("unmerge surface fault1 only\n")
j.write("surface {idF2} name 'fault2'\n")
j.write("# ----------------------------------------------------------------------\n" + \
"# Seperate nodes on fault.\n" + \
"# ----------------------------------------------------------------------\n")
j.write("set node constraint off\n")
j.write("node in surface fault1 move normal to surface fault1 distance {-0.01*m}\n")
j.write("node in surface fault2 move normal to surface fault2 distance {-0.01*m}\n")
j.write("compress all\n")
j.write("set node constraint on\n")
j.write("# End of file\n")
j.close()
if(DEBUG):
exit()
# ==================================================
# Read the CUBIT journal and playback it.
# ==================================================
print("Playback journal file...")
with open(journalFile) as f:
content = f.readlines()
for line in content:
cubit.cmd(line)
# ==================================================
# Save the mesh to txt files
# This part is revised from the code of Specfem3D
# ==================================================
print("")
print("Convert mesh to Specfem-format...")
os.system('mkdir -p MESH')
## fault surfaces (up/down)
Au = [cubit.get_id_from_name("fault1")]
Ad = [cubit.get_id_from_name("fault2")]
### Obtain the id of boundaries
# I define the original sphere surface as spheresurf. After webcut, CUBIT renames the newly cut surfaces by adding @A, @B ...
SpheresurfID = [cubit.get_id_from_name("spheresurf@A")]
# Find the surface ID for the free surface
freesur_tolerance = 3e3
FreesurfID = []
list_surf=cubit.parse_cubit_list("surface","all")
for k in list_surf:
center_point = cubit.get_center_point("surface", k)
if abs(center_point[2]) <= freesur_tolerance:
FreesurfID.append(k)
print(SpheresurfID,FreesurfID)
# define blocks
Vol_num = cubit.get_volume_count()
for i in range(Vol_num):
cubit.cmd('block {0} hex in vol {0}'.format(i+1))
cubit.cmd('block 1000 face in surface ' + str(list(SpheresurfID)).replace("["," ").replace("]"," "))
cubit.cmd('block 1000 name "face_semisphere"')
cubit.cmd('block 1001 face in surface ' + str(list(FreesurfID)).replace("["," ").replace("]"," "))
cubit.cmd('block 1001 name "face_topo"')
#### Define material properties for the 4 volumes ################
cubit.cmd('#### DEFINE MATERIAL PROPERTIES #######################')
for i in range(Vol_num):
cubit.cmd('block {0} name "elastic {0}" '.format(i+1)) # material region
cubit.cmd('block {0} attribute count {1}'.format(i+1,6))
cubit.cmd('block {0} attribute index 1 1'.format(i+1))
cubit.cmd('block {0} attribute index 2 {1}'.format(i+1,vp)) # vp
cubit.cmd('block {0} attribute index 3 {1}'.format(i+1,vs)) # vs
cubit.cmd('block {0} attribute index 4 {1}'.format(i+1,rho)) # rho
cubit.cmd('block {0} attribute index 5 {1}'.format(i+1,Q)) # Q flag (see constants.h: #IATTENUATION_ ... )
cubit.cmd('block {0} attribute index 6 0'.format(i+1)) # q flag (see constants.h: iattenuation_ ... )
#### Export to SPECFEM3D format using cubit2specfem3d.py of GEOCUBIT
if(element_type == "HEX27"):
cubit2specfem3d.export2SPECFEM3D('MESH',hex27=True)
else:
cubit2specfem3d.export2SPECFEM3D('MESH')
# You need to create the fault mesh file last, if using hex27.
faultA = save_fault_nodes_elements.fault_input(1,Au,Ad)
print("Save created mesh...")
# Save the created directory under the given name
os.system('rm -rf output/' + output_mesh)
os.system('mv MESH output/' + output_mesh)
# End of script
| gpl-3.0 | -3,311,555,830,420,008,400 | 43.317901 | 195 | 0.515913 | false | 3.61051 | false | false | false |
TheRedFireFox/AnimeSubBot | src/messages/msg_processor.py | 1 | 50108 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# standard lib
import re
import json
import datetime
import copy
# The custom modules
from . import message # imports in the same folder (module)
from . import emojis
class MessagePreProcessor(object):
"""
    This class is used as the user message preanalyser.
    This class will primarily be used so that the code stays
    easily reusable.
    The MessageObject will only contain a single message object,
    so that this class is thread safe and so that we can run
    multiple instances per unit.
The message object will contain all the following parts.\n
.. code-block:: python\n
{
'message': {
'date': 1439471738,
'text': '/start',
'from': {
'id': 3xxxxxx6,
'last_name': 'Sample',
'first_name': 'Max',
'username': 'TheUserName'
},
'message_id': 111,
'chat': {
'id': -xxxxxxx,
'title': 'Drive'
}
},
'update_id': 469262057
}
}
"""
def __init__(self,
MessageObject,
OutputQueue,
SqlObject,
Cursor,
LanguageObject,
LoggingObject,
ConfigurationObject,):
"""
Variables:
MessageObject ``object``
                the message to be analysed
"""
        self.LastSendCommand = None
        self.LastUsedId = None
        # This variable will stop the system if the message was sent during processing
self.MessageSend = False
        # Predefining attributes so that they can later be used for evil.
self.LoggingObject = None
self.ConfigurationObject = None
# output queue
self._OutputQueue_ = OutputQueue
# SqlObjects
self.SqlObject = SqlObject
self.SqlCursor = Cursor
self.LoggingObject = LoggingObject
self.ConfigurationObject = ConfigurationObject
        # This variable is needed for the logger so that the logs end up
# getting printed in the correct language.
self.M_ = LanguageObject.CreateTranslationObject().gettext
if "update_id" in MessageObject:
# The update‘s unique identifier. Update identifiers start from a
# certain positive number and increase sequentially. This ID
# becomes especially handy if you’re using web hooks, since it
# allows you to ignore repeated updates or to restore the correct
# update sequence, should they get out of order.
self.UpdateId = MessageObject["update_id"]
if "message_id" in MessageObject["message"]:
# Unique message identifier
self.MessageID = MessageObject["message"]["message_id"]
# get the user of the message
# get user data from the message
if "first_name" in MessageObject["message"]["from"]:
# User‘s or bot’s first name
self.UserFirstName = MessageObject["message"]["from"]["first_name"]
else:
self.UserFirstName = ""
if "last_name" in MessageObject["message"]["from"]:
# Optional. User‘s or bot’s last name
self.UserLastName = MessageObject["message"]["from"]["last_name"]
else:
self.UserLastName = ""
if "username" in MessageObject["message"]["from"]:
# Optional. User‘s or bot’s username
self.UserName = MessageObject["message"]["from"]["username"]
else:
self.UserName = ""
if "id" in MessageObject["message"]["from"]:
# Unique identifier for this user or bot
self.UserId = MessageObject["message"]["from"]["id"]
# Add user to the system if not exists
if self.UserExists() is False:
self.AddUser()
# Get the Internal user id
self.InternalUserId, self.IsAdmin = self.GetUserData()
# Here we are initialising the function for the translations.
# Get the user settings from the user that has send the message
Query = ("SELECT User_Setting_Table.User_String FROM "
"User_Setting_Table INNER JOIN Setting_Table ON "
"User_Setting_Table.Master_Setting_Id="
"Setting_Table.Id WHERE Setting_Table.Setting_Name=%s"
" AND User_Setting_Table.Set_By_User=%s;"
)
Data = ("Language", self.InternalUserId)
self.LanguageName = (
self.SqlObject.ExecuteTrueQuery(
self.SqlCursor,
Query,
Data
)[0]["User_String"])
self.LanguageObject = LanguageObject
Language = self.LanguageObject.CreateTranslationObject(
Languages=[self.LanguageName]
)
# create the translator
self._ = Language.gettext
# Get the text message with the command
if "text" in MessageObject["message"]:
self.Text = MessageObject["message"]["text"]
else:
self.Text = None
# where was the message send from the user or the group
# Get the chat id
if "id" in MessageObject["message"]["chat"]:
# Unique identifier for this group chat
self.ChatId = MessageObject["message"]["chat"]["id"]
# Check if message is from a group or not.
if self.ChatId == self.UserId:
self.InGroup = False
else:
self.InGroup = True
self.GroupName = MessageObject["message"]["chat"]["title"]
# Check if group exists
if self.GroupExists() is False:
self.AddGroup()
self.InternalGroupId = self.GetInternalGroupId()
if "date" in MessageObject["message"]:
# changing the arrival time to a python understandable time
# as well as a MySql understandable format
self.MessageDate = datetime.datetime.fromtimestamp(
int(MessageObject["message"]["date"])
).strftime('%Y-%m-%d %H:%M:%S')
if "forward_from" in MessageObject["message"]:
self.ForwardedFrom = MessageObject["message"]["forward_from"]
if "forward_date" in MessageObject["message"]:
# Optional. For forwarded messages, date the original
# message was sent in Unix time
self.forward_date = MessageObject["message"]["forward_from"]
if "reply_to_message" in MessageObject["message"]:
# Optional. For replies, the original message. Note that
# the Message object in this field will not contain further
# reply_to_message fields even if it itself is a reply.
self.ReplyToMessage = MessageObject["message"]["reply_to_message"]
if "audio" in MessageObject["message"]:
# Optional. Message is an audio file, information about the file
self.MessageAudio = MessageObject["message"]["audio"]
if "document" in MessageObject["message"]:
# Optional. Message is a general file, information about the file
            self.MessageDocument = MessageObject["message"]["document"]
if "photo" in MessageObject["message"]:
# Optional. Message is a photo, available sizes of the photo
self.MessagePhoto = MessageObject["message"]["photo"]
if "sticker" in MessageObject["message"]:
# Optional. Message is a sticker, information about the sticker
self.MessageSticker = MessageObject["message"]["sticker"]
if "video" in MessageObject["message"]:
# Optional. Message is a video, information about the video
self.MessageVideo = MessageObject["message"]["video"]
if "caption" in MessageObject["message"]:
# Optional. Caption for the photo or video
self.MessageCaption = MessageObject["message"]["caption"]
if "contact" in MessageObject["message"]:
# Optional. Message is a shared contact, information about
# the contact
self.MessageContact = MessageObject["message"]["contact"]
if "location" in MessageObject["message"]:
# Optional. Message is a shared location, information about
# the location
self.MessageLocation = MessageObject["message"]["location"]
if "venue" in MessageObject["message"]:
# Optional. Message is a venue, information about the venue
self.Venue = MessageObject["message"]["venue"]
if "new_chat_participant" in MessageObject["message"]:
# Optional. A new member was added to the group, information
# about them (this member may be bot itself)
self.MessageNewChatParticipant = (
MessageObject["message"]["new_chat_participant"]
)
if "left_chat_participant" in MessageObject["message"]:
# Optional. A member was removed from the group, information
# about them (this member may be bot itself)
self.MessageLeftChatParticipant = (
MessageObject["message"]["left_chat_participant"]
)
if "new_chat_title" in MessageObject["message"]:
# Optional. A group title was changed to this value
self.MessageNewChatTitle = (
MessageObject["message"]["new_chat_title"]
)
if "new_chat_photo" in MessageObject["message"]:
# Optional. A group photo was change to this value
self.MessageNewChatPhoto = (
MessageObject["message"]["new_chat_photo"]
)
if "delete_chat_photo" in MessageObject["message"]:
# Optional. Informs that the group photo was deleted
self.MessageDeleteChatPhoto = (
MessageObject["message"]["delete_chat_photo"]
)
if "group_chat_created" in MessageObject["message"]:
# Optional. Informs that the group has been created
self.MessageGroupChatCreated = (
MessageObject["message"]["group_chat_created"]
)
if "supergroup_chat_created" in MessageObject["message"]:
# Optional. Service message: the supergroup has been created
self.SupergroupChatCreated = MessageObject["message"]["supergroup_chat_created"]
if "channel_chat_created" in MessageObject["message"]:
# Optional. Service message: the channel has been created
self.ChannelChatCreated = MessageObject["message"]["channel_chat_created"]
if "migrate_to_chat_id" in MessageObject["message"]:
# Optional. The group has been migrated to a supergroup with
# the specified identifier, not exceeding 1e13 by absolute
# value
self.MigrateToChatId = MessageObject["message"]["migrate_to_chat_id"]
if "migrate_from_chat_id" in MessageObject["message"]:
# Optional. The supergroup has been migrated from a group
# with the specified identifier, not exceeding 1e13 by
# absolute value
self.migrate_from_chat_id = MessageObject["message"]["migrate_from_chat_id"]
if "pinned_message" in MessageObject["message"]:
# Optional. Specified message was pinned. Note that the
# Message object in this field will not contain further
# reply_to_message fields even if it is itself a reply.
self.PinnedMessage = MessageObject["message"]["pinned_message"]
def _SendToQueue_(self, MessageObject):
"""
        This method is a private helper that sends the finished message to the postprocessing and shipping class.
Variables:
- MessageObject ``object``
                is the message object that has to be sent
"""
MessageObjectList = []
if MessageObject is not None:
            if len(MessageObject.Text) > 4096:
                # copy the object for every chunk so that each list entry
                # keeps its own text instead of all pointing to the last chunk
                import copy
                for TextPart in MessageProcessor.Chunker(MessageObject.Text, 4095):
                    TemporaryObjectHolder = copy.copy(MessageObject)
                    TemporaryObjectHolder.Text = TextPart
                    MessageObjectList.append(TemporaryObjectHolder)
else:
MessageObjectList.append(MessageObject)
if self.MessageSend is False:
Workload = []
if isinstance(MessageObject, list):
Workload.extend(MessageObject)
elif isinstance(MessageObject, dict):
Workload.append(MessageObject)
for Message in MessageObjectList:
self._OutputQueue_.put(Message)
def UserExists(self, ):
"""
        This method will detect if the user already exists or not.
        The following query will return 1 if a user with the specified
        username exists and 0 otherwise.
        .. code-block:: sql\n
            SELECT EXISTS(SELECT 1 FROM mysql.user WHERE user = 'username')
        It will return True if the database returns a 1 and False
        if it returns a 0.
Variables:
\-
"""
exists = self.SqlObject.ExecuteTrueQuery(
self.SqlObject.CreateCursor(Dictionary=False),
Query=("SELECT EXISTS(SELECT 1 FROM User_Table WHERE"
" External_Id = %s);"
),
Data=self.UserId
)[0][0]
if exists == 0:
return False
else:
return True
def AddUser(self, ):
"""
This method will add a new user to the database.
Variables:
\-
"""
# Insert into user
TableName = "User_Table"
Columns = {
"External_Id": self.UserId,
"User_Name": self.UserName,
"First_Name": self.UserFirstName,
"Last_Name": self.UserLastName
}
self.SqlObject.InsertEntry(self.SqlCursor, TableName, Columns)
self.SqlObject.Commit()
# insert default settings
# get default values
# get the default settings
# get the default language
FromTable = "Setting_Table"
Columns = ["Id", "Default_String"]
Where = [["Setting_Name", "=", "%s"]]
Data = ("Language")
MasterSetting = self.SqlObject.SelectEntry(
self.SqlCursor,
FromTable=FromTable,
Columns=Columns,
Where=Where,
Data=Data
)[0]
TableName = "User_Setting_Table"
self.InternalUserId = self.GetUserData()[0]
Columns = {
"Master_Setting_Id": MasterSetting["Id"],
"Set_By_User": self.InternalUserId,
"User_String": MasterSetting["Default_String"]
}
self.SqlObject.InsertEntry(
self.SqlCursor,
TableName,
Columns
)
self.SqlObject.Commit()
def GetUserData(self):
"""
This method will get the internal user id and the admin state
from the database.
Variables:
\-
"""
# first the internal user id
FromTable = "User_Table"
Columns = ["Internal_Id", "Is_Admin"]
Where = [["External_Id", "=", "%s"]]
Data = (self.UserId,)
temp = self.SqlObject.SelectEntry(
self.SqlCursor,
FromTable=FromTable,
Columns=Columns,
Where=Where,
Data=Data
)[0]
internalUserId = temp["Internal_Id"]
Is_Admin = temp["Is_Admin"]
if Is_Admin == 0:
Is_Admin = False
else:
Is_Admin = True
return internalUserId, Is_Admin
def GetMessageObject(self,):
"""
This method will generate a default message object to work with.
Variables:
\-
"""
MessageObject = message.MessageToBeSend(ToChatId=self.ChatId)
MessageObject.Text = self._("Sorry, but this command could not be"
" interpreted.")
return MessageObject
@staticmethod
def Chunker(ListOfObjects, SizeOfChunks):
"""
Yield successive n-sized (SizeOfChunks) chunks from the list (ListOfObjects).
This methode will not return anything, but act as a generator object.
Variables:
- ListOfObjects ``generator, list or string``
This variable holds all the stuff to split.
- SizeOfChunks ``integer``
Holds the size of the chunks to turn the ListOfObjects
into.
"""
for i in range(0, len(ListOfObjects), SizeOfChunks):
yield ListOfObjects[i:i+SizeOfChunks]
@staticmethod
def SpacedChunker(String, SizeOfChunks):
"""
        This method will split a string by the spaces inside and separate it into correctly sized chunks.
Variables:
- String ``string``
This variable holds all the stuff to split.
SizeOfChunks ``integer``
Holds the size of the chunks to turn the ListOfObjects
into.
"""
        EndList = []
        TempString = ""
        for Word in String.split(" "):
            # start a new chunk as soon as adding the next word would make
            # the current chunk longer than SizeOfChunks
            if TempString and len(TempString) + len(Word) + 1 > SizeOfChunks:
                EndList.append(TempString)
                TempString = Word
            elif TempString:
                TempString += " " + Word
            else:
                TempString = Word
        if TempString:
            EndList.append(TempString)
        return EndList
def GroupExists(self):
"""
This method checks if the group exists or not.
        The following query will return a 1 if a group with the
        specified id exists and a 0 otherwise. From that on
the system will return True if the group exists and if it
doesn't False.\n
.. code-block:: sql\n
SELECT EXISTS(SELECT 1 FROM mysql.user WHERE user = 'username')
Variables:
\-
"""
# This method checks in the database if the group (if it is one)
# exists.
Exists = self.SqlObject.ExecuteTrueQuery(
self.SqlObject.CreateCursor(Dictionary=False),
Query="SELECT EXISTS(SELECT 1 FROM Group_Table WHERE"
" External_Group_Id = %s);",
Data=self.ChatId
)[0][0]
if Exists == True:
return True
else:
return False
def AddGroup(self):
"""
        This method will add a group to the database if it does not exist yet.
Variables:
\-
"""
        # This method will add the group if it doesn't exist yet.
self.SqlObject.InsertEntry(
self.SqlCursor,
TableName="Group_Table",
Columns={
"External_Id": self.ChatId,
"Group_Name": self.GroupName
},
)
self.SqlObject.Commit(self.SqlCursor)
def GetInternalGroupId(self):
"""
        This method will get the internal group id of the chat.
        This method will return the internal group id directly from
        the database.
Variables:
\-
"""
return self.SqlObject.SelectEntry(
self.SqlCursor,
FromTable="Group_Table",
Columns=["Internal_Group_Id"],
Where=[["External_Group_Id", "=", "%s"]],
Data=self.ChatId
)
def SetLastSendCommand(self, Command, LastUsedId=None, LastUsedData = None):
"""
This method will save the last user command into the database.
        The commands used can be set manually by the programmer
        so that they can be used for flow control.
Example:\n
.. code-block:: guess\n
/Command option
Variables:
Command ``string``
This is the used command with the option, that was
used.
LastUsedId ``integer``
This is the last used id, it can be every id, depending
the situation.
"""
TableName = "Session_Table"
Columns = {
"Command_By_User": self.InternalUserId,
"Command": Command,
}
Duplicate = {
"Command": Command,
}
if LastUsedId is not None:
Columns["Last_Used_Id"] = LastUsedId
Duplicate["Last_Used_Id"] = LastUsedId
if LastUsedData is not None:
Columns["Last_Used_Data"] = LastUsedData
Duplicate["Last_Used_Data"] = LastUsedData
SetLastSendCommand = self.SqlObject.InsertEntry(
self.SqlCursor,
TableName=TableName,
Columns=Columns,
Duplicate=Duplicate)
self.SqlObject.Commit()
def GetLastSendCommand(self):
"""
This method will get the last user command.
This method will get the last user command from the database,
so that the last command can be used for flow control.
        The commands are mostly set by the system and not by the user,
        at least not directly.
Example:\n
.. code-block:: guess\n
/command option
Variables:
\-
Return:
- LastSendCommand["Last_Used_Id"]
- LastSendCommand["Command"]
"""
FromTable = "Session_Table"
Columns = ["Command", "Last_Used_Id", "Last_Used_Data"]
Where = [["Command_By_User", "=", "%s"]]
Data = (self.InternalUserId,)
LastSendCommand = self.SqlObject.SelectEntry(
self.SqlCursor,
FromTable=FromTable,
Columns=Columns,
Where=Where,
Data=Data
)
        if len(LastSendCommand) > 0:
            LastSendCommand = LastSendCommand[0]
        else:
            # no command has been stored for this user yet
            LastSendCommand = {
                "Command": None,
                "Last_Used_Id": None,
                "Last_Used_Data": None,
            }
return LastSendCommand
def ClearLastCommand(self):
"""
This method clears the last set command if the process finished.
Variables:
\-
"""
self.SqlObject.UpdateEntry(
Cursor=self.SqlCursor,
TableName="Session_Table",
Columns={
"Command": "0",
"Last_Used_Id": 0
},
Where=[["Command_By_User", self.InternalUserId]],
Autocommit=True
)
def ChangeUserLanguage(self, Language):
"""
This method changes the user language.
This method is responsible for initialising the language change,
as well as activating the new language. It will return True
if the new language could be initialised and False if there has
been an error.
Variables:
Language ``string``
should be a string with the new language file
"""
if Language == "English":
Language = "en_US"
elif Language == "Deutsch":
Language = "de_DE"
self.SqlObject.UpdateEntry(
Cursor=self.SqlCursor,
TableName="User_Setting_Table",
Columns={"User_String": Language},
Where=[["Master_User_Id", self.InternalUserId]],
Autocommit=True
)
try:
self.LanguageName = Language
Language = self.LanguageObject.CreateTranslationObject(self.LanguageName)
self._ = self.LanguageObject.gettext
if self.LanguageObject.info()["language"] != Language:
raise ImportError(
self.M_("Unknown language error")
)
return True
except ImportError as Error:
self.LoggingObject.error("{} {}".format(
self.M_("There has been an error with the changing of the "
"language class, this error has been returned: {Error}"
).format(Error=Error),
self.M_("Please, contact your administrator.")
)
)
return False
def InterpretMessage(self):
"""
        This method is here to be overridden by a child class.
Variables:
\-
"""
raise NotImplementedError
class MessageProcessor(MessagePreProcessor):
"""
This class is used as the user message analyser.
It extends the MessagePreProcessor class with the needed
    methods for analysing the message object.
    The MessageObject will only contain a single message object,
so that this class will be thread save and so that we can run
multiple instances per unit.
The message object will contain all the following parts.\n
.. code-block:: python\n
{
'message': {
'date': 1439471738,
'text': '/start',
'from': {
'id': 3xxxxxx6,
'last_name': 'Sample',
'first_name': 'Max',
'username': 'TheUserName'
},
'message_id': 111,
'chat': {
'id': -xxxxxxx,
'title': 'Drive'
}
},
'update_id': 469262057
        }
"""
def InterpretMessage(self):
"""
This method interprets the user text.
        This method is used as a pre-interpreter of the text sent by the user.
        It primarily determines whether the sent text is a command or not.
        It will choose the correct interpretation system depending on
        whether the text has been sent from a group or not.
It returns the MessageObject after letting it get modified.
Variables:
\-
"""
MessageObject = self.GetMessageObject()
# check if message is a command
if self.Text is not None:
# Analyse the text and do your stuff.
# delete the annoying bot command from the text to analyse
# If the name of the bot is used in the
# command delete the @NameOfBot
self.Text = re.sub(r"^(@\w+[bB]ot\s+)?", "", self.Text)
if self.Text.startswith("/"):
if self.InGroup is False:
MessageObject = self.InterpretUserCommand(MessageObject)
else:
MessageObject = self.InterpretGroupCommand(MessageObject)
else:
# Get the last send command and the last used id
LastSendCommand = self.GetLastSendCommand()
self.LastUsedId = LastSendCommand["Last_Used_Id"]
self.LastSendCommand = LastSendCommand["Command"]
self.LastSendData = LastSendCommand["Last_Used_Data"]
if self.InGroup is False:
MessageObject = self.InterpretUserNonCommand(MessageObject)
#else:
# MessageObject = self.InterpretGroupNonCommand(MessageObject)
else:
MessageObject = None
        # checking that the length of the message will never be longer than
        # 4096 characters
self._SendToQueue_(MessageObject)
def InterpretUserCommand(self, MessageObject):
"""
        This method interprets the commands from the user text.
        This method is used as an interpreter of the commands sent by
        the user. It returns the MessageObject
        after analysing and modifying the MessageObject to respond
        to the user Text.
Variables:
- MessageObject ``object``
is the message object that has to be modified
"""
# register the command in the database for later use
if self.Text.startswith("/start"):
MessageObject.Text = self._("Welcome.\nWhat can I do for you?"
"\nPress /help for all my commands"
)
Markup = [
["/help"],
["/list"]
]
if self.IsAdmin is True:
Markup[0].append("/admin")
MessageObject.ReplyKeyboardMarkup(Markup,
OneTimeKeyboard=True
)
self.ClearLastCommand()
# this command will list the anime content on the server
elif self.Text == "/list":
# this command will send the anime list
MessageObject.Text = self._("Sorry\nAt the moment this command is not supported")
elif self.Text == "/done":
self.Text = "/start"
MessageObject = self.InterpretUserCommand(MessageObject)
elif self.Text == "/help":
MessageObject.Text = self._(
"Work in progress! @AnimeSubBot is a bot."
)
elif self.Text == "/admin":
# if that person is an administrator.
if self.IsAdmin:
self.InterpretAdminCommands(MessageObject)
self.SetLastSendCommand("/admin", None)
else:
MessageObject.Text = self._("You don't have the right to use that command.")
# the settings are right now not supported, maybe later.
"""elif self.Text == "/settings":
# This command will send the possible setting to the user
self.SetLastSendCommand("/settings", None)
MessageObject.Text = self._("Please, choose the setting to change:"
)
MessageObject.ReplyKeyboardMarkup(
[
["/language"],
["/comming soon"]
],
OneTimeKeyboard=True
)
elif self.Text == "/language":
# This option will change the user language
# Set the last send command
self.SetLastSendCommand("/language")
MessageObject.Text = self._(
"Please choose your preferred language:"
)
MessageObject.ReplyKeyboardMarkup([
["English"],
["Deutsch"],
["Français"]
],
OneTimeKeyboard=True
)
"""
else:
# send that the command is unknown
MessageObject.Text = self._("I apologize, but this command is not supported.\n"
"Press or enter /help to get help.")
return MessageObject
def InterpretUserNonCommand(self, MessageObject):
"""
        This method interprets the non-command text from the user.
        This method is used as an interpreter of the system-set
        commands and the text sent by the user. It returns the MessageObject
        after modifying it.
Variables:
MessageObject ``object``
is the message object that has to be modified
"""
if self.LastSendCommand is None:
# if there is nothing return the default.
return MessageObject
"""
if LastSendCommand == "/language":
self.ChangeUserLanguage(self.Text)
MessageObject.Text = self._("Language changed successfully.")
MessageObject.ReplyKeyboardHide()
self.ClearLastCommand()
"""
if self.LastSendCommand.startswith("/admin"):
# see that the admin commands are interpreted correctly
MessageObject = self.InterpretAdminCommands(MessageObject)
return MessageObject
def InterpretGroupCommand(self, MessageObject):
"""
        This method will interpret all the commands sent in a group.
Variables:
MessageObject ``object``
is the message object that has to be modified
"""
if self.Text == "/help":
MessageObject.Text = self._(
"Work in progress! @AnimeSubBot is a bot"
)
return MessageObject
def InterpretAdminCommands(self, MessageObject):
"""
        This method will interpret all the admin commands.
Variables:
MessageObject ``object``
is the message object that has to be modified
Commands:
Channel
- add channel
- change description
- send description
- delete channel
Anime list
- publish list
- add anime
- configure anime
- remove Anime
"""
if self.Text != "/admin":
if self.LastSendCommand == "/admin":
# the default screen
if self.Text.startswith(self._("anime")):
MessageObject.Text = self._("What do you want to do?")
MessageObject.ReplyKeyboardMarkup(
[
[self._("publish list")],
[self._("add anime")],
[self._("configure anime")],
[self._("remove anime")],
[self._("back")],
],
OneTimeKeyboard=True
)
self.SetLastSendCommand("/admin anime", None)
elif self.Text.startswith(self._("channel")):
MessageObject.Text = self._("What do you want to do?")
MessageObject.ReplyKeyboardMarkup(
[
[self._("add channel")],
[self._("change description")],
[self._("send description")],
[self._("delete channel")],
[self._("back")],
],
OneTimeKeyboard=True
)
self.SetLastSendCommand("/admin channel", None)
elif self.Text == self._("back"):
self.Text = "/start"
MessageObject = self.InterpretUserCommand(MessageObject)
elif self.LastSendCommand.startswith("/admin anime"):
# the anime commands
if self.Text == "publish list":
# 1) publish to channel
pass
elif self.Text == "add anime":
                    # Please enter the url and be patient while the program extracts the information. To cancel please write CANCEL. -- ;:; -> delimiter
# 1) automatic (a) vs manual entry (b)
# 2a) extract URL =?> CANCEL -> to admin
# 3a) confirm Yes -> save data / No -> to admin
# 4a) add telegram url
# 2b) enter name
# 3b) enter publish date
# 4b) enter myanimelist.net url
# 5b) enter telegram url
pass
elif self.Text == "configure anime":
# 1) search by name
# 2) show possible names (repeats until correct)
# 3) change by data => Telegram URL; Date; Name;
pass
elif self.Text == "remove anime":
# 1) search by name
# 2) show possible names (repeats until correct)
# 3) check if user is sure and then delete anime
pass
elif self.Text == "back":
self.Text = "/admin"
self.ClearLastCommand()
self.InterpretUserCommand(MessageObject)
elif self.LastSendCommand.startswith("/admin channel"):
# the channel commands
ChannelObject = Channel(self.SqlObject, self.SqlCursor)
if self.LastSendCommand.startswith("/admin channel"):
if self.Text == "add channel" or self.LastSendCommand.startswith("/admin channel"):
# add new channel
# 1) Please enter the name of the channel - enter CANSEL to exit
# 1a) back to admin hub
# 2) check if channel exists - save (a) or error (b)
# 2a) save channel name
# 2b) back to admin channnel
# 3a) enter description
# 3b) chancel => return to admin hub
# 3ab) is the text ok Yes / No
# 4a) enter buttons to use with description YES / NO
# 4b) chancel => return to admin hub
# 5a) success
if self.Text == "add channel":
MessageObject.Text = self._("Please send the name of the channel in this form @example_channel or send /done")
self.SetLastSendCommand("/admin channel add", None)
if self.LastSendCommand.startswith("/admin channel add"):
if self.LastSendCommand == "/admin channel add":
# 2) check if channel exists - save (a) or error (b)
if self.Text.startswith("@"):
                                # enter the channel name into the database if the channel doesn't exist yet
if ChannelObject.ChannelExists(self.Text) is True:
                                    # 2b) back to admin channel
MessageObject.Text = self._("The channel already exists.\nTo change the description choose \"change description\" in the options.")
self.SetLastSendCommand("/admin channel")
else:
# 3a) enter description
ChannelObject.AddChannel(self.Text, ByUser = self.InternalUserId)
MessageObject.Text = self._("Please enter the channel description, to chancel send CANCEL")
self.SetLastSendCommand("/admin channel add channel description", LastUsedData = self.Text)
elif self.LastSendCommand == "/admin channel add description":
if self.Text != "CANCEL":
MessageObject.Text = self._("Is the description to your liking?")
MessageObject.ReplyKeyboardMarkup([
[self._("YES")],
[self._("NO")]
],
OneTimeKeyboard=True
)
# 4a) enter buttons to use with description
if self.Text != "CANCEL":
MessageObject.Text = self._("Do you wish to add buttons?")
MessageObject.ReplyKeyboardMarkup([
[self._("YES")],
[self._("NO")]
],
OneTimeKeyboard=True
)
                                    # save the description (buttons can be added in the next step)
                                    ChannelObject.ChangeDescription(self.LastSendData, self.Text, ByUser = self.InternalUserId)
                                    self.SetLastSendCommand("/admin channel add description buttons unsure", LastUsedData = self.LastSendData)
else:
                                    MessageObject.Text = self._("To change the description choose \"change description\" in the options.")
self.SetLastSendCommand("/admin channel")
elif self.LastSendCommand == "/admin channel add description buttons unsure":
if self.Text == self._("YES"):
# 4a) enter buttons to use with description YES
MessageObject.Text = self._("Please send the buttons like this:\nText;Url\nText;Url")
self.SetLastSendCommand("/admin channel add description buttons sure", LastUsedData = self.LastSendData)
else:
# 4b) no => return to admin hub
self.SetLastSendCommand("/admin channel")
elif self.LastSendCommand == "/admin channel add description buttons sure":
ChannelObject.ChangeDescriptionButton(self.LastSendData, self.Text, self.InternalUserId)
                                Description, Buttons = ChannelObject.GetDescription(self.LastSendData)
MessageObject.Text = Description
if Buttons is not None:
for Line in Buttons.split("\n"):
Text, Url = Line.split(";")
MessageObject.AddInlineButton(Text, Url)
self._SendToQueue_(MessageObject)
elif self.Text == "change description":
pass
elif self.Text == "send description":
MessageObject.Text = Description
if Buttons is not None:
for Line in Buttons.split("\n"):
Text, Url = Line.split(";")
MessageObject.AddInlineButton(Text, Url)
elif self.Text == "delete channel":
pass
elif self.Text == "back":
self.Text = "/admin"
self.ClearLastCommand()
self.InterpretUserCommand(MessageObject)
else:
MessageObject.Text = self._("How can I help you?")
MessageObject.ReplyKeyboardMarkup(
[
[self._("anime")],
[self._("channel")],
[self._("back")],
],
OneTimeKeyboard=True
)
self.SetLastSendCommand("/admin", None)
return MessageObject
class Channel(object):
def __init__(self,
SqlObject,
Cursor):
self.SqlObject = SqlObject
self.Cursor = Cursor
def AddChannel(self, Name, Description = None, ByUser = None):
"""
        This method will insert the channel into the database.
        Variables:
            - Name                    ``string``
                the true name of the channel; this is used to identify
                the channel
            - Description             ``string``
                the channel description
            - ByUser                  ``integer``
                the user who created the channel
"""
Data = {"True_Name": Name}
if Description is not None:
Data["Description"] = Description
if ByUser is not None:
Data["By_User"] = ByUser
Data["Last_Changes"] = ByUser
self.SqlObject.InsertEntry(self.Cursor,
"Channel_Table",
Data,
)
self.SqlObject.Commit()
def ChangeDescription(self, Name, Description, ByUser = None):
"""
        This method will change the description of the channel.
        Variables:
            - Name                    ``string``
                the true name of the channel; this is used to identify
                the channel
            - Description             ``string``
                the channel description
            - ByUser                  ``string``
                the user that changed the value
"""
Data = {"Description": Description}
if ByUser is not None:
Data["Last_Changes"] = ByUser
Where = [
[
"True_Name",
"=",
Name,
],
]
self.SqlObject.UpdateEntry(self.Cursor,
"Channel_Table",
Data,
Where
)
self.SqlObject.Commit()
def ChangeDescriptionButton(self, Name, Buttons, ByUser = None):
"""
        This method will change the description buttons of the channel.
        Variables:
            - Name                    ``string``
                the true name of the channel; this is used to identify
                the channel
            - Buttons                 ``string``
                the description buttons of the channel
            - ByUser                  ``string``
                the user that changed the value
"""
Data = {"Description_Buttons": Buttons}
if ByUser is not None:
Data["Last_Changes"] = ByUser
Where = [
[
"True_Name",
"=",
Name,
],
]
self.SqlObject.UpdateEntry(self.Cursor,
"Channel_Table",
Data,
Where
)
self.SqlObject.Commit()
def ChannelExists(self, Name):
"""
        This method will detect if the channel already exists or not.
        The following query will return 1 if a channel with the specified
        name exists and 0 otherwise.
        .. code-block:: sql\n
            SELECT EXISTS(SELECT 1 FROM mysql.user WHERE user = 'username')
        It will return True if the database returns a 1 and False
        if it returns a 0.
Variables:
\-
"""
exists = self.SqlObject.ExecuteTrueQuery(
self.SqlObject.CreateCursor(Dictionary=False),
Query=("SELECT EXISTS(SELECT 1 FROM Channel_Table WHERE"
" True_Name = %s);"
),
Data=Name
)[0][0]
if exists == 0:
return False
else:
return True
def GetChannels(self):
"""
        This method will get all the channels.
        """
        Columns = ("True_Name",)
        Channels = self.SqlObject.Select(
            Cursor = self.Cursor,
            FromTable = "Channel_Table",
            Columns = Columns,)
        return Channels
def GetDescription(self, Name):
pass
class Anime(object):
def __init__(self,
SqlObject,
Cursor):
self.SqlObject = SqlObject
self.Cursor = Cursor
| gpl-2.0 | 2,357,790,934,075,367,400 | 37.005311 | 171 | 0.495638 | false | 5.296151 | false | false | false |
brutasse/django-ses | django_ses/utils.py | 2 | 6526 | import base64
import logging
from io import StringIO
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from django.core.exceptions import ImproperlyConfigured
from django.utils.encoding import smart_str
from django_ses import settings
logger = logging.getLogger(__name__)
class BounceMessageVerifier(object):
"""
A utility class for validating bounce messages
See: http://docs.amazonwebservices.com/sns/latest/gsg/SendMessageToHttp.verify.signature.html
"""
def __init__(self, bounce_dict):
"""
Creates a new bounce message from the given dict.
"""
self._data = bounce_dict
self._verified = None
def is_verified(self):
"""
Verifies an SES bounce message.
"""
if self._verified is None:
signature = self._data.get('Signature')
if not signature:
self._verified = False
return self._verified
# Decode the signature from base64
signature = base64.b64decode(signature)
# Get the message to sign
sign_bytes = self._get_bytes_to_sign()
if not sign_bytes:
self._verified = False
return self._verified
if not self.certificate:
self._verified = False
return self._verified
# Extract the public key
pkey = self.certificate.get_pubkey()
# Use the public key to verify the signature.
pkey.verify_init()
pkey.verify_update(sign_bytes)
verify_result = pkey.verify_final(signature)
self._verified = verify_result == 1
return self._verified
@property
def certificate(self):
"""
Retrieves the certificate used to sign the bounce message.
TODO: Cache the certificate based on the cert URL so we don't have to
retrieve it for each bounce message. *We would need to do it in a
secure way so that the cert couldn't be overwritten in the cache*
"""
if not hasattr(self, '_certificate'):
cert_url = self._get_cert_url()
# Only load certificates from a certain domain?
# Without some kind of trusted domain check, any old joe could
# craft a bounce message and sign it using his own certificate
# and we would happily load and verify it.
if not cert_url:
self._certificate = None
return self._certificate
try:
import requests
except ImportError:
raise ImproperlyConfigured("requests is required for bounce message verification.")
try:
import M2Crypto
except ImportError:
raise ImproperlyConfigured("M2Crypto is required for bounce message verification.")
# We use requests because it verifies the https certificate
# when retrieving the signing certificate. If https was somehow
# hijacked then all bets are off.
response = requests.get(cert_url)
if response.status_code != 200:
logger.warning('Could not download certificate from %s: "%s"', cert_url, response.status_code)
self._certificate = None
return self._certificate
# Handle errors loading the certificate.
# If the certificate is invalid then return
# false as we couldn't verify the message.
try:
self._certificate = M2Crypto.X509.load_cert_string(response.content)
except M2Crypto.X509.X509Error as e:
logger.warning('Could not load certificate from %s: "%s"', cert_url, e)
self._certificate = None
return self._certificate
def _get_cert_url(self):
"""
Get the signing certificate URL.
Only accept urls that match the domains set in the
AWS_SNS_BOUNCE_CERT_TRUSTED_DOMAINS setting. Sub-domains
are allowed. i.e. if amazonaws.com is in the trusted domains
then sns.us-east-1.amazonaws.com will match.
"""
cert_url = self._data.get('SigningCertURL')
if cert_url:
if cert_url.startswith('https://'):
url_obj = urlparse(cert_url)
for trusted_domain in settings.BOUNCE_CERT_DOMAINS:
parts = trusted_domain.split('.')
if url_obj.netloc.split('.')[-len(parts):] == parts:
return cert_url
logger.warning('Untrusted certificate URL: "%s"', cert_url)
else:
logger.warning('No signing certificate URL: "%s"', cert_url)
return None
def _get_bytes_to_sign(self):
"""
Creates the message used for signing SNS notifications.
This is used to verify the bounce message when it is received.
"""
# Depending on the message type the fields to add to the message
# differ so we handle that here.
msg_type = self._data.get('Type')
if msg_type == 'Notification':
fields_to_sign = [
'Message',
'MessageId',
'Subject',
'Timestamp',
'TopicArn',
'Type',
]
elif (msg_type == 'SubscriptionConfirmation' or
msg_type == 'UnsubscribeConfirmation'):
fields_to_sign = [
'Message',
'MessageId',
'SubscribeURL',
'Timestamp',
'Token',
'TopicArn',
'Type',
]
else:
# Unrecognized type
logger.warning('Unrecognized SNS message Type: "%s"', msg_type)
return None
outbytes = StringIO()
for field_name in fields_to_sign:
field_value = smart_str(self._data.get(field_name, ''),
errors="replace")
if field_value:
outbytes.write(field_name)
outbytes.write("\n")
outbytes.write(field_value)
outbytes.write("\n")
return outbytes.getvalue()
def verify_bounce_message(msg):
"""
Verify an SES/SNS bounce notification message.
"""
verifier = BounceMessageVerifier(msg)
return verifier.is_verified()
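# Example usage sketch (added for illustration; not part of the original
# module). A Django view receiving SNS notifications could verify the payload
# before acting on it. The endpoint below is hypothetical and only shows where
# verify_bounce_message() would fit.
def example_sns_webhook(request):
    import json
    from django.http import HttpResponse, HttpResponseBadRequest
    try:
        notification = json.loads(request.body)
    except ValueError:
        return HttpResponseBadRequest('malformed JSON body')
    if not verify_bounce_message(notification):
        return HttpResponseBadRequest('signature verification failed')
    # ... handle the verified bounce/complaint notification here ...
    return HttpResponse('OK')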
| mit | 7,059,662,298,711,280,000 | 33.712766 | 110 | 0.562059 | false | 4.746182 | false | false | false |
DomBennett/pG-lt | tests/test_stages_phylogeny.py | 1 | 3743 | #!/usr/bin/env python
# D.J. Bennett
# 26/05/2014
"""
Test phylogeny stage.
"""
import unittest
import os
import shutil
import pickle
from pglt.stages import phylogeny_stage as pstage
from Bio import AlignIO
from Bio import Phylo
from cStringIO import StringIO
from pglt.tools.phylogeny_tools import raxml
# DIRS
working_dir = os.path.dirname(__file__)
# FUNCTIONS
def genPhylogeny():
treedata = "(outgroup, (B, C), (D, E))"
handle = StringIO(treedata)
tree = Phylo.read(handle, "newick")
return tree
# DUMMIES
def dummyCountNPhylos(nphylos, f):
return nphylos
class DummyAlignmentStore(object):
def __init__(self, clusters, genedict, allrankids, indir, logger):
pass
class DummyGenerator(object):
phylogenies = []
def __init__(self, alignment_store, rttstat, outdir, maxtrys, logger,
wd):
pass
def run(self):
self.phylogenies.append(genPhylogeny())
return True
# TEST DATA
with open(os.path.join(working_dir, 'data', 'test_alignment_ref.faa'), 'r')\
as file:
alignment = AlignIO.read(file, 'fasta')
paradict = {'nphylos': '1', 'maxtrys': '1', 'rttstat': '0.5',
'constraint': '3'}
genedict = {}
allrankids = []
@unittest.skipIf(not raxml, "Requires RAxML")
class PhylogenyStageTestSuite(unittest.TestCase):
def setUp(self):
# stub out
self.true_AlignmentStore = pstage.ptools.AlignmentStore
self.true_Generator = pstage.ptools.Generator
self.true_countNPhylos = pstage.ptools.countNPhylos
pstage.ptools.Generator = DummyGenerator
pstage.ptools.AlignmentStore = DummyAlignmentStore
pstage.ptools.countNPhylos = dummyCountNPhylos
# create input data
os.mkdir('tempfiles')
with open(os.path.join('tempfiles', "paradict.p"), "wb") as file:
pickle.dump(paradict, file)
with open(os.path.join('tempfiles', "genedict.p"), "wb") as file:
pickle.dump(genedict, file)
with open(os.path.join('tempfiles', "allrankids.p"), "wb") as file:
pickle.dump(allrankids, file)
os.mkdir('3_alignment')
os.mkdir('4_phylogeny')
os.mkdir(os.path.join('3_alignment', 'COI'))
os.mkdir(os.path.join('3_alignment', 'rbcl'))
with open(os.path.join('3_alignment', 'rbcl',
'test_alignment_rbl.faa'), 'w') as file:
count = AlignIO.write(alignment, file, "fasta")
del count
with open(os.path.join('3_alignment', 'COI',
'test_alignment_COI.faa'), 'w') as file:
count = AlignIO.write(alignment, file, "fasta")
del count
def tearDown(self):
# remove all folders potentially generated by phylogeny stage
phylogeny_folders = ['3_alignment', '4_phylogeny', 'tempfiles']
while phylogeny_folders:
try:
phylogeny_folder = phylogeny_folders.pop()
shutil.rmtree(phylogeny_folder)
except OSError:
pass
# stub in
pstage.ptools.Generator = self.true_Generator
pstage.ptools.AlignmentStore = self.true_AlignmentStore
pstage.ptools.countNPhylos = self.true_countNPhylos
def test_phylogeny_stage(self):
# run
res = pstage.run()
# clean dir
os.remove(os.path.join('4_phylogeny', 'distribution.tre'))
os.remove(os.path.join('4_phylogeny',
'distribution_unconstrained.tre'))
os.remove(os.path.join('4_phylogeny', 'consensus.tre'))
os.rmdir('4_phylogeny')
# assert
self.assertIsNone(res)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 6,822,975,506,853,362,000 | 29.933884 | 76 | 0.610473 | false | 3.356951 | true | false | false |
benhoff/microphone | microphone/pyaudio_.py | 1 | 10383 | import io
import sys
import time
import wave
import logging
import argparse
import contextlib
from os import path
import pyaudio
import zmq
from vexmessage import create_vex_message, decode_vex_message
from microphone.command_manager import CommandManager
PYAUDIO_BIT_MAPPING = {8: pyaudio.paInt8,
16: pyaudio.paInt16,
24: pyaudio.paInt24,
32: pyaudio.paInt32}
def bits_to_samplefmt(bits):
if bits in PYAUDIO_BIT_MAPPING.keys():
return PYAUDIO_BIT_MAPPING[bits]
class PyAudio:
def __init__(self, messaging, settings):
self._pyaudio = pyaudio.PyAudio()
self.messaging = messaging
self.command_manager = CommandManager(self, messaging)
self._logger = logging.getLogger(__name__)
self._logger.info("Initializing PyAudio. ALSA/Jack error messages " +
"that pop up during this process are normal and " +
"can usually be safely ignored.")
# NOTE: pyaudio SPAMS the terminal, this seperates everything
print('\n')
self._logger.info("Initialization of PyAudio engine finished")
self.devices = {}
self.get_devices()
def __del__(self):
self._pyaudio.terminate()
def run(self):
messaging = self.messaging
# TODO: create better type here
startup_frame = create_vex_message('',
'microphone',
'STATUS',
status='recording')
messaging.publish_socket.send_multipart(startup_frame)
while True:
# NOTE: `frame` is a list of byte strings
# Once we recv here, MUST reply in order to loop again!
try:
frame = self.messaging.subscribe_socket.recv_multipart()
except KeyboardInterrupt:
break
msg = decode_vex_message(frame)
if msg.type == 'CMD':
self.command_manager.handle_command(msg)
def get_devices(self, device_type='all'):
num_devices = self._pyaudio.get_device_count()
self._logger.debug('Found %d PyAudio devices', num_devices)
for i in range(num_devices):
info = self._pyaudio.get_device_info_by_index(i)
name = info['name']
if name in self.devices:
continue
else:
self.devices[name] = PyAudioDevice(self, info)
return self.devices
"""
if device_type == plugin.audioengine.DEVICE_TYPE_ALL:
return devs
else:
return [device for device in devs if device_type in device.types]
"""
def invoke_device(self):
pass
def get_default_output_device(self):
info = self._pyaudio.get_default_output_device_info()
return PyAudioDevice(self, info)
def get_default_device(self, type='input'):
try:
info = self._pyaudio.get_default_input_device_info()
except IOError:
devices = self.get_devices(device_type=type)
if len(devices) == 0:
                msg = 'No %s devices available!' % type
self._logger.warning(msg)
raise plugin.audioengine.DeviceNotFound(msg)
try:
device = self.devices['default']
except KeyError:
self._logger.warning('default device not found')
# FIXME
device = None
return device
else:
return PyAudioDevice(self, info)
class PyAudioDevice:
def __init__(self, engine, info, context=None, address='inproc://microphone'):
super().__init__()
self._logger = logging.getLogger(__name__)
self._engine = engine
self.info = info
self._index = info['index']
self._max_output_channels = info['maxOutputChannels']
self._max_input_channels = info['maxInputChannels']
# FIXME
self._sample_width = self._engine._pyaudio.get_sample_size(pyaudio.paInt16)
self._default_sample_rate = int(self.info['defaultSampleRate'])
res_file = path.abspath(path.join(path.dirname(__file__),
'resources'))
wave_file = path.join(res_file, 'congo.wav')
wf = wave.open(wave_file, 'rb')
self._output_rate = wf.getframerate()
self._output_format = wf.getsampwidth()
self._output_channels = wf.getnchannels()
self._output_file = wave_file
wf.close()
close_file = path.join(res_file, 'done.wav')
wf = wave.open(close_file, 'rb')
self._close_rate = wf.getframerate()
self._close_format = wf.getsampwidth()
self._close_channels = wf.getnchannels()
self._close_file = close_file
def supports_format(self, bits, channels, rate, output=False):
req_dev_type = ('output' if output else 'input')
sample_fmt = bits_to_samplefmt(bits)
if not sample_fmt:
return False
direction = 'output' if output else 'input'
fmt_info = {
('%s_device' % direction): self._index,
('%s_format' % direction): sample_fmt,
('%s_channels' % direction): channels,
'rate': rate
}
try:
supported = self._engine._pyaudio.is_format_supported(**fmt_info)
except ValueError as e:
if e.args in (('Sample format not supported', -9994),
('Invalid sample rate', -9997),
('Invalid number of channels', -9998)):
return False
else:
raise
else:
return supported
@contextlib.contextmanager
def open_stream(self,
bits,
channels,
rate=None,
chunksize=1024,
output=True):
if rate is None:
rate = int(self.info['defaultSampleRate'])
# Check if format is supported
is_supported_fmt = self.supports_format(bits, channels, rate,
output=output)
if not is_supported_fmt:
msg_fmt = ("PyAudioDevice {index} ({name}) doesn't support " +
"%s format (Int{bits}, {channels}-channel at" +
" {rate} Hz)") % ('output' if output else 'input')
            msg = msg_fmt.format(index=self._index,
                                 name=self.info['name'],
                                 bits=bits,
                                 channels=channels,
                                 rate=rate)
self._logger.critical(msg)
raise plugin.audioengine.UnsupportedFormat(msg)
# Everything looks fine, open the stream
direction = ('output' if output else 'input')
stream_kwargs = {
'format': bits_to_samplefmt(bits),
'channels': channels,
'rate': rate,
'output': output,
'input': not output,
('%s_device_index' % direction): self._index,
'frames_per_buffer': chunksize if output else chunksize*8 # Hacky
}
stream = self._engine._pyaudio.open(**stream_kwargs)
"""
self._logger.debug("%s stream opened on device '%s' (%d Hz, %d " +
"channel, %d bit)", "output" if output else "input",
self.slug, rate, channels, bits)
"""
try:
yield stream
finally:
stream.close()
"""
self._logger.debug("%s stream closed on device '%s'",
"output" if output else "input", self.slug)
"""
def play_beep(self):
chunksize = 1024
f = self._engine._pyaudio.get_format_from_width(self._output_format)
stream = self._engine._pyaudio.open(format=f,
channels = self._output_channels,
rate = self._output_rate,
output=True)
wf = wave.open(self._output_file)
data = wf.readframes(chunksize)
while len(data) > 0:
stream.write(data)
data = wf.readframes(chunksize)
stream.stop_stream()
stream.close()
def play_done(self):
chunksize = 1024
f = self._engine._pyaudio.get_format_from_width(self._close_format)
stream = self._engine._pyaudio.open(format=f,
channels = self._close_channels,
rate = self._output_rate,
output=True)
wf = wave.open(self._close_file)
data = wf.readframes(chunksize)
while len(data) > 0:
stream.write(data)
data = wf.readframes(chunksize)
stream.stop_stream()
stream.close()
def record(self, chunksize, *args):
channels = args[1]
with self.open_stream(*args, chunksize=chunksize,
output=False) as stream:
record_seconds = 5
rate = int(self.info['defaultSampleRate'])
steps = int(rate/chunksize * record_seconds)
data_list = io.BytesIO()
# NOTE: need the rate info and sample width for ASR
for _ in range(steps):
try:
data_list.write(stream.read(chunksize))
except IOError as e:
if type(e.errno) is not int:
                        # Simple hack to work around the fact that the
                        # errno/strerror arguments were swapped in older
# PyAudio versions. This was fixed in upstream
# commit 1783aaf9bcc6f8bffc478cb5120ccb6f5091b3fb.
strerror, errno = e.errno, e.strerror
else:
strerror, errno = e.strerror, e.errno
self._logger.warning("IO error while reading from device" +
" '%s': '%s' (Errno: %d)", self.slug,
strerror, errno)
return data_list.getvalue()
| gpl-3.0 | 4,754,117,668,439,196,000 | 35.689046 | 83 | 0.522007 | false | 4.444777 | false | false | false |
doctori/OpenBicycleDatabase | webScrapping/openBicycleDatabase/testVeloBase.py | 1 | 2785 | import scrapy
import time
from scrapy.selector import Selector
from selenium import webdriver
import traceback
class VeloBaseScrapper(scrapy.Spider):
name = 'velobase'
start_urls = ['http://velobase.com/ListComponents.aspx?ClearFilter=true']
def __init__(self):
self.driver = webdriver.Firefox()
def __del__(self):
        self.driver.quit()
def parse(self, response):
hxs = Selector(response)
self.driver.get(response.url)
links = []
while True:
try:
self.logger.info("="*60)
self.logger.info("HXS %s ", hxs.xpath('//tr[@class="GroupHeader1"]/td/a/text()').extract()[0])
self.logger.info("="*60)
self.logger.info("="*60)
# self.logger.info("LINKS %s ", hxs.xpath('//table[@class="content"]/tr[@class="content_normal" or @class="content_alternate"]/td[0]/a[@class="ttiptxt"]/@href).extract()[0]'))
links.extend(hxs.xpath(
'//table[@class="content"]//tr[@class="content_normal" or @class="content_alternate"]/td/a[@class=" ttiptxt"]/@href'
).extract())
self.logger.info("LINKS %s ", links)
self.logger.info("="*60)
for link in links:
full_url = response.urljoin(link)
yield scrapy.Request(full_url, callback=self.parse_details)
nextPage = self.driver.find_element_by_link_text('Next')
nextPage.click()
time.sleep(3)
hxs = Selector(text=self.driver.page_source)
except:
traceback.print_exc()
break
def parse_details(self, response):
componentCategory = response.xpath('//td[@id="ctl00_ContentPlaceHolder1_GenInfo"]/table/tr[1]/td/text()').extract()[-1]
componentName = response.xpath('//td[@id="ctl00_ContentPlaceHolder1_GenInfo"]/table/tr[2]/td/text()').extract()[-1]
componentBrand = response.xpath('//td[@id="ctl00_ContentPlaceHolder1_GenInfo"]/table/tr[3]/td/a/text()').extract()[-1]
componentCountry = response.xpath('//td[@id="ctl00_ContentPlaceHolder1_GenInfo"]/table/tr[7]/td/text()').extract()[-1]
componentDescription = response.xpath('//td[@id="ctl00_ContentPlaceHolder1_GenInfo"]/table/tr/td[contains(text(),"Country:")]/following-sibling::td').extract()
self.logger.info("-" * 70)
self.logger.info(" COMPONENT %s ", componentName)
self.logger.info("-" * 70)
yield {
'category': componentCategory,
'name': componentName,
'brand': componentBrand,
'country': componentCountry,
'description': componentDescription
}
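# Usage sketch (added for illustration; not part of the original spider): the
# spider can be run from the command line with
#   scrapy runspider testVeloBase.py -o components.json
# or programmatically, assuming Scrapy plus Firefox/Selenium are available.
if __name__ == '__main__':
    from scrapy.crawler import CrawlerProcess
    process = CrawlerProcess({'FEED_FORMAT': 'json',
                              'FEED_URI': 'components.json'})
    process.crawl(VeloBaseScrapper)
    process.start()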
| gpl-2.0 | 8,169,897,393,561,473,000 | 41.846154 | 192 | 0.576302 | false | 3.783967 | false | false | false |
losonczylab/Zaremba_NatNeurosci_2017 | enrichment_model/enrichment_model_plotting.py | 1 | 7208 | import matplotlib.pyplot as plt
import cPickle as pkl
import numpy as np
import seaborn.apionly as sns
from lab.plotting import histogram
def enrichment(positions):
distances = np.abs(positions[np.isfinite(positions)])
return np.mean(distances), np.std(distances) / np.sqrt(len(distances))
def calc_enrichment(pos, masks):
enrich = []
for rep_positions, rep_masks in zip(pos, masks):
enrich.append(
[np.pi / 2 - enrichment(iter_positions[iter_mask])[0]
for iter_positions, iter_mask in zip(
rep_positions, rep_masks)])
return enrich
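# Added baseline sketch (not part of the original analysis code): for place
# fields spread uniformly over (-pi, pi), the mean absolute distance from the
# reward is pi/2, so the score pi/2 - mean(|position|) computed above is ~0;
# positive values indicate fields crowding toward the reward. The helper name
# below is hypothetical.
def _uniform_enrichment_baseline(n_fields=100000, seed=0):
    rng = np.random.RandomState(seed)
    uniform_positions = rng.uniform(-np.pi, np.pi, size=n_fields)
    # approximately 0.0 for a uniform (non-enriched) distribution
    return np.pi / 2 - np.mean(np.abs(uniform_positions))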
def calc_final_distributions(pos, masks):
final_dist = []
for rep_positions, rep_masks in zip(pos, masks):
final_dist.extend(rep_positions[-1][rep_masks[-1]].tolist())
return final_dist
def plot_enrichment(ax, enrichment, color, title='', rad=True):
ax.plot(range(9), np.mean(enrichment, axis=0), color=color)
ax.plot(range(9), np.percentile(enrichment, 5, axis=0), ls='--',
color=color)
ax.plot(range(9), np.percentile(enrichment, 95, axis=0), ls='--',
color=color)
ax.fill_between(
range(9), np.percentile(enrichment, 5, axis=0),
np.percentile(enrichment, 95, axis=0), facecolor=color, alpha=0.5)
sns.despine(ax=ax)
ax.tick_params(length=3, pad=2, direction='out')
ax.set_xlim(-0.5, 8.5)
if rad:
ax.set_ylim(-0.15, 0.5)
ax.set_ylabel('Enrichment (rad)')
else:
ax.set_ylim(-0.15, 0.10 * 2 * np.pi)
y_ticks = np.array(['0', '0.05', '0.10'])
ax.set_yticks(y_ticks.astype('float') * 2 * np.pi)
ax.set_yticklabels(y_ticks)
ax.set_ylabel('Enrichment (fraction of belt)')
ax.set_xlabel("Iteration ('session' #)")
ax.set_title(title)
def plot_final_distributions(
ax, final_dists, colors, labels=None, title='', rad=True):
if labels is None:
labels = [None] * len(final_dists)
for final_dist, color, label in zip(final_dists, colors, labels):
histogram(
ax, final_dist, bins=50, range=(-np.pi, np.pi),
color=color, filled=False, plot_mean=False, normed=True,
label=label)
ax.tick_params(length=3, pad=2, direction='out')
ax.axvline(ls='--', color='0.3')
ax.set_xlim(-np.pi, np.pi)
if rad:
ax.set_xlabel('Distance from reward (rad)')
else:
ax.set_xlabel('Distance from reward (fraction of belt)')
ax.set_xticks([-np.pi, -np.pi / 2, 0, np.pi / 2, np.pi])
ax.set_xticklabels(['-0.50', '-0.25', '0', '0.25', '0.50'])
ax.set_ylim(0, 0.3)
ax.set_ylabel('Normalized density')
ax.set_title(title)
def plot_parameters(axs, model, enrich):
positions = np.linspace(-np.pi, np.pi, 1000)
bs, ks = model.shift_mean_var(positions)
recur = model.recur_by_position(positions)
axs[0].plot(positions, recur)
axs[0].set_xlim(-np.pi, np.pi)
axs[0].set_ylim(0., 1.)
axs[0].set_xlabel('Position')
axs[0].set_ylabel('Recurrence probability')
axs[1].plot(positions, bs)
axs[1].set_xlim(-np.pi, np.pi)
axs[1].set_xlabel('Position')
axs[1].set_ylabel('Offset')
axs[2].plot(positions, 1 / ks)
axs[2].set_xlim(-np.pi, np.pi)
axs[2].set_xlabel('Position')
axs[2].set_ylabel('Variance')
axs[3].plot(range(9), np.mean(enrich, axis=0), color='b')
axs[3].fill_between(
range(9), np.percentile(enrich, 5, axis=0),
np.percentile(enrich, 95, axis=0), facecolor='b', alpha=0.5)
axs[3].set_xlabel('Iteration')
axs[3].set_ylabel('Enrichment (rad)')
def plot_models(
models, model_labels=None, n_cells=1000, n_runs=100, n_iterations=8):
if model_labels is None:
model_labels = ['Model {}'.format(idx) for idx in range(len(models))]
fig, axs = plt.subplots(4, len(models), figsize=(10, 10))
models[0].initialize(n_cells=n_cells)
for model in models[1:]:
model.initialize_like(models[0])
initial_mask = models[0].mask
initial_positions = models[0].positions
masks = []
positions = []
enrichment = []
for model, model_axs in zip(models, axs.T):
masks.append([])
positions.append([])
for _ in range(n_runs):
model.initialize(
initial_mask=initial_mask, initial_positions=initial_positions)
model.run(n_iterations)
masks[-1].append(model._masks)
positions[-1].append(model._positions)
enrichment.append(calc_enrichment(positions[-1], masks[-1]))
plot_parameters(model_axs, model, enrichment[-1])
for ax in axs[:, 1:].flat:
ax.set_ylabel('')
for ax in axs[:2, :].flat:
ax.set_xlabel('')
for label, ax in zip(model_labels, axs[0]):
ax.set_title(label)
offset_min, offset_max = np.inf, -np.inf
for ax in axs[1]:
offset_min = min(offset_min, ax.get_ylim()[0])
offset_max = max(offset_max, ax.get_ylim()[1])
for ax in axs[1]:
ax.set_ylim(offset_min, offset_max)
var_min, var_max = np.inf, -np.inf
for ax in axs[2]:
var_min = min(var_min, ax.get_ylim()[0])
var_max = max(var_max, ax.get_ylim()[1])
for ax in axs[2]:
ax.set_ylim(var_min, var_max)
enrich_min, enrich_max = np.inf, -np.inf
for ax in axs[3]:
enrich_min = min(enrich_min, ax.get_ylim()[0])
enrich_max = max(enrich_max, ax.get_ylim()[1])
for ax in axs[3]:
ax.set_ylim(enrich_min, enrich_max)
return fig
if __name__ == '__main__':
import enrichment_model as em
import enrichment_model_theoretical as emt
params_path_A = '/analysis/Jeff/Df16A/Df_remap_paper_v2/data/enrichment_model/Df_model_params_A.pkl'
params_path_B = '/analysis/Jeff/Df16A/Df_remap_paper_v2/data/enrichment_model/Df_model_params_B.pkl'
params_path_C = '/analysis/Jeff/Df16A/Df_remap_paper_v2/data/enrichment_model/Df_model_params_C.pkl'
#
# WT to theoretical
#
# WT_params_path = params_path_C
# WT_params = pkl.load(open(WT_params_path, 'r'))
# WT_model = em.EnrichmentModel2(**WT_params)
# recur_model = emt.EnrichmentModel2_recur(
# kappa=1, span=0.8, mean_recur=0.4, **WT_params)
# offset_model = emt.EnrichmentModel2_offset(alpha=0.25, **WT_params)
# var_model = emt.EnrichmentModel2_var(
# kappa=1, alpha=10, mean_k=3, **WT_params)
# models = [WT_model, recur_model, offset_model, var_model]
# model_labels = ['WT model', 'Stable recurrence', 'Shift towards reward',
# 'Stable position']
params_A = pkl.load(open(params_path_A, 'r'))
params_B = pkl.load(open(params_path_B, 'r'))
params_C = pkl.load(open(params_path_C, 'r'))
model_A = em.EnrichmentModel2(**params_A)
model_B = em.EnrichmentModel2(**params_B)
model_C = em.EnrichmentModel2(**params_C)
models = [model_A, model_B, model_C]
model_labels = ['A', 'B', 'C']
fig = plot_models(
models, model_labels, n_cells=1000, n_runs=100, n_iterations=8)
fig.savefig('Df_model_parameters.pdf')
from pudb import set_trace
set_trace()
| mit | -686,580,195,003,819,100 | 31.468468 | 104 | 0.602109 | false | 2.927701 | false | false | false |
hakujyo/chessplaying_robot | s.py | 1 | 3258 | import cv2
import numpy as np
import time
from matplotlib import pyplot as plt
cap = cv2.VideoCapture(1)  # read in the video stream
#timeF = 0 #timer
start=False
if cap.isOpened():  # check whether the capture opened correctly
rval, frame = cap.read()
start = True
else:
rval = False
time1 = time.time() #timer
while start:   # loop over the video frames
rval, frame = cap.read()
cv2.imshow("capture", frame) #视频窗口
time2 = time.time()
timeF = time2 - time1
timeF = int(timeF)
print(timeF)
    if (timeF % 10 == 0):  # store a frame every timeF frames
count='%d'%timeF
url='%d'%timeF + ".png"
#src='%d'%c
        cv2.imwrite(url, frame)  # save as an image
        # read the image back and correct the perspective
a = cv2.imread(url)
rows, cols, channels = a.shape
list1 = np.float32([[86, 21], [514, 12], [39, 464], [566, 462]])
list2 = np.float32([[0, 0], [720, 0], [0, 720], [720, 720]])
M = cv2.getPerspectiveTransform(list1, list2)
img_perspective = cv2.warpPerspective(a, M, (720, 720))
print('perspective:\n', M)
        cv2.imwrite(url, img_perspective)   # perspective-corrected image
# cv2.imshow(url, img_perspective)
cv2.waitKey(5)
        # save the grayscale difference image for the black piece
if timeF != 0 and (timeF / 10) % 2 == 1 :
a = cv2.imread(url)
src= '%d'%(timeF-10)
lasturl = src + '.png'
print(lasturl)
b = cv2.imread(lasturl)
Graya = cv2.cvtColor(a,cv2.COLOR_BGR2GRAY)
Grayb = cv2.cvtColor(b,cv2.COLOR_BGR2GRAY)
c = Grayb - Graya;
Grayurl='sub'+count+'.png'
            cv2.imwrite(Grayurl, c)  # grayscale difference image
#cv2.imshow(Grayurl, c)
            # template matching
x=0
y=0
img = cv2.imread(Grayurl, 0)
img2 = img.copy()
template = cv2.imread('test.png', 0)
w, h = template.shape[::-1]
methods = ['cv2.TM_SQDIFF']
for meth in methods:
img = img2.copy()
method = eval(meth)
# Apply template Matching
res = cv2.matchTemplate(img, template, method)
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
# If the method is TM_SQDIFF or TM_SQDIFF_NORMED, take minimum
if method in [cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]:
top_left = min_loc
else:
top_left = max_loc
bottom_right = (top_left[0] + w, top_left[1] + h)
print(int(top_left[0] + w / 2), int(top_left[1] + h / 2))
cv2.rectangle(img, top_left, bottom_right, 255, 2)
plt.figure()
plt.subplot(121), plt.imshow(res, cmap='gray')
plt.title('Matching Result'), plt.xticks([]), plt.yticks([])
plt.subplot(122), plt.imshow(img, cmap='gray')
plt.title('Detected Point'), plt.xticks([]), plt.yticks([])
plt.suptitle(meth)
            #plt.show() # show the template matching figure
cap.release()
| gpl-3.0 | 3,384,259,414,345,706,500 | 26.740741 | 78 | 0.484536 | false | 2.900935 | false | false | false |
hepochen/py-discount | setup.py | 1 | 1675 | #coding: utf8
import os
import shutil
import subprocess
import glob
from distutils.core import setup
def install_discount():
root = os.path.dirname(os.path.abspath(__file__))
os.chdir('_discount')
subprocess.call('chmod 755 configure.sh'.split())
subprocess.call(
['./configure.sh',
'--with-fenced-code',
'--with-urlencoded-anchor',
'--enable-all-features',
'--shared',
], env=os.environ)
subprocess.call(['make', 'install'])
os.chdir(root)
_md_path = None
for path in glob.glob('_discount/libmarkdown.so*') + glob.glob('_discount/libmarkdown.dylib'):
if not os.path.islink(path):
_md_path = path
break
if not _md_path:
return # ignore
md_path = os.path.join(root, 'discount/markdown.so')
shutil.copy(_md_path, md_path )
install_discount()
setup(
name='discount',
license='BSD',
version='0.2.1STABLE',
author='Trapeze',
author_email='[email protected]',
url="http://github.com/trapeze/python-discount",
download_url='http://pypi.python.org/pypi/discount',
description='A Python interface for Discount, the C Markdown parser',
long_description=open('README.rst').read(),
keywords='markdown discount ctypes',
packages = ['discount', ],
package_data={'discount': ['markdown.so']},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: C',
'Programming Language :: Python',
'Topic :: Text Processing :: Markup'
],
) | bsd-3-clause | -4,911,564,756,195,660,000 | 26.032258 | 98 | 0.613731 | false | 3.841743 | false | false | false |
google-research/torchsde | torchsde/_core/methods/tableaus/srid2.py | 1 | 1225 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# From "RUNGE-KUTTA METHODS FOR THE STRONG APPROXIMATION OF SOLUTIONS OF STOCHASTIC DIFFERENTIAL EQUATIONS".
# For diagonal noise structure.
# (ODE order, SDE strong order) = (3.0, 1.5).
STAGES = 4
C0 = (0, 1, 1 / 2, 0)
C1 = (0, 1 / 4, 1, 1 / 4)
A0 = (
(),
(1,),
(1 / 4, 1 / 4),
(0, 0, 0)
)
A1 = (
(),
(1 / 4,),
(1, 0),
(0, 0, 1 / 4)
)
B0 = (
(),
(0,),
(1, 1 / 2),
(0, 0, 0),
)
B1 = (
(),
(-1 / 2,),
(1, 0),
(2, -1, 1 / 2)
)
alpha = (1 / 6, 1 / 6, 2 / 3, 0)
beta1 = (-1, 4 / 3, 2 / 3, 0)
beta2 = (1, -4 / 3, 1 / 3, 0)
beta3 = (2, -4 / 3, -2 / 3, 0)
beta4 = (-2, 5 / 3, -2 / 3, 1)
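# Added consistency sketch (not part of the original tableau definition): a
# quick arithmetic check of the row sums of the weights above. The drift
# weights alpha sum to 1, and the beta rows sum to 1, 0, 0, 0 respectively.
if __name__ == '__main__':
    assert abs(sum(alpha) - 1.0) < 1e-12
    assert abs(sum(beta1) - 1.0) < 1e-12
    for _beta in (beta2, beta3, beta4):
        assert abs(sum(_beta)) < 1e-12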
| apache-2.0 | -7,428,444,745,646,069,000 | 21.685185 | 108 | 0.568163 | false | 2.663043 | false | false | false |
josiahw/pyrover | src/Mapping/BresenhamAlgorithms.py | 1 | 5482 | """
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.
In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
For more information, please refer to <http://unlicense.org/>
@author: Josiah Walker
"""
import numpy
def BresenhamLine(A,B):
"""
This implements the bresenham algorithm to draw a line from A to B
Returns: all x,y index pairs that are in the line
"""
if A[0] == B[0]: #if A and B share the same X coord, draw a straight line in Y
increment = 1 if B[1] >= A[1] else -1 #decide whether to draw forwards or backwards
return [(A[0],i) for i in xrange(A[1],B[1]+increment,increment)]
elif A[1] == B[1]: #if A and B share the same Y coord, draw a straight line in X
increment = 1 if B[0] >= A[0] else -1 #decide whether to draw forwards or backwards
return [(i,A[1]) for i in xrange(A[0],B[0]+increment,increment)]
else: #this draws a diagonal line
incrementx = 1 if B[0] >= A[0] else -1 #set the direction of line drawing
incrementy = 1 if B[1] >= A[1] else -1
result = []
yval = A[1] #set Y start
slope = A-B #calculate slope - assuming A and B are numpy arrays
slope = abs(slope[1]/float(slope[0]))
error = 0.0 #initialise Y error counter
for i in xrange(A[0],B[0]+incrementx,incrementx): #for all X values, step forward in X
result.append((i,yval))
error += slope
while error >= 0.5: #while the Y error is too large, step forward in Y
yval += incrementy
error -= 1.0
result.append((i,yval))
return result
def BresenhamBorder(A,B):
"""
Unlike the line, this does only one pixel per Y row, so it can be used in fill algorithms efficiently
Returns: all x,y index pairs that are in the border
"""
if A[0] == B[0]: #if A and B share the same X coord, draw a straight line in Y
increment = 1 if B[1] >= A[1] else -1
return [(A[0],i) for i in xrange(A[1],B[1]+increment,increment)]
elif A[1] == B[1]: #we're screwed - we can only return one Y value
return [numpy.round((A+B)/2).astype(numpy.int32)]
else: #work out what to do for a diagonal
incrementy = 1 if B[1] >= A[1] else -1 #set the direction of line drawing
slope = A-B
slope = slope[0]/float(slope[1])*incrementy
xvals = numpy.round(A[0] + slope*numpy.arange(0.,abs(A[1]-B[1])+1,1.)).astype(numpy.int32)
return [(xvals[i], y) for i,y in enumerate(xrange(A[1],B[1]+incrementy,incrementy))]
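#note: because each Y row maps to exactly one X value here, a pair of these
#borders can be scanned row by row, which is what BresenhamPolygon below does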
def BresenhamPolygon(vertices):
"""
Rasterizes a convex polygon from a list of 2d int vertices.
All pixels within the polygon are returned as a list.
"""
    #rotate the list so the vertex with the largest Y value is at the head:
maxvert = 0
for i in xrange(len(vertices)):
if vertices[i][1] > vertices[maxvert][1]:
maxvert = i
vertices = vertices[maxvert:] + vertices[:maxvert]
    #split the list into two sides based on max->min paths
minvert = 0
for i in xrange(len(vertices)):
if vertices[i][1] < vertices[minvert][1]:
minvert = i
#skip everything of equal Y height on the top
start = 0
while start < len(vertices)-2 and vertices[start][1] == vertices[start+1][1]:
start += 1
side1 = vertices[start:minvert+1]
#create the "left" border
l = BresenhamBorder(side1[0],side1[1])
for i in xrange(1,len(side1)-1):
l += BresenhamBorder(side1[i],side1[i+1])[1:]
#skip everything of equal Y height on the bottom
while minvert < len(vertices)-2 and vertices[minvert][1] == vertices[minvert+1][1]:
minvert += 1
side2 = vertices[minvert:]
side2.reverse()
side2 = [vertices[0]] + side2
#create the "right" border
r = BresenhamBorder(side2[0],side2[1])
for i in xrange(1,len(side2)-1):
r += BresenhamBorder(side2[i],side2[i+1])[1:]
    #do horizontal scans and save all the cell locations in the polygon
result = []
for i in xrange(len(l)):
increment = 1 if r[i][0] >= l[i][0] else -1
result += [(j,l[i][1]) for j in xrange(l[i][0],r[i][0]+increment,increment)]
return result
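if __name__ == "__main__":
    #minimal sketch of the intended usage; the endpoints and vertices are
    #assumed to be 2-element numpy integer arrays, since BresenhamBorder
    #subtracts and averages them directly
    print(BresenhamLine(numpy.array([0, 0]), numpy.array([5, 3])))
    triangle = [numpy.array([0, 0]), numpy.array([6, 2]), numpy.array([3, 5])]
    print(BresenhamPolygon(triangle))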
def BresenhamTriangle(A,B,C):
#this is here because not all the functions have been upgraded to polygon yet
return BresenhamPolygon([A,B,C])
| bsd-2-clause | 5,967,930,163,098,792,000 | 40.530303 | 105 | 0.643013 | false | 3.487277 | false | false | false |
Johnetordoff/osf.io | api_tests/nodes/views/test_node_contributors_list.py | 6 | 109793 | # -*- coding: utf-8 -*-
from datetime import datetime
import mock
import pytest
import random
from nose.tools import * # noqa:
from api.base.settings.defaults import API_BASE
from api.nodes.serializers import NodeContributorsCreateSerializer
from framework.auth.core import Auth
from osf_tests.factories import (
fake_email,
AuthUserFactory,
OSFGroupFactory,
ProjectFactory,
UnconfirmedUserFactory,
UserFactory,
)
from osf.utils import permissions
from rest_framework import exceptions
from tests.base import capture_signals, fake
from website.project.signals import contributor_added, contributor_removed
from api_tests.utils import disconnected_from_listeners
@pytest.fixture()
def user():
return AuthUserFactory()
@pytest.mark.django_db
@pytest.mark.enable_quickfiles_creation
@pytest.mark.enable_implicit_clean
class NodeCRUDTestCase:
@pytest.fixture()
def user(self):
return AuthUserFactory()
@pytest.fixture()
def user_two(self):
return AuthUserFactory()
@pytest.fixture()
def title(self):
return 'Cool Project'
@pytest.fixture()
def title_new(self):
return 'Super Cool Project'
@pytest.fixture()
def description(self):
return 'A Properly Cool Project'
@pytest.fixture()
def description_new(self):
return 'An even cooler project'
@pytest.fixture()
def category(self):
return 'data'
@pytest.fixture()
def category_new(self):
return 'project'
@pytest.fixture()
def project_public(self, user, title, description, category):
return ProjectFactory(
title=title,
description=description,
category=category,
is_public=True,
creator=user
)
@pytest.fixture()
def project_private(self, user, title, description, category):
return ProjectFactory(
title=title,
description=description,
category=category,
is_public=False,
creator=user
)
@pytest.fixture()
def url_public(self, project_public):
return '/{}nodes/{}/'.format(API_BASE, project_public._id)
@pytest.fixture()
def url_private(self, project_private):
return '/{}nodes/{}/'.format(API_BASE, project_private._id)
@pytest.fixture()
def url_fake(self):
return '/{}nodes/{}/'.format(API_BASE, '12345')
@pytest.fixture()
def make_contrib_id(self):
def contrib_id(node_id, user_id):
return '{}-{}'.format(node_id, user_id)
return contrib_id
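    # Contributor resources in these tests are addressed by the concatenated
    # id '<node_id>-<user_id>' built by this fixture; assertions below compare
    # against the same format.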
@pytest.mark.django_db
@pytest.mark.enable_quickfiles_creation
@pytest.mark.enable_implicit_clean
class TestNodeContributorList(NodeCRUDTestCase):
@pytest.fixture()
def url_private(self, project_private):
return '/{}nodes/{}/contributors/'.format(
API_BASE, project_private._id)
@pytest.fixture()
def url_public(self, project_public):
return '/{}nodes/{}/contributors/'.format(API_BASE, project_public._id)
def test_concatenated_id(self, app, user, project_public, url_public):
res = app.get(url_public)
assert res.status_code == 200
assert res.json['data'][0]['id'].split('-')[0] == project_public._id
assert res.json['data'][0]['id'] == '{}-{}'.format(
project_public._id, user._id)
def test_permissions_work_with_many_users(
self, app, user, project_private, url_private):
users = {
permissions.ADMIN: [user._id],
permissions.WRITE: [],
permissions.READ: []
}
for i in range(0, 25):
perm = random.choice(list(users.keys()))
user = AuthUserFactory()
project_private.add_contributor(user, permissions=perm)
users[perm].append(user._id)
res = app.get(url_private, auth=user.auth)
data = res.json['data']
for user in data:
api_perm = user['attributes']['permission']
user_id = user['id'].split('-')[1]
assert user_id in users[api_perm], 'Permissions incorrect for {}. Should not have {} permission.'.format(
user_id, api_perm)
def test_return(
self, app, user, user_two, project_public, project_private,
url_public, url_private, make_contrib_id):
# test_return_public_contributor_list_logged_in
res = app.get(url_public, auth=user_two.auth)
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
assert len(res.json['data']) == 1
assert res.json['data'][0]['id'] == make_contrib_id(
project_public._id, user._id)
# test_return_private_contributor_list_logged_out
res = app.get(url_private, expect_errors=True)
assert res.status_code == 401
assert 'detail' in res.json['errors'][0]
# test_return_private_contributor_list_logged_in_non_contributor
res = app.get(url_private, auth=user_two.auth, expect_errors=True)
assert res.status_code == 403
assert 'detail' in res.json['errors'][0]
# test_return_private_contributor_list_logged_in_osf_group_member
res = app.get(url_private, auth=user_two.auth, expect_errors=True)
osf_group = OSFGroupFactory(creator=user_two)
project_private.add_osf_group(osf_group, permissions.READ)
res = app.get(url_private, auth=user_two.auth)
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
assert len(res.json['data']) == 1
assert res.json['data'][0]['id'] == make_contrib_id(
project_private._id, user._id)
def test_return_public_contributor_list_logged_out(
self, app, user, user_two, project_public, url_public, make_contrib_id):
project_public.add_contributor(user_two, save=True)
res = app.get(url_public)
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
assert len(res.json['data']) == 2
assert res.json['data'][0]['id'] == make_contrib_id(
project_public._id, user._id)
assert res.json['data'][1]['id'] == make_contrib_id(
project_public._id, user_two._id)
def test_return_private_contributor_list_logged_in_contributor(
self, app, user, user_two, project_private, url_private, make_contrib_id):
project_private.add_contributor(user_two)
project_private.save()
res = app.get(url_private, auth=user.auth)
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
assert len(res.json['data']) == 2
assert res.json['data'][0]['id'] == make_contrib_id(
project_private._id, user._id)
assert res.json['data'][1]['id'] == make_contrib_id(
project_private._id, user_two._id)
def test_filtering_on_obsolete_fields(self, app, user, url_public):
# regression test for changes in filter fields
url_fullname = '{}?filter[fullname]=foo'.format(url_public)
res = app.get(url_fullname, auth=user.auth, expect_errors=True)
assert res.status_code == 400
errors = res.json['errors']
assert len(errors) == 1
assert errors[0]['detail'] == '\'fullname\' is not a valid field for this endpoint.'
# middle_name is now middle_names
url_middle_name = '{}?filter[middle_name]=foo'.format(url_public)
res = app.get(url_middle_name, auth=user.auth, expect_errors=True)
assert res.status_code == 400
errors = res.json['errors']
assert len(errors) == 1
assert errors[0]['detail'] == '\'middle_name\' is not a valid field for this endpoint.'
def test_disabled_contributors_contain_names_under_meta(
self, app, user, user_two, project_public, url_public, make_contrib_id):
project_public.add_contributor(user_two, save=True)
user_two.is_disabled = True
user_two.save()
res = app.get(url_public)
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
assert len(res.json['data']) == 2
assert res.json['data'][0]['id'] == make_contrib_id(
project_public._id, user._id)
assert res.json['data'][1]['id'] == make_contrib_id(
project_public._id, user_two._id)
assert res.json['data'][1]['embeds']['users']['errors'][0]['meta']['full_name'] == user_two.fullname
assert res.json['data'][1]['embeds']['users']['errors'][0]['detail'] == 'The requested user is no longer available.'
def test_total_bibliographic_contributor_count_returned_in_metadata(
self, app, user_two, project_public, url_public):
non_bibliographic_user = UserFactory()
project_public.add_contributor(
non_bibliographic_user,
visible=False,
auth=Auth(project_public.creator))
project_public.save()
res = app.get(url_public, auth=user_two.auth)
assert res.status_code == 200
assert res.json['links']['meta']['total_bibliographic'] == len(
project_public.visible_contributor_ids)
def test_unregistered_contributor_field_is_null_if_account_claimed(
self, app, user):
project = ProjectFactory(creator=user, is_public=True)
url = '/{}nodes/{}/contributors/'.format(API_BASE, project._id)
res = app.get(url, auth=user.auth, expect_errors=True)
assert res.status_code == 200
assert len(res.json['data']) == 1
assert res.json['data'][0]['attributes'].get(
'unregistered_contributor') is None
def test_unregistered_contributors_show_up_as_name_associated_with_project(
self, app, user):
project = ProjectFactory(creator=user, is_public=True)
project.add_unregistered_contributor(
'Robert Jackson',
'[email protected]',
auth=Auth(user), save=True)
url = '/{}nodes/{}/contributors/'.format(API_BASE, project._id)
res = app.get(url, auth=user.auth, expect_errors=True)
assert res.status_code == 200
assert len(res.json['data']) == 2
assert res.json['data'][1]['embeds']['users']['data']['attributes']['full_name'] == 'Robert Jackson'
assert res.json['data'][1]['attributes'].get(
'unregistered_contributor') == 'Robert Jackson'
project_two = ProjectFactory(creator=user, is_public=True)
project_two.add_unregistered_contributor(
'Bob Jackson', '[email protected]', auth=Auth(user), save=True)
url = '/{}nodes/{}/contributors/'.format(API_BASE, project_two._id)
res = app.get(url, auth=user.auth, expect_errors=True)
assert res.status_code == 200
assert len(res.json['data']) == 2
assert res.json['data'][1]['embeds']['users']['data']['attributes']['full_name'] == 'Robert Jackson'
assert res.json['data'][1]['attributes'].get(
'unregistered_contributor') == 'Bob Jackson'
def test_contributors_order_is_the_same_over_multiple_requests(
self, app, user, project_public, url_public):
project_public.add_unregistered_contributor(
'Robert Jackson',
'[email protected]',
auth=Auth(user), save=True
)
for i in range(0, 10):
new_user = AuthUserFactory()
if i % 2 == 0:
visible = True
else:
visible = False
project_public.add_contributor(
new_user,
visible=visible,
auth=Auth(project_public.creator),
save=True
)
req_one = app.get(
'{}?page=2'.format(url_public),
auth=Auth(project_public.creator))
req_two = app.get(
'{}?page=2'.format(url_public),
auth=Auth(project_public.creator))
id_one = [item['id'] for item in req_one.json['data']]
id_two = [item['id'] for item in req_two.json['data']]
for a, b in zip(id_one, id_two):
assert a == b
@pytest.mark.django_db
@pytest.mark.enable_quickfiles_creation
@pytest.mark.enable_implicit_clean
class TestNodeContributorAdd(NodeCRUDTestCase):
@pytest.fixture()
def user_three(self):
return AuthUserFactory()
@pytest.fixture()
def url_private(self, project_private):
return '/{}nodes/{}/contributors/?send_email=false'.format(
API_BASE, project_private._id)
@pytest.fixture()
def url_public(self, project_public):
return '/{}nodes/{}/contributors/?send_email=false'.format(
API_BASE, project_public._id)
@pytest.fixture()
def data_user_two(self, user_two):
return {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True,
},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': user_two._id,
}
}
}
}
}
@pytest.fixture()
def data_user_three(self, user_three):
return {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True,
},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': user_three._id,
}
}
}
}
}
def test_add_contributors_errors(
self, app, user, user_two, user_three, url_public):
# test_add_node_contributors_relationships_is_a_list
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True
},
'relationships': [{'contributor_id': user_three._id}]
}
}
res = app.post_json_api(
url_public, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == exceptions.ParseError.default_detail
# test_add_contributor_no_relationships
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True
}
}
}
res = app.post_json_api(
url_public, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'A user ID or full name must be provided to add a contributor.'
# test_add_contributor_empty_relationships
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True
},
'relationships': {}
}
}
res = app.post_json_api(
url_public, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'A user ID or full name must be provided to add a contributor.'
# test_add_contributor_no_user_key_in_relationships
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True
},
'relationships': {
'id': user_two._id,
'type': 'users'
}
}
}
res = app.post_json_api(
url_public, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == exceptions.ParseError.default_detail
# test_add_contributor_no_data_in_relationships
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True
},
'relationships': {
'users': {
'id': user_two._id
}
}
}
}
res = app.post_json_api(
url_public, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Request must include /data.'
# test_add_contributor_no_target_type_in_relationships
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True
},
'relationships': {
'users': {
'data': {
'id': user_two._id
}
}
}
}
}
res = app.post_json_api(
url_public, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Request must include /type.'
# test_add_contributor_no_target_id_in_relationships
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True
},
'relationships': {
'users': {
'data': {
'type': 'users'
}
}
}
}
}
res = app.post_json_api(
url_public, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'A user ID or full name must be provided to add a contributor.'
# test_add_contributor_incorrect_target_id_in_relationships
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True
},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': '12345'
}
}
}
}
}
res = app.post_json_api(
url_public, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 404
# test_add_contributor_no_type
data = {
'data': {
'attributes': {
'bibliographic': True
},
'relationships': {
'users': {
'data': {
'id': user_two._id,
'type': 'users'
}
}
}
}
}
res = app.post_json_api(
url_public, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['source']['pointer'] == '/data/type'
# test_add_contributor_incorrect_type
data = {
'data': {
'type': 'Incorrect type',
'attributes': {
'bibliographic': True
},
'relationships': {
'users': {
'data': {
'id': user_two._id,
'type': 'users'
}
}
}
}
}
res = app.post_json_api(
url_public, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 409
# test_unregistered_contributor_invalid_email
data = {
'data': {
'type': 'contributors',
'attributes': {
'permission': 'admin',
'email': '[email protected]',
'full_name': 'John Doe'
}
}
}
res = app.post_json_api(
url_public, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Unregistered contributor email address domain is blacklisted.'
def test_contributor_create_invalid_data(
self, app, user_three, url_public):
res = app.post_json_api(
url_public,
'Incorrect data',
auth=user_three.auth,
expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == exceptions.ParseError.default_detail
res = app.post_json_api(
url_public,
['Incorrect data'],
auth=user_three.auth,
expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == exceptions.ParseError.default_detail
def test_add_contributor_dont_expose_email(
self, app, user, user_two, project_public, data_user_two, url_public):
res = app.post_json_api(
url_public,
data_user_two,
auth=user.auth)
assert res.status_code == 201
assert res.json['data']['attributes'].get('email') is None
def test_add_contributor_is_visible_by_default(
self, app, user, user_two, project_public,
data_user_two, url_public):
del data_user_two['data']['attributes']['bibliographic']
res = app.post_json_api(
url_public,
data_user_two,
auth=user.auth,
expect_errors=True)
assert res.status_code == 201
assert res.json['data']['id'] == '{}-{}'.format(
project_public._id, user_two._id)
project_public.reload()
assert user_two in project_public.contributors
assert project_public.get_visible(user_two)
def test_adds_bibliographic_contributor_public_project_admin(
self, app, user, user_two, project_public, data_user_two, url_public):
res = app.post_json_api(url_public, data_user_two, auth=user.auth)
assert res.status_code == 201
assert res.json['data']['id'] == '{}-{}'.format(
project_public._id, user_two._id)
project_public.reload()
assert user_two in project_public.contributors
def test_adds_non_bibliographic_contributor_private_project_admin(
self, app, user, user_two, project_private, url_private):
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': False
},
'relationships': {
'users': {
'data': {
'id': user_two._id,
'type': 'users'
}
}
}
}
}
res = app.post_json_api(
url_private, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 201
assert res.json['data']['id'] == '{}-{}'.format(
project_private._id, user_two._id)
assert res.json['data']['attributes']['bibliographic'] is False
project_private.reload()
assert user_two in project_private.contributors
assert not project_private.get_visible(user_two)
def test_adds_contributor_public_project_non_admin(
self, app, user, user_two, user_three,
project_public, data_user_three, url_public):
project_public.add_contributor(
user_two,
permissions=permissions.WRITE,
auth=Auth(user),
save=True)
res = app.post_json_api(url_public, data_user_three,
auth=user_two.auth, expect_errors=True)
assert res.status_code == 403
project_public.reload()
assert user_three not in project_public.contributors.all()
def test_adds_contributor_public_project_non_admin_osf_group(
self, app, user, user_two, user_three,
project_public, data_user_three, url_public):
group = OSFGroupFactory(creator=user_two)
project_public.add_osf_group(group, permissions.WRITE)
res = app.post_json_api(url_public, data_user_three,
auth=user_two.auth, expect_errors=True)
assert res.status_code == 403
project_public.reload()
assert user_three not in project_public.contributors.all()
def test_adds_contributor_public_project_non_contributor(
self, app, user_two, user_three, project_public, data_user_three, url_public):
res = app.post_json_api(url_public, data_user_three,
auth=user_two.auth, expect_errors=True)
assert res.status_code == 403
assert user_three not in project_public.contributors.all()
def test_adds_contributor_public_project_not_logged_in(
self, app, user_two, project_public, data_user_two, url_public):
res = app.post_json_api(url_public, data_user_two, expect_errors=True)
assert res.status_code == 401
assert user_two not in project_public.contributors.all()
def test_adds_contributor_private_project_admin(
self, app, user, user_two, project_private,
data_user_two, url_private):
res = app.post_json_api(url_private, data_user_two, auth=user.auth)
assert res.status_code == 201
assert res.json['data']['id'] == '{}-{}'.format(
project_private._id, user_two._id)
project_private.reload()
assert user_two in project_private.contributors
def test_adds_contributor_private_project_osf_group_admin_perms(
self, app, user, user_two, user_three, project_private,
data_user_two, url_private):
osf_group = OSFGroupFactory(creator=user_three)
project_private.add_osf_group(osf_group, permissions.ADMIN)
res = app.post_json_api(url_private, data_user_two, auth=user_three.auth)
assert res.status_code == 201
assert res.json['data']['id'] == '{}-{}'.format(
project_private._id, user_two._id)
project_private.reload()
assert user_two in project_private.contributors
def test_adds_contributor_without_bibliographic_private_project_admin(
self, app, user, user_two, project_private, url_private):
data = {
'data': {
'type': 'contributors',
'attributes': {
},
'relationships': {
'users': {
'data': {
'id': user_two._id,
'type': 'users'
}
}
}
}
}
res = app.post_json_api(
url_private, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 201
project_private.reload()
assert user_two in project_private.contributors
def test_adds_admin_contributor_private_project_admin(
self, app, user, user_two, project_private, url_private):
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True,
'permission': permissions.ADMIN
},
'relationships': {
'users': {
'data': {
'id': user_two._id,
'type': 'users'
}
}
}
}
}
res = app.post_json_api(url_private, data, auth=user.auth)
assert res.status_code == 201
assert res.json['data']['id'] == '{}-{}'.format(
project_private._id, user_two._id)
project_private.reload()
assert user_two in project_private.contributors
assert project_private.get_permissions(user_two) == [
permissions.READ, permissions.WRITE, permissions.ADMIN]
def test_adds_write_contributor_private_project_admin(
self, app, user, user_two, project_private, url_private):
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True,
'permission': permissions.WRITE
},
'relationships': {
'users': {
'data': {
'id': user_two._id,
'type': 'users'
}
}
}
}
}
res = app.post_json_api(url_private, data, auth=user.auth)
assert res.status_code == 201
assert res.json['data']['id'] == '{}-{}'.format(
project_private._id, user_two._id)
project_private.reload()
assert user_two in project_private.contributors
assert project_private.get_permissions(
user_two) == [permissions.READ, permissions.WRITE]
def test_adds_read_contributor_private_project_admin(
self, app, user, user_two, project_private, url_private):
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True,
'permission': permissions.READ
},
'relationships': {
'users': {
'data': {
'id': user_two._id,
'type': 'users'
}
}
}
}
}
res = app.post_json_api(url_private, data, auth=user.auth)
assert res.status_code == 201
assert res.json['data']['id'] == '{}-{}'.format(
project_private._id, user_two._id)
project_private.reload()
assert user_two in project_private.contributors
assert project_private.get_permissions(user_two) == [
permissions.READ]
def test_adds_invalid_permission_contributor_private_project_admin(
self, app, user, user_two, project_private, url_private):
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True,
'permission': 'invalid',
},
'relationships': {
'users': {
'data': {
'id': user_two._id,
'type': 'users'
}
}
}
}
}
res = app.post_json_api(
url_private, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 400
project_private.reload()
assert user_two not in project_private.contributors.all()
def test_adds_none_permission_contributor_private_project_admin_uses_default_permissions(
self, app, user, user_two, project_private, url_private):
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True,
'permission': None
},
'relationships': {
'users': {
'data': {
'id': user_two._id,
'type': 'users'
}
}
}
}
}
res = app.post_json_api(url_private, data, auth=user.auth)
assert res.status_code == 201
project_private.reload()
assert user_two in project_private.contributors
assert project_private.has_permission(user_two, permissions.WRITE)
def test_adds_already_existing_contributor_private_project_admin(
self, app, user, user_two, project_private, data_user_two, url_private):
project_private.add_contributor(user_two, auth=Auth(user), save=True)
project_private.reload()
res = app.post_json_api(url_private, data_user_two,
auth=user.auth, expect_errors=True)
assert res.status_code == 400
def test_adds_non_existing_user_private_project_admin(
self, app, user, project_private, url_private):
data = {
'data': {
'type': 'contributors',
'attributes': {
'bibliographic': True
},
'relationships': {
'users': {
'data': {
'id': 'FAKE',
'type': 'users'
}
}
}
}
}
res = app.post_json_api(
url_private, data, auth=user.auth,
expect_errors=True)
assert res.status_code == 404
project_private.reload()
assert len(project_private.contributors) == 1
def test_adds_contributor_private_project_non_admin(
self, app, user, user_two, user_three,
project_private, data_user_three, url_private):
project_private.add_contributor(
user_two,
permissions=permissions.WRITE,
auth=Auth(user))
res = app.post_json_api(
url_private, data_user_three,
auth=user_two.auth, expect_errors=True)
assert res.status_code == 403
project_private.reload()
assert user_three not in project_private.contributors.all()
def test_adds_contributor_private_project_non_contributor(
self, app, user_two, user_three, project_private, data_user_three, url_private):
res = app.post_json_api(url_private, data_user_three,
auth=user_two.auth, expect_errors=True)
assert res.status_code == 403
project_private.reload()
assert user_three not in project_private.contributors.all()
def test_adds_contributor_private_project_not_logged_in(
self, app, user_two, project_private, data_user_two, url_private):
res = app.post_json_api(url_private, data_user_two, expect_errors=True)
assert res.status_code == 401
project_private.reload()
assert user_two not in project_private.contributors.all()
def test_add_unregistered_contributor_with_fullname(
self, app, user, project_public, url_public):
payload = {
'data': {
'type': 'contributors',
'attributes': {
'full_name': 'John Doe',
}
}
}
res = app.post_json_api(url_public, payload, auth=user.auth)
project_public.reload()
assert res.status_code == 201
assert res.json['data']['attributes']['unregistered_contributor'] == 'John Doe'
assert res.json['data']['attributes'].get('email') is None
assert res.json['data']['embeds']['users']['data']['id'] in project_public.contributors.values_list(
'guids___id', flat=True)
def test_add_contributor_with_fullname_and_email_unregistered_user(
self, app, user, project_public, url_public):
payload = {
'data': {
'type': 'contributors',
'attributes': {
'full_name': 'John Doe',
'email': '[email protected]'
}
}
}
res = app.post_json_api(url_public, payload, auth=user.auth)
project_public.reload()
assert res.status_code == 201
assert res.json['data']['attributes']['unregistered_contributor'] == 'John Doe'
assert res.json['data']['attributes'].get('email') is None
assert res.json['data']['attributes']['bibliographic'] is True
assert res.json['data']['attributes']['permission'] == permissions.WRITE
assert res.json['data']['embeds']['users']['data']['id'] in project_public.contributors.values_list(
'guids___id', flat=True)
def test_add_contributor_with_fullname_and_email_unregistered_user_set_attributes(
self, app, user, project_public, url_public):
payload = {
'data': {
'type': 'contributors',
'attributes': {
'full_name': 'John Doe',
'email': '[email protected]',
'bibliographic': False,
'permission': permissions.READ
}
}
}
res = app.post_json_api(url_public, payload, auth=user.auth)
project_public.reload()
assert res.status_code == 201
assert res.json['data']['attributes']['unregistered_contributor'] == 'John Doe'
assert res.json['data']['attributes'].get('email') is None
assert res.json['data']['attributes']['bibliographic'] is False
assert res.json['data']['attributes']['permission'] == permissions.READ
assert res.json['data']['embeds']['users']['data']['id'] in project_public.contributors.values_list(
'guids___id', flat=True)
def test_add_contributor_with_fullname_and_email_registered_user(
self, app, user, project_public, url_public):
user_contrib = UserFactory()
payload = {
'data': {
'type': 'contributors',
'attributes': {
'full_name': user_contrib.fullname,
'email': user_contrib.username
}
}
}
res = app.post_json_api(url_public, payload, auth=user.auth)
project_public.reload()
assert res.status_code == 201
assert res.json['data']['attributes']['unregistered_contributor'] is None
assert res.json['data']['attributes'].get('email') is None
assert res.json['data']['embeds']['users']['data']['id'] in project_public.contributors.values_list(
'guids___id', flat=True)
def test_add_unregistered_contributor_already_contributor(
self, app, user, project_public, url_public):
name, email = fake.name(), fake_email()
project_public.add_unregistered_contributor(
auth=Auth(user), fullname=name, email=email)
payload = {
'data': {
'type': 'contributors',
'attributes': {
'full_name': 'Doesn\'t Matter',
'email': email
}
}
}
res = app.post_json_api(
url_public, payload,
auth=user.auth, expect_errors=True)
project_public.reload()
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == '{} is already a contributor.'.format(
name)
def test_add_contributor_user_is_deactivated_registered_payload(
self, app, user, url_public):
user_contrib = UserFactory()
user_contrib.date_disabled = datetime.utcnow()
user_contrib.save()
payload = {
'data': {
'type': 'contributors',
'attributes': {},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': user_contrib._id
}
}
}
}
}
res = app.post_json_api(
url_public, payload,
auth=user.auth, expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Deactivated users cannot be added as contributors.'
def test_add_contributor_user_is_deactivated_unregistered_payload(
self, app, user, url_public):
user_contrib = UserFactory()
user_contrib.date_disabled = datetime.utcnow()
user_contrib.save()
payload = {
'data': {
'type': 'contributors',
'attributes': {
'full_name': user_contrib.fullname,
'email': user_contrib.username
},
}
}
res = app.post_json_api(
url_public, payload,
auth=user.auth, expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Deactivated users cannot be added as contributors.'
def test_add_contributor_index_returned(
self, app, user, data_user_two,
data_user_three, url_public):
res = app.post_json_api(url_public, data_user_two, auth=user.auth)
assert res.status_code == 201
assert res.json['data']['attributes']['index'] == 1
res = app.post_json_api(url_public, data_user_three, auth=user.auth)
assert res.status_code == 201
assert res.json['data']['attributes']['index'] == 2
def test_add_contributor_set_index_out_of_range(
self, app, user, user_two, project_public, url_public):
user_contrib_one = UserFactory()
project_public.add_contributor(user_contrib_one, save=True)
user_contrib_two = UserFactory()
project_public.add_contributor(user_contrib_two, save=True)
payload = {
'data': {
'type': 'contributors',
'attributes': {
'index': 4
},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': user_two._id
}
}
}
}
}
res = app.post_json_api(
url_public, payload,
auth=user.auth, expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == '4 is not a valid contributor index for node with id {}'.format(
project_public._id)
def test_add_contributor_set_index_first(
self, app, user, user_two, project_public, url_public):
user_contrib_one = UserFactory()
project_public.add_contributor(user_contrib_one, save=True)
user_contrib_two = UserFactory()
project_public.add_contributor(user_contrib_two, save=True)
payload = {
'data': {
'type': 'contributors',
'attributes': {
'index': 0
},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': user_two._id
}
}
}
}
}
res = app.post_json_api(url_public, payload, auth=user.auth)
project_public.reload()
assert res.status_code == 201
contributor_obj = project_public.contributor_set.get(user=user_two)
index = list(
project_public.get_contributor_order()
).index(contributor_obj.pk)
assert index == 0
def test_add_contributor_set_index_last(
self, app, user, user_two, project_public, url_public):
user_contrib_one = UserFactory()
project_public.add_contributor(user_contrib_one, save=True)
user_contrib_two = UserFactory()
project_public.add_contributor(user_contrib_two, save=True)
payload = {
'data': {
'type': 'contributors',
'attributes': {
'index': 3
},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': user_two._id
}
}
}
}
}
res = app.post_json_api(url_public, payload, auth=user.auth)
project_public.reload()
assert res.status_code == 201
contributor_obj = project_public.contributor_set.get(user=user_two)
index = list(
project_public.get_contributor_order()
).index(contributor_obj.pk)
assert index == 3
def test_add_inactive_merged_user_as_contributor(
self, app, user, url_public):
primary_user = UserFactory()
merged_user = UserFactory(merged_by=primary_user)
payload = {
'data': {
'type': 'contributors',
'attributes': {},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': merged_user._id
}
}
}
}
}
res = app.post_json_api(url_public, payload, auth=user.auth)
assert res.status_code == 201
contributor_added = res.json['data']['embeds']['users']['data']['id']
assert contributor_added == primary_user._id
def test_add_unconfirmed_user_by_guid(
self, app, user, project_public, url_public):
unconfirmed_user = UnconfirmedUserFactory()
payload = {
'data': {
'type': 'contributors',
'attributes': {},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': unconfirmed_user._id
}
}
}
}
}
res = app.post_json_api(
url_public, payload,
auth=user.auth, expect_errors=True)
assert res.status_code == 404
# if adding unregistered contrib by guid, fullname must be supplied
assert (
res.json['errors'][0]['detail'] ==
'Cannot add unconfirmed user {} to resource {}. You need to provide a full_name.'
.format(unconfirmed_user._id, project_public._id))
payload['data']['attributes']['full_name'] = 'Susan B. Anthony'
res = app.post_json_api(
url_public, payload,
auth=user.auth, expect_errors=True)
assert res.status_code == 201
assert res.json['data']['attributes']['unregistered_contributor'] == 'Susan B. Anthony'
@pytest.mark.django_db
@pytest.mark.enable_quickfiles_creation
@pytest.mark.enable_implicit_clean
class TestNodeContributorCreateValidation(NodeCRUDTestCase):
@pytest.fixture()
def create_serializer(self):
return NodeContributorsCreateSerializer
@pytest.fixture()
def validate_data(self, create_serializer):
return create_serializer.validate_data
def test_add_contributor_validation(self, project_public, validate_data, create_serializer):
# test_add_contributor_validation_user_id
validate_data(
create_serializer(),
project_public,
user_id='abcde')
# test_add_contributor_validation_user_id_fullname
validate_data(
create_serializer(),
project_public,
user_id='abcde',
full_name='Kanye')
# test_add_contributor_validation_user_id_email
with pytest.raises(exceptions.ValidationError):
validate_data(
create_serializer(),
project_public,
user_id='abcde',
email='[email protected]')
# test_add_contributor_validation_user_id_fullname_email
with pytest.raises(exceptions.ValidationError):
validate_data(
create_serializer(),
project_public,
user_id='abcde',
full_name='Kanye',
email='[email protected]')
# test_add_contributor_validation_fullname
validate_data(
create_serializer(),
project_public,
full_name='Kanye')
# test_add_contributor_validation_email
with pytest.raises(exceptions.ValidationError):
validate_data(
create_serializer(),
project_public,
email='[email protected]')
# test_add_contributor_validation_fullname_email
validate_data(
create_serializer(),
project_public,
full_name='Kanye',
email='[email protected]')
@pytest.mark.django_db
@pytest.mark.enable_bookmark_creation
@pytest.mark.enable_enqueue_task
class TestNodeContributorCreateEmail(NodeCRUDTestCase):
@pytest.fixture()
def url_project_contribs(self, project_public):
return '/{}nodes/{}/contributors/'.format(API_BASE, project_public._id)
@mock.patch('framework.auth.views.mails.send_mail')
def test_add_contributor_no_email_if_false(
self, mock_mail, app, user, url_project_contribs):
url = '{}?send_email=false'.format(url_project_contribs)
payload = {
'data': {
'type': 'contributors',
'attributes': {
'full_name': 'Kanye West',
'email': '[email protected]'
}
}
}
res = app.post_json_api(url, payload, auth=user.auth)
assert res.status_code == 201
assert mock_mail.call_count == 0
@mock.patch('framework.auth.views.mails.send_mail')
def test_add_contributor_sends_email(
self, mock_mail, app, user, user_two,
url_project_contribs):
url = '{}?send_email=default'.format(url_project_contribs)
payload = {
'data': {
'type': 'contributors',
'attributes': {
},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': user_two._id
}
}
}
}
}
res = app.post_json_api(url, payload, auth=user.auth)
assert res.status_code == 201
assert mock_mail.call_count == 1
@mock.patch('website.project.signals.contributor_added.send')
def test_add_contributor_signal_if_default(
self, mock_send, app, user, user_two, url_project_contribs):
url = '{}?send_email=default'.format(url_project_contribs)
payload = {
'data': {
'type': 'contributors',
'attributes': {
},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': user_two._id
}
}
}
}
}
res = app.post_json_api(url, payload, auth=user.auth)
args, kwargs = mock_send.call_args
assert res.status_code == 201
assert 'default' == kwargs['email_template']
def test_add_contributor_signal_preprint_email_disallowed(
self, app, user, user_two, url_project_contribs):
url = '{}?send_email=preprint'.format(url_project_contribs)
payload = {
'data': {
'type': 'contributors',
'attributes': {
},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': user_two._id
}
}
}
}
}
res = app.post_json_api(url, payload, auth=user.auth, expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'preprint is not a valid email preference.'
@mock.patch('framework.auth.views.mails.send_mail')
def test_add_unregistered_contributor_sends_email(
self, mock_mail, app, user, url_project_contribs):
url = '{}?send_email=default'.format(url_project_contribs)
payload = {
'data': {
'type': 'contributors',
'attributes': {
'full_name': 'Kanye West',
'email': '[email protected]'
}
}
}
res = app.post_json_api(url, payload, auth=user.auth)
assert res.status_code == 201
assert mock_mail.call_count == 1
@mock.patch('website.project.signals.unreg_contributor_added.send')
def test_add_unregistered_contributor_signal_if_default(
self, mock_send, app, user, url_project_contribs):
url = '{}?send_email=default'.format(url_project_contribs)
payload = {
'data': {
'type': 'contributors',
'attributes': {
'full_name': 'Kanye West',
'email': '[email protected]'
}
}
}
res = app.post_json_api(url, payload, auth=user.auth)
args, kwargs = mock_send.call_args
assert res.status_code == 201
assert 'default' == kwargs['email_template']
def test_add_unregistered_contributor_signal_preprint_email_disallowed(
self, app, user, url_project_contribs):
url = '{}?send_email=preprint'.format(url_project_contribs)
payload = {
'data': {
'type': 'contributors',
'attributes': {
'full_name': 'Kanye West',
'email': '[email protected]'
}
}
}
res = app.post_json_api(url, payload, auth=user.auth, expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'preprint is not a valid email preference.'
@mock.patch('framework.auth.views.mails.send_mail')
def test_add_contributor_invalid_send_email_param(
self, mock_mail, app, user, url_project_contribs):
url = '{}?send_email=true'.format(url_project_contribs)
payload = {
'data': {
'type': 'contributors',
'attributes': {
'full_name': 'Kanye West',
'email': '[email protected]'
}
}
}
res = app.post_json_api(
url, payload, auth=user.auth,
expect_errors=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'true is not a valid email preference.'
assert mock_mail.call_count == 0
@mock.patch('framework.auth.views.mails.send_mail')
def test_add_unregistered_contributor_without_email_no_email(
self, mock_mail, app, user, url_project_contribs):
url = '{}?send_email=default'.format(url_project_contribs)
payload = {
'data': {
'type': 'contributors',
'attributes': {
'full_name': 'Kanye West',
}
}
}
with capture_signals() as mock_signal:
res = app.post_json_api(url, payload, auth=user.auth)
assert contributor_added in mock_signal.signals_sent()
assert res.status_code == 201
assert mock_mail.call_count == 0
@pytest.mark.django_db
class TestNodeContributorBulkCreate(NodeCRUDTestCase):
@pytest.fixture()
def user_three(self):
return AuthUserFactory()
@pytest.fixture()
def url_public(self, project_public):
return '/{}nodes/{}/contributors/?send_email=false'.format(
API_BASE, project_public._id)
@pytest.fixture()
def url_private(self, project_private):
return '/{}nodes/{}/contributors/?send_email=false'.format(
API_BASE, project_private._id)
@pytest.fixture()
def payload_one(self, user_two):
return {
'type': 'contributors',
'attributes': {
'bibliographic': True,
'permission': permissions.ADMIN
},
'relationships': {
'users': {
'data': {
'id': user_two._id,
'type': 'users'
}
}
}
}
@pytest.fixture()
def payload_two(self, user_three):
return {
'type': 'contributors',
'attributes': {
'bibliographic': False,
'permission': permissions.READ
},
'relationships': {
'users': {
'data': {
'id': user_three._id,
'type': 'users'
}
}
}
}
def test_node_contributor_bulk_create_contributor_exists(
self, app, user, user_two, project_public,
payload_one, payload_two, url_public):
project_public.add_contributor(
user_two,
permissions=permissions.READ,
visible=True, save=True)
res = app.post_json_api(
url_public,
{'data': [payload_two, payload_one]},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert 'is already a contributor' in res.json['errors'][0]['detail']
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 2
def test_node_contributor_bulk_create_errors(
self, app, user, user_two, project_private,
payload_one, payload_two, url_public, url_private):
# test_bulk_create_contributors_blank_request
res = app.post_json_api(
url_public, auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
# test_node_contributor_bulk_create_logged_out_public_project
res = app.post_json_api(
url_public,
{'data': [payload_one, payload_two]},
expect_errors=True, bulk=True)
assert res.status_code == 401
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 1
# test_node_contributor_bulk_create_logged_out_private_project
res = app.post_json_api(
url_private,
{'data': [payload_one, payload_two]},
expect_errors=True, bulk=True)
assert res.status_code == 401
res = app.get(url_private, auth=user.auth)
assert len(res.json['data']) == 1
# test_node_contributor_bulk_create_logged_in_non_contrib_private_project
res = app.post_json_api(url_private, {'data': [payload_one, payload_two]},
auth=user_two.auth, expect_errors=True, bulk=True)
assert res.status_code == 403
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 1
# test_node_contributor_bulk_create_logged_in_read_only_contrib_private_project
project_private.add_contributor(
user_two, permissions=permissions.READ, save=True)
res = app.post_json_api(
url_private,
{'data': [payload_two]},
auth=user_two.auth,
expect_errors=True, bulk=True)
assert res.status_code == 403
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 1
def test_node_contributor_bulk_create_logged_in_public_project_project(
self, app, user, payload_one, payload_two, url_public):
res = app.post_json_api(
url_public,
{'data': [payload_one, payload_two]},
auth=user.auth, bulk=True)
assert res.status_code == 201
assert_equals([res.json['data'][0]['attributes']['bibliographic'],
res.json['data'][1]['attributes']['bibliographic']], [True, False])
assert_equals([res.json['data'][0]['attributes']['permission'],
res.json['data'][1]['attributes']['permission']], [permissions.ADMIN, permissions.READ])
assert res.content_type == 'application/vnd.api+json'
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 3
def test_node_contributor_bulk_create_logged_in_contrib_private_project(
self, app, user, payload_one, payload_two, url_private):
res = app.post_json_api(url_private, {'data': [payload_one, payload_two]},
auth=user.auth, expect_errors=True, bulk=True)
assert res.status_code == 201
assert len(res.json['data']) == 2
assert_equals([res.json['data'][0]['attributes']['bibliographic'],
res.json['data'][1]['attributes']['bibliographic']], [True, False])
assert_equals([res.json['data'][0]['attributes']['permission'],
res.json['data'][1]['attributes']['permission']], [permissions.ADMIN, permissions.READ])
assert res.content_type == 'application/vnd.api+json'
res = app.get(url_private, auth=user.auth)
assert len(res.json['data']) == 3
def test_node_contributor_bulk_create_payload_errors(
self, app, user, user_two, payload_one, payload_two, url_public):
# test_node_contributor_bulk_create_all_or_nothing
invalid_id_payload = {
'type': 'contributors',
'attributes': {
'bibliographic': True
},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': '12345'
}
}
}
}
res = app.post_json_api(
url_public,
{'data': [payload_one, invalid_id_payload]},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 404
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 1
# test_node_contributor_bulk_create_limits
node_contrib_create_list = {'data': [payload_one] * 101}
res = app.post_json_api(url_public, node_contrib_create_list,
auth=user.auth, expect_errors=True, bulk=True)
assert res.json['errors'][0]['detail'] == 'Bulk operation limit is 100, got 101.'
assert res.json['errors'][0]['source']['pointer'] == '/data'
# test_node_contributor_ugly_payload
payload = 'sdf;jlasfd'
res = app.post_json_api(
url_public, payload, auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == exceptions.ParseError.default_detail
# test_node_contributor_bulk_create_invalid_permissions_all_or_nothing
payload = {
'type': 'contributors',
'attributes': {
'permission': 'super-user',
'bibliographic': True
},
'relationships': {
'users': {
'data': {
'type': 'users',
'id': user_two._id
}
}
}
}
payload = {'data': [payload_two, payload]}
res = app.post_json_api(
url_public, payload, auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 1
@pytest.mark.django_db
class TestNodeContributorBulkUpdate(NodeCRUDTestCase):
@pytest.fixture()
def user_three(self):
return AuthUserFactory()
@pytest.fixture()
def user_four(self):
return AuthUserFactory()
@pytest.fixture()
def project_public(
self, user, user_two, user_three, title,
description, category):
project_public = ProjectFactory(
title=title,
description=description,
category=category,
is_public=True,
creator=user
)
project_public.add_contributor(
user_two,
permissions=permissions.READ,
visible=True, save=True)
project_public.add_contributor(
user_three,
permissions=permissions.READ,
visible=True, save=True)
return project_public
@pytest.fixture()
def project_private(
self, user, user_two, user_three,
title, description, category):
project_private = ProjectFactory(
title=title,
description=description,
category=category,
is_public=False,
creator=user
)
project_private.add_contributor(
user_two,
permissions=permissions.READ,
visible=True, save=True)
project_private.add_contributor(
user_three,
permissions=permissions.READ,
visible=True, save=True)
return project_private
@pytest.fixture()
def url_public(self, project_public):
return '/{}nodes/{}/contributors/'.format(API_BASE, project_public._id)
@pytest.fixture()
def url_private(self, project_private):
return '/{}nodes/{}/contributors/'.format(
API_BASE, project_private._id)
@pytest.fixture()
def payload_public_one(self, user_two, project_public, make_contrib_id):
return {
'id': make_contrib_id(project_public._id, user_two._id),
'type': 'contributors',
'attributes': {
'bibliographic': True,
'permission': permissions.ADMIN
}
}
@pytest.fixture()
def payload_private_one(self, user_two, project_private, make_contrib_id):
return {
'id': make_contrib_id(project_private._id, user_two._id),
'type': 'contributors',
'attributes': {
'bibliographic': True,
'permission': permissions.ADMIN
}
}
@pytest.fixture()
def payload_public_two(self, user_three, project_public, make_contrib_id):
return {
'id': make_contrib_id(project_public._id, user_three._id),
'type': 'contributors',
'attributes': {
'bibliographic': False,
'permission': permissions.WRITE
}
}
@pytest.fixture()
def payload_private_two(
self, user_three, project_private, make_contrib_id):
return {
'id': make_contrib_id(project_private._id, user_three._id),
'type': 'contributors',
'attributes': {
'bibliographic': False,
'permission': permissions.WRITE
}
}
def test_bulk_update_contributors_errors(
self, app, user, user_two, user_four, project_public,
payload_public_one, payload_public_two,
payload_private_one, payload_private_two,
url_public, url_private, make_contrib_id):
# test_bulk_update_contributors_blank_request
res = app.patch_json_api(
url_public, auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
# test_bulk_update_contributors_dict_instead_of_list
res = app.put_json_api(
url_public,
{'data': payload_public_one},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
# test_bulk_update_contributors_public_project_one_not_found
invalid_id = {
'id': '12345-abcde',
'type': 'contributors',
'attributes': {}
}
empty_payload = {'data': [invalid_id, payload_public_one]}
res = app.put_json_api(
url_public, empty_payload, auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Could not find all objects to update.'
res = app.get(url_public, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ]
)
# test_bulk_update_contributors_public_projects_logged_out
res = app.put_json_api(
url_public,
{
'data': [payload_public_one,
payload_public_two]
},
expect_errors=True, bulk=True)
assert res.status_code == 401
res = app.get(url_public, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ]
)
# test_bulk_update_contributors_private_projects_logged_out
res = app.put_json_api(
url_private,
{
'data': [payload_private_one,
payload_private_two]
},
expect_errors=True, bulk=True)
assert res.status_code == 401
res = app.get(url_private, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ])
# test_bulk_update_contributors_private_projects_logged_in_non_contrib
res = app.put_json_api(
url_private,
{
'data': [payload_private_one,
payload_private_two]
},
auth=user_four.auth,
expect_errors=True, bulk=True)
assert res.status_code == 403
res = app.get(url_private, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ])
# test_bulk_update_contributors_private_projects_logged_in_read_only_contrib
res = app.put_json_api(
url_private,
{
'data': [payload_private_one,
payload_private_two]
},
auth=user_two.auth,
expect_errors=True, bulk=True)
assert res.status_code == 403
res = app.get(url_private, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ]
)
# test_bulk_update_contributors_projects_send_dictionary_not_list
res = app.put_json_api(
url_public,
{'data': payload_public_one},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Expected a list of items but got type "dict".'
# test_bulk_update_contributors_id_not_supplied
res = app.put_json_api(
url_public,
{'data': [{
'type': 'contributors',
'attributes': {}
}]},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert len(res.json['errors']) == 1
assert res.json['errors'][0]['detail'] == 'Contributor identifier not provided.'
# test_bulk_update_contributors_type_not_supplied
res = app.put_json_api(
url_public,
{'data': [{
'id': make_contrib_id(
project_public._id, user_two._id
),
'attributes': {}
}]},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert len(res.json['errors']) == 1
assert res.json['errors'][0]['source']['pointer'] == '/data/0/type'
assert res.json['errors'][0]['detail'] == 'This field may not be null.'
# test_bulk_update_contributors_wrong_type
invalid_type = {
'id': make_contrib_id(project_public._id, user_two._id),
'type': 'Wrong type.',
'attributes': {}
}
res = app.put_json_api(url_public, {'data': [invalid_type]},
auth=user.auth, expect_errors=True, bulk=True)
assert res.status_code == 409
# test_bulk_update_contributors_invalid_id_format
invalid_id = {
'id': '12345',
'type': 'contributors',
'attributes': {}
}
res = app.put_json_api(url_public, {'data': [invalid_id]},
auth=user.auth, expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Contributor identifier incorrectly formatted.'
# test_bulk_update_contributors_wrong_id
invalid_id = {
'id': '12345-abcde',
'type': 'contributors',
'attributes': {}
}
res = app.put_json_api(
url_public, {'data': [invalid_id]},
auth=user.auth, expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Could not find all objects to update.'
# test_bulk_update_contributors_limits
contrib_update_list = {'data': [payload_public_one] * 101}
res = app.put_json_api(
url_public, contrib_update_list,
auth=user.auth, expect_errors=True, bulk=True)
assert res.json['errors'][0]['detail'] == 'Bulk operation limit is 100, got 101.'
assert res.json['errors'][0]['source']['pointer'] == '/data'
# test_bulk_update_contributors_invalid_permissions
res = app.put_json_api(
url_public,
{
'data': [
payload_public_two, {
'id': make_contrib_id(
project_public._id, user_two._id
),
'type': 'contributors',
'attributes': {
'permission': 'super-user'}
}
]
},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == '"super-user" is not a valid choice.'
res = app.get(url_public, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ]
)
# test_bulk_update_contributors_invalid_bibliographic
res = app.put_json_api(
url_public,
{
'data': [
payload_public_two, {
'id': make_contrib_id(
project_public._id, user_two._id
),
'type': 'contributors',
'attributes': {
'bibliographic': 'true and false'
}
}
]
},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == '"true and false" is not a valid boolean.'
res = app.get(url_public, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ]
)
# test_bulk_update_contributors_must_have_at_least_one_bibliographic_contributor
res = app.put_json_api(
url_public,
{
'data': [
payload_public_two, {
'id': make_contrib_id(
project_public._id, user._id
),
'type': 'contributors',
'attributes': {
'permission': permissions.ADMIN,
'bibliographic': False
}
}, {
'id': make_contrib_id(
project_public._id, user_two._id
),
'type': 'contributors',
'attributes': {
'bibliographic': False
}
}
]
},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Must have at least one visible contributor'
# test_bulk_update_contributors_must_have_at_least_one_admin
res = app.put_json_api(
url_public,
{'data': [
payload_public_two, {
'id': make_contrib_id(
project_public._id, user._id
),
'type': 'contributors',
'attributes': {
'permission': permissions.READ
}
}
]},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == '{} is the only admin.'.format(
user.fullname)
def test_bulk_update_contributors_public_projects_logged_in(
self, app, user, payload_public_one, payload_public_two, url_public):
res = app.put_json_api(
url_public,
{'data': [payload_public_one, payload_public_two]},
auth=user.auth, bulk=True
)
assert res.status_code == 200
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission']],
[permissions.ADMIN, permissions.WRITE]
)
def test_bulk_update_contributors_private_projects_logged_in_contrib(
self, app, user, payload_private_one, payload_private_two, url_private):
res = app.put_json_api(
url_private,
{'data': [payload_private_one, payload_private_two]},
auth=user.auth, bulk=True
)
assert res.status_code == 200
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission']],
[permissions.ADMIN, permissions.WRITE]
)
@pytest.mark.django_db
class TestNodeContributorBulkPartialUpdate(NodeCRUDTestCase):
@pytest.fixture()
def user_three(self):
return AuthUserFactory()
@pytest.fixture()
def user_four(self):
return AuthUserFactory()
@pytest.fixture()
def project_public(
self, user, user_two, user_three, title,
description, category):
project_public = ProjectFactory(
title=title,
description=description,
category=category,
is_public=True,
creator=user
)
project_public.add_contributor(
user_two,
permissions=permissions.READ,
visible=True, save=True
)
project_public.add_contributor(
user_three,
permissions=permissions.READ,
visible=True, save=True
)
return project_public
@pytest.fixture()
def project_private(
self, user, user_two, user_three, title,
description, category):
project_private = ProjectFactory(
title=title,
description=description,
category=category,
is_public=False,
creator=user
)
project_private.add_contributor(
user_two,
permissions=permissions.READ,
visible=True, save=True)
project_private.add_contributor(
user_three,
permissions=permissions.READ,
visible=True, save=True)
return project_private
@pytest.fixture()
def url_public(self, project_public):
return '/{}nodes/{}/contributors/'.format(API_BASE, project_public._id)
@pytest.fixture()
def url_private(self, project_private):
return '/{}nodes/{}/contributors/'.format(
API_BASE, project_private._id)
@pytest.fixture()
def payload_public_one(self, user_two, project_public, make_contrib_id):
return {
'id': make_contrib_id(project_public._id, user_two._id),
'type': 'contributors',
'attributes': {
'bibliographic': True,
'permission': permissions.ADMIN
}
}
@pytest.fixture()
def payload_public_two(self, user_three, project_public, make_contrib_id):
return {
'id': make_contrib_id(project_public._id, user_three._id),
'type': 'contributors',
'attributes': {
'bibliographic': False,
'permission': permissions.WRITE
}
}
@pytest.fixture()
def payload_private_one(self, user_two, project_private, make_contrib_id):
return {
'id': make_contrib_id(project_private._id, user_two._id),
'type': 'contributors',
'attributes': {
'bibliographic': True,
'permission': permissions.ADMIN
}
}
@pytest.fixture()
def payload_private_two(
self, user_three, project_private, make_contrib_id):
return {
'id': make_contrib_id(project_private._id, user_three._id),
'type': 'contributors',
'attributes': {
'bibliographic': False,
'permission': permissions.WRITE
}
}
def test_bulk_partial_update_errors(
self, app, user, user_two, user_four,
project_public, payload_public_one,
payload_public_two, payload_private_one,
payload_private_two, url_public,
url_private, make_contrib_id):
# test_bulk_partial_update_contributors_blank_request
res = app.patch_json_api(
url_public, auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
# test_bulk_partial_update_contributors_public_project_one_not_found
invalid_id = {
'id': '12345-abcde',
'type': 'contributors',
'attributes': {}
}
empty_payload = {'data': [invalid_id, payload_public_one]}
res = app.patch_json_api(
url_public, empty_payload, auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Could not find all objects to update.'
res = app.get(url_public, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ]
)
# test_bulk_partial_update_contributors_public_projects_logged_out
res = app.patch_json_api(
url_public,
{'data': [payload_public_one, payload_public_two]},
bulk=True, expect_errors=True)
assert res.status_code == 401
res = app.get(url_public, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ]
)
# test_bulk_partial_update_contributors_private_projects_logged_out
res = app.patch_json_api(
url_private,
{'data': [payload_private_one, payload_private_two]},
expect_errors=True, bulk=True
)
assert res.status_code == 401
res = app.get(url_private, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ])
# test_bulk_partial_update_contributors_private_projects_logged_in_non_contrib
res = app.patch_json_api(
url_private,
{'data': [payload_private_one,
payload_private_two]},
auth=user_four.auth,
expect_errors=True, bulk=True)
assert res.status_code == 403
res = app.get(url_private, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ]
)
# test_bulk_partial_update_contributors_private_projects_logged_in_read_only_contrib
res = app.patch_json_api(
url_private,
{'data': [payload_private_one,
payload_private_two]},
auth=user_two.auth,
expect_errors=True, bulk=True)
assert res.status_code == 403
res = app.get(url_private, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ])
# test_bulk_partial_update_contributors_projects_send_dictionary_not_list
res = app.patch_json_api(
url_public,
{'data': payload_public_one},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Expected a list of items but got type "dict".'
# test_bulk_partial_update_contributors_id_not_supplied
res = app.patch_json_api(
url_public,
{'data': [{
'type': 'contributors',
'attributes': {}
}]},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert len(res.json['errors']) == 1
assert res.json['errors'][0]['detail'] == 'Contributor identifier not provided.'
# test_bulk_partial_update_contributors_type_not_supplied
res = app.patch_json_api(
url_public,
{'data': [{
'id': make_contrib_id(
project_public._id,
user_two._id
),
'attributes': {}
}]},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert len(res.json['errors']) == 1
assert res.json['errors'][0]['source']['pointer'] == '/data/0/type'
assert res.json['errors'][0]['detail'] == 'This field may not be null.'
# test_bulk_partial_update_contributors_wrong_type
invalid_type = {
'id': make_contrib_id(project_public._id, user_two._id),
'type': 'Wrong type.',
'attributes': {}
}
res = app.patch_json_api(
url_public, {'data': [invalid_type]},
auth=user.auth, expect_errors=True, bulk=True)
assert res.status_code == 409
# test_bulk_partial_update_contributors_wrong_id
invalid_id = {
'id': '12345-abcde',
'type': 'contributors',
'attributes': {}
}
res = app.patch_json_api(
url_public, {'data': [invalid_id]},
auth=user.auth, expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Could not find all objects to update.'
# test_bulk_partial_update_contributors_limits
contrib_update_list = {'data': [payload_public_one] * 101}
res = app.patch_json_api(
url_public, contrib_update_list,
auth=user.auth, expect_errors=True, bulk=True)
assert res.json['errors'][0]['detail'] == 'Bulk operation limit is 100, got 101.'
assert res.json['errors'][0]['source']['pointer'] == '/data'
# test_bulk_partial_update_invalid_permissions
res = app.patch_json_api(
url_public,
{
'data': [
payload_public_two, {
'id': make_contrib_id(
project_public._id,
user_two._id
),
'type': 'contributors',
'attributes': {'permission': 'super-user'}
}]
},
auth=user.auth, expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == '"super-user" is not a valid choice.'
res = app.get(url_public, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ])
# test_bulk_partial_update_invalid_bibliographic
res = app.patch_json_api(
url_public,
{
'data': [
payload_public_two, {
'id': make_contrib_id(
project_public._id, user_two._id),
'type': 'contributors',
'attributes': {'bibliographic': 'true and false'}
}
]
},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == '"true and false" is not a valid boolean.'
res = app.get(url_public, auth=user.auth)
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission'],
data[2]['attributes']['permission']],
[permissions.ADMIN, permissions.READ, permissions.READ])
def test_bulk_partial_update_contributors_public_projects_logged_in(
self, app, user, payload_public_one, payload_public_two, url_public):
res = app.patch_json_api(
url_public,
{'data': [payload_public_one, payload_public_two]},
auth=user.auth, bulk=True)
assert res.status_code == 200
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission']],
[permissions.ADMIN, permissions.WRITE])
def test_bulk_partial_update_contributors_private_projects_logged_in_contrib(
self, app, user, payload_private_one, payload_private_two, url_private):
res = app.patch_json_api(
url_private,
{'data': [payload_private_one, payload_private_two]},
auth=user.auth, bulk=True)
assert res.status_code == 200
data = res.json['data']
assert_equals(
[data[0]['attributes']['permission'],
data[1]['attributes']['permission']],
[permissions.ADMIN, permissions.WRITE])
@pytest.mark.django_db
class TestNodeContributorBulkDelete(NodeCRUDTestCase):
@pytest.fixture()
def user_three(self):
return AuthUserFactory()
@pytest.fixture()
def user_four(self):
return AuthUserFactory()
@pytest.fixture()
def project_public(
self, user, user_two, user_three, title,
description, category):
project_public = ProjectFactory(
title=title,
description=description,
category=category,
is_public=True,
creator=user
)
project_public.add_contributor(
user_two,
permissions=permissions.READ,
visible=True, save=True)
project_public.add_contributor(
user_three,
permissions=permissions.READ,
visible=True, save=True)
return project_public
@pytest.fixture()
def project_private(
self, user, user_two, user_three, title,
description, category):
project_private = ProjectFactory(
title=title,
description=description,
category=category,
is_public=False,
creator=user
)
project_private.add_contributor(
user_two,
permissions=permissions.READ,
visible=True, save=True)
project_private.add_contributor(
user_three,
permissions=permissions.READ,
visible=True, save=True)
return project_private
@pytest.fixture()
def url_public(self, project_public):
return '/{}nodes/{}/contributors/'.format(API_BASE, project_public._id)
@pytest.fixture()
def url_private(self, project_private):
return '/{}nodes/{}/contributors/'.format(
API_BASE, project_private._id)
@pytest.fixture()
def payload_public_one(self, user_two, project_public, make_contrib_id):
return {
'id': make_contrib_id(project_public._id, user_two._id),
'type': 'contributors'
}
@pytest.fixture()
def payload_public_two(self, user_three, project_public, make_contrib_id):
return {
'id': make_contrib_id(project_public._id, user_three._id),
'type': 'contributors'
}
@pytest.fixture()
def payload_private_one(self, user_two, project_private, make_contrib_id):
return {
'id': make_contrib_id(project_private._id, user_two._id),
'type': 'contributors',
}
@pytest.fixture()
def payload_private_two(
self, user_three, project_private, make_contrib_id):
return {
'id': make_contrib_id(project_private._id, user_three._id),
'type': 'contributors',
}
def test_bulk_delete_contributors_errors(
self, app, user, user_two, user_four,
project_public, payload_public_one,
payload_public_two, payload_private_one,
payload_private_two, url_public,
url_private, make_contrib_id):
# test_bulk_delete_contributors_blank_request
res = app.delete_json_api(
url_public, auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
# test_bulk_delete_invalid_id_format
res = app.delete_json_api(
url_public,
{'data': [{
'id': '12345',
'type': 'contributors'
}]},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Contributor identifier incorrectly formatted.'
# test_bulk_delete_invalid_id
res = app.delete_json_api(
url_public,
{'data': [{
'id': '12345-abcde',
'type': 'contributors'
}]},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Could not find all objects to delete.'
# test_bulk_delete_non_contributor
res = app.delete_json_api(
url_public,
{'data': [{
'id': make_contrib_id(
project_public._id, user_four._id
),
'type': 'contributors'
}]},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 404
# test_bulk_delete_all_contributors
res = app.delete_json_api(
url_public,
{'data': [
payload_public_one,
payload_public_two,
{
'id': make_contrib_id(
project_public._id, user._id
),
'type': 'contributors'
}
]},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] in [
'Must have at least one registered admin contributor',
'Must have at least one visible contributor']
project_public.reload()
assert len(project_public.contributors) == 3
# test_bulk_delete_contributors_no_id
res = app.delete_json_api(
url_public,
{'data': [{'type': 'contributors'}]},
auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Request must include /data/id.'
# test_bulk_delete_contributors_no_type
res = app.delete_json_api(
url_public,
{'data': [{'id': make_contrib_id(
project_public._id, user_two._id
)}]},
auth=user.auth, expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Request must include /type.'
# test_bulk_delete_contributors_invalid_type
res = app.delete_json_api(
url_public,
{'data': [{
'type': 'Wrong type',
'id': make_contrib_id(
project_public._id, user_two._id)
}]},
auth=user.auth, expect_errors=True, bulk=True)
assert res.status_code == 409
# test_bulk_delete_dict_inside_data
res = app.delete_json_api(
url_public,
{
'data': {
'id': make_contrib_id(
project_public._id,
user_two._id),
'type': 'contributors'}},
auth=user.auth,
expect_errors=True,
bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Expected a list of items but got type "dict".'
# test_bulk_delete_contributors_public_projects_logged_out
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 3
res = app.delete_json_api(
url_public,
{'data': [payload_public_one, payload_public_two]},
expect_errors=True, bulk=True)
assert res.status_code == 401
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 3
# test_bulk_delete_contributors_private_projects_logged_out
res = app.get(url_private, auth=user.auth)
assert len(res.json['data']) == 3
res = app.delete_json_api(
url_private,
{'data': [payload_private_one, payload_private_two]},
expect_errors=True, bulk=True)
assert res.status_code == 401
res = app.get(url_private, auth=user.auth)
assert len(res.json['data']) == 3
# test_bulk_delete_contributors_private_projects_logged_in_non_contributor
res = app.get(url_private, auth=user.auth)
assert len(res.json['data']) == 3
res = app.delete_json_api(
url_private,
{'data': [payload_private_one, payload_private_two]},
auth=user_four.auth,
expect_errors=True, bulk=True)
assert res.status_code == 403
res = app.get(url_private, auth=user.auth)
assert len(res.json['data']) == 3
# test_bulk_delete_contributors_private_projects_logged_in_read_only_contributor
res = app.get(url_private, auth=user.auth)
assert len(res.json['data']) == 3
res = app.delete_json_api(
url_private,
{'data': [payload_private_one, payload_private_two]},
auth=user_two.auth,
expect_errors=True, bulk=True)
assert res.status_code == 403
res = app.get(url_private, auth=user.auth)
assert len(res.json['data']) == 3
# test_bulk_delete_contributors_all_or_nothing
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 3
invalid_id = {
'id': '12345-abcde',
'type': 'contributors',
}
new_payload = {'data': [payload_public_one, invalid_id]}
res = app.delete_json_api(
url_public, new_payload, auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Could not find all objects to delete.'
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 3
# test_bulk_delete_contributors_limits
new_payload = {'data': [payload_public_one] * 101}
res = app.delete_json_api(
url_public, new_payload, auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
assert res.json['errors'][0]['detail'] == 'Bulk operation limit is 100, got 101.'
assert res.json['errors'][0]['source']['pointer'] == '/data'
# test_bulk_delete_contributors_no_payload
res = app.delete_json_api(
url_public, auth=user.auth,
expect_errors=True, bulk=True)
assert res.status_code == 400
def test_bulk_delete_contributors_public_project_logged_in(
self, app, user, payload_public_one, payload_public_two, url_public):
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 3
# Disconnect contributor_removed so that we don't check in files
# We can remove this when StoredFileNode is implemented in osf-models
with disconnected_from_listeners(contributor_removed):
res = app.delete_json_api(
url_public,
{'data': [payload_public_one, payload_public_two]},
auth=user.auth, bulk=True)
assert res.status_code == 204
res = app.get(url_public, auth=user.auth)
assert len(res.json['data']) == 1
def test_bulk_delete_contributors_private_projects_logged_in_contributor(
self, app, user, payload_private_one, payload_private_two, url_private):
res = app.get(url_private, auth=user.auth)
assert len(res.json['data']) == 3
# Disconnect contributor_removed so that we don't check in files
# We can remove this when StoredFileNode is implemented in osf-models
with disconnected_from_listeners(contributor_removed):
res = app.delete_json_api(
url_private,
{'data': [payload_private_one, payload_private_two]},
auth=user.auth, bulk=True)
assert res.status_code == 204
res = app.get(url_private, auth=user.auth)
assert len(res.json['data']) == 1
@pytest.mark.django_db
@pytest.mark.enable_quickfiles_creation
@pytest.mark.enable_implicit_clean
class TestNodeContributorFiltering:
@pytest.fixture()
def project(self, user):
return ProjectFactory(creator=user)
@pytest.fixture()
def url(self, project):
return '/{}nodes/{}/contributors/'.format(
API_BASE, project._id)
def test_filtering(self, app, user, url, project):
# test_filtering_full_name_field
filter_url = '{}?filter[full_name]=Freddie'.format(url)
res = app.get(filter_url, auth=user.auth, expect_errors=True)
assert res.status_code == 400
errors = res.json['errors']
assert len(errors) == 1
assert errors[0]['detail'] == '\'full_name\' is not a valid field for this endpoint.'
user_two = AuthUserFactory()
user_three = AuthUserFactory()
project.add_contributor(user_two, permissions.WRITE)
project.add_contributor(user_three, permissions.READ, visible=False)
# test_filtering_permission_field_admin
filter_url = '{}?filter[permission]=admin'.format(url, project._id)
res = app.get(filter_url, auth=user.auth, expect_errors=True)
assert res.status_code == 200
assert len(res.json['data']) == 1
assert res.json['data'][0]['attributes'].get('permission') == permissions.ADMIN
# test_filtering_permission_field_write
filter_url = '{}?filter[permission]=write'.format(url, project._id)
res = app.get(filter_url, auth=user.auth, expect_errors=True)
assert res.status_code == 200
assert len(res.json['data']) == 2
# test_filtering_permission_field_read
filter_url = '{}?filter[permission]=read'.format(url, project._id)
res = app.get(filter_url, auth=user.auth, expect_errors=True)
assert res.status_code == 200
assert len(res.json['data']) == 3
# test_filtering_node_with_only_bibliographic_contributors
# no filter
res = app.get(url, auth=user.auth)
assert res.status_code == 200
assert len(res.json['data']) == 3
# filter for bibliographic contributors
filter_url = url + '?filter[bibliographic]=True'
res = app.get(filter_url, auth=user.auth)
assert res.status_code == 200
assert len(res.json['data']) == 2
assert res.json['data'][0]['attributes'].get('bibliographic', None)
# filter for non-bibliographic contributors
filter_url = url + '?filter[bibliographic]=False'
res = app.get(filter_url, auth=user.auth)
assert len(res.json['data']) == 1
# test_filtering_on_invalid_field
filter_url = '{}?filter[invalid]=foo'.format(url, project._id)
res = app.get(filter_url, auth=user.auth, expect_errors=True)
assert res.status_code == 400
errors = res.json['errors']
assert len(errors) == 1
assert errors[0]['detail'] == '\'invalid\' is not a valid field for this endpoint.'
def test_filtering_node_with_non_bibliographic_contributor(
self, app, user, project, url):
non_bibliographic_contrib = UserFactory()
project.add_contributor(non_bibliographic_contrib, visible=False)
project.save()
# no filter
res = app.get(url, auth=user.auth)
assert res.status_code == 200
assert len(res.json['data']) == 2
# filter for bibliographic contributors
filter_url = url + '?filter[bibliographic]=True'
res = app.get(filter_url, auth=user.auth)
assert len(res.json['data']) == 1
assert res.json['data'][0]['attributes'].get('bibliographic', None)
# filter for non-bibliographic contributors
filter_url = url + '?filter[bibliographic]=False'
res = app.get(filter_url, auth=user.auth)
assert len(res.json['data']) == 1
assert not res.json['data'][0]['attributes'].get('bibliographic', None)
| apache-2.0 | 3,798,452,324,951,407,000 | 35.60987 | 124 | 0.522419 | false | 4.196178 | true | false | false |
ashgillman/Evolution-Plot | evolution-plot.py | 1 | 3868 | #!/usr/bin/env python3
"""Plots the evolution of an algorithm(s) by defining the
relationships between them.
"""
import os
from collections import defaultdict
from functools import partial
import argparse
from textwrap import wrap
import yaml
from graphviz import Digraph
# TODO: remove
from pprint import pprint
__location__ = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
STYLE_FILE = os.path.join(__location__, 'format.yml')
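# compose(f, g, h) returns a callable equivalent to lambda *args, **kw: f(g(h(*args, **kw))).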
def compose(*a):
def composed(f, g, *args, **kwargs):
return f(g(*args, **kwargs))
try:
return partial(composed, a[0], compose(*a[1:]))
except:
return a[0]
def load_data(file):
with open(file, encoding='utf-8') as f:
return yaml.safe_load(f)
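# Build a Graphviz HTML-like label: each label line gets its own <FONT> attributes and is word-wrapped to its width.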
def make_multi_font_label(labels, attributes, widths):
def ensure_string(maybe_string):
return ' ' if not maybe_string else str(maybe_string)
labels = map(ensure_string, labels)
return '< {} >'.format('<BR/>'.join(
'<FONT {}>{} </FONT>'.format(
' '.join('{}="{}"'.format(k, v) for k, v in attr.items()),
'<BR/>'.join(wrap(label, width)))
for label, attr, width in zip(labels, attributes, widths)))
def by_year_subgraph_constructor():
subgraph = Digraph()
subgraph.body.append('rank=same')
return subgraph
def add_edges(g, node, relation, styles):
if relation in node and node[relation]:
name = node['short name']
for link_obj in node[relation]:
# link might be listed as string or as only key of a dict
try:
link = ''.join(list(link_obj.keys())) # if dict
except:
link = link_obj
# link name may or may not be defined
try:
link_name = data[link]['short name']
except:
link_name = link
g.node(link_name, **styles['unknown nodes'])
g.edge(link_name, name, **styles[relation])
def generate_evolution_plot(data):
g = Digraph(format='png')
styles = load_data(STYLE_FILE)
# apply global graph styles
g.body.extend('{}={}'.format(k, v)
for k, v in styles['graph'].items())
# plot nodes
subgraphs = defaultdict(by_year_subgraph_constructor)
for node in data.values():
name = node['short name']
label = make_multi_font_label(*zip(*(
(name, styles['node name font'],
styles['node name width']),
(node['title'], styles['node title font'],
styles['node title width']),
(node['authors'], styles['node authors font'],
styles['node authors width']),
(node['description'], styles['node description font'],
styles['node description width']))))
subgraphs[node['year']].node(name, label, **styles['nodes'])
# plot edges
for id, node in data.items():
name = node['short name']
add_edges(g, node, 'develops on', styles)
add_edges(g, node, 'similar to', styles)
# plot year legend
years = sorted(list(subgraphs.keys()))
for year, graph in subgraphs.items():
graph.node(str(year), **styles['year nodes'])
for first, second in zip(years, years[1:]):
g.edge(str(first), str(second), **styles['year edges'])
for graph in subgraphs.values():
g.subgraph(graph)
g.render('img')
return g
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Plot algorithm evolution.')
parser.add_argument('data', help='Yaml file containing data.')
args = parser.parse_args()
data_file = args.data
data = load_data(data_file)
graph = generate_evolution_plot(data)
print(str(str(graph).encode(
'ascii', errors='backslashreplace').decode()))
| apache-2.0 | 5,106,822,965,566,527,000 | 29.456693 | 70 | 0.585315 | false | 3.84493 | false | false | false |
xesscorp/skidl | docs/_site/files/a-taste-of-hierarchy/intfc_brd.py | 2 | 4256 | from skidl import *
@SubCircuit
def osc(osc1, osc2, gnd,
crystal = Part("Device", 'Crystal', footprint='Crystal:Crystal_HC49-U_Vertical', dest=TEMPLATE),
cap = Part("Device", 'C', value='10pf', footprint='Capacitors_SMD:C_0603', dest=TEMPLATE) ):
'''Attach a crystal and two caps to the osc1 and osc2 nets.'''
xtal = crystal(1) # Instantiate the crystal from the template.
num_xtal_pins = len(xtal['.*']) # Get the number of pins on the crystal.
if num_xtal_pins == 4: # This handles a 4-pin crystal...
xtal[2, 4] += gnd # Connect the crystal ground pins.
xtal[3, 1] += osc1, osc2 # Connect the crystal pins to the oscillator nets.
else: # Otherwise assume it's a 2-pin crystal...
xtal[1,2] += osc1, osc2 # Using a two-pin crystal.
trim_cap = cap(2) # Instantiate some trimmer caps.
trim_cap[0][1, 2] += osc1, gnd # Connect the trimmer caps to the crystal.
trim_cap[1][1, 2] += osc2, gnd
# Libraries.
xess_lib = r'C:\xesscorp\KiCad\libraries\xess.lib'
pic32_lib = r'C:\xesscorp\KiCad\libraries\pic32.lib'
pickit3_lib = r'C:\xesscorp\KiCad\libraries\pickit3.lib'
# Global nets.
gnd = Net('GND')
gnd.drive = POWER
vusb = Net('VUSB')
vusb.drive = POWER
vdd = Net('+3.3V')
# Some common parts used as templates.
cap = Part("Device", 'C', footprint='Capacitors_SMD:C_0603', dest=TEMPLATE)
res = Part("Device", 'R', footprint='Resistors_SMD:R_0603', dest=TEMPLATE)
# Regulate +5V VUSB down to +3.3V for VDD.
vreg = Part(xess_lib, 'TPS793XX', footprint='TO_SOT_Packages_SMD:SOT-23-5')
noise_cap = cap(value='0.01uf')
vreg['IN, EN'] += vusb
vreg['GND'] += gnd
vreg['OUT'] += vdd
vreg['NR'] += noise_cap[1]
noise_cap[2] += gnd
# Microcontroller.
pic32 = Part(pic32_lib, 'pic32MX2\*0F\*\*\*B-QFN28',
footprint='Housings_DFN_QFN:QFN-28-1EP_6x6mm_Pitch0.65mm')
pic32['VSS'] += gnd
pic32['VDD'] += vdd # Main CPU power.
pic32['VUSB3V3'] += vdd # Power to USB transceiver.
pic32['^VBUS$'] += vusb # Monitor power pin of USB connector.
pic32['PAD'] += gnd # Power pad on bottom attached to ground.
# Bypass capacitors for microcontroller.
bypass = cap(3, value='0.1uf')
bypass[0][1, 2] += vdd, gnd
bypass[1][1, 2] += vdd, gnd
bypass[2][1, 2] += pic32['VCAP'], gnd
# Microcontroller MCLR circuitry:
# Pull-up resistor to VDD.
# Filter capacitor to delay exit of reset or eliminate glitches.
# Series resistor to isolate capacitor from device programmer.
r_pullup = res(value='10K')
r_series = res(value='1K')
filter_cap = cap(value='0.1uf')
r_series[1, 2] += r_pullup[1], pic32['MCLR']
r_pullup[2] += vdd
filter_cap[1, 2] += r_series[1], gnd
# USB connector.
usb_conn = Part(xess_lib, 'USB-MicroB', footprint='XESS:USB-microB-1')
usb_conn['D\+, D-, VBUS, GND, NC'] += pic32['D\+, D-'], vusb, gnd, NC
# Noise filtering/isolation on the USB connector shield.
shld_cap = cap(value='4.7nf')
shld_res = res(value='1M')
shld_cap[1] += usb_conn['shield']
shld_res[1] += usb_conn['shield']
gnd += shld_cap[2], shld_res[2]
# LED with current-limiting resistor driven by microcontroller pin.
led = Part("Device", 'led', footprint='Diodes_SMD:D_0603')
led_curr_limit = res(value='1K')
led_curr_limit[1, 2] += pic32['RB4'], led['A']
led['K'] += gnd
# Crystal and trim capacitors.
# crystal = Part(xess_lib, 'XTAL4', footprint='XESS:32x25-4', dest=TEMPLATE)
# osc(pic32['OSC1'], pic32['OSC2'], gnd, crystal, cap)
osc(pic32['OSC1'], pic32['OSC2'], gnd) # Use default crystal and trim caps.
# Port for attachment of device programmer.
prg_hdr = Part(pickit3_lib, 'pickit3_hdr', footprint='Pin_Headers:Pin_Header_Straight_1x06')
prg_hdr.ref = 'PRG'
prg_hdr['MCLR'] += pic32['MCLR']
prg_hdr['VDD'] += vdd
prg_hdr['GND'] += gnd
prg_hdr['PGC'] += pic32['PGEC1']
prg_hdr['PGD'] += pic32['PGED1']
# Port for attachment of FPGA programming pins.
port = Part('conn', 'CONN_01x06', footprint='Pin_Headers:Pin_Header_Straight_1x06')
port.ref = 'JTAG'
port[1, 2] += vusb, gnd
port[3] += pic32['SCK1'] # SCK1 output.
port[5] += pic32['RB5'] # PPS: SDI1 input.
port[4] += pic32['RB15'] # PPS: SS1 output.
port[6] += pic32['RA4'] # PPS: SDO1 output.
ERC()
generate_netlist()
| mit | -1,018,157,882,274,345,600 | 37.690909 | 105 | 0.639333 | false | 2.502058 | false | false | false |
viswimmer1/PythonGenerator | data/python_files/33222291/services.py | 1 | 9740 | import socket, time, os, pprint, pickle, sys
from time import mktime
from datetime import datetime, timedelta
# import os.path
from libs.dbconnect import dbConnector
import libs.utils
global cnf, db
cnf = "door.cnf"
# outpath = "%s/output" % os.path.dirname(__file__)
outpath = "%s/output" % sys.path[0]
def main():
global options, db
options = libs.utils.load_options()
db = dbConnector()
print "OK"
if len(sys.argv)>1:
if sys.argv[1] == '--test':
sendToLogger()
sys.exit()
if sys.argv[1] == '--lock':
updateDoor('locked')
sys.exit()
if sys.argv[1] == '--unlock':
updateDoor('unlocked')
sys.exit()
if sys.argv[1] == '--state':
print db.getState()
sys.exit()
if sys.argv[1] == '--sparks':
sendSparklines()
sendToServer()
sys.exit()
if sys.argv[1] == '--health':
checkHealth()
sys.exit()
if sys.argv[1] == '--log':
sendToLogger()
sys.exit()
if sys.argv[1] == '--tweet':
sendTweets()
sys.exit()
if sys.argv[1] == '--test-tweet':
from libs.services._twitter import Tweet
tweet = Tweet()
msg = "d queenvictoria %s" % (db.getState())
if len(sys.argv) == 3:
msg = "%s %s" % (msg, sys.argv[2])
tweet.setMessage(msg)
tweet.send()
sys.exit()
sendUpdates()
def sendUpdates():
global options, db
state = db.getState()
if libs.utils.get_option('state') != state:
libs.utils.set_option('state', state)
stateChanged(state)
sendToLogger()
sendTweets()
sendRSS(state)
sendSparklines()
sendToServer()
def sendToLogger():
global options, db
from libs.services._pachube import Logger
from libs.services._health import Health
if db.getState().lower() == "unlocked":
state = 0
else:
state = 1
anxiety = db.getCurrentScore(3)
health = Health()
data = {
"Anxiety" : anxiety,
"Temperature" : get_int(health.data['temp']),
"Signal" : get_int(health.data['wifi']),
"State" : state,
"Transmit" : int(health.data['traffic']['transmit']),
"Receive" : int(health.data['traffic']['receive']),
}
logger = Logger(data)
def checkHealth():
from libs.services._health import Health
health = Health()
output = []
for k, v in health.data.items():
output.append("%s %s" % (k,v))
output = ', '.join(output)
print "Sending %s" % output
# send a state changed message
from libs.services._twitter import Tweet
tweet = Tweet()
msg = "d queenvictoria %s" % (output)
tweet.setMessage(msg)
tweet.send()
def stateChanged(state):
# update the twitter icon
updateIcon(state)
sys.stderr.write("Door state changed: %s" % state)
# send a state changed message
from libs.services._twitter import Tweet
tweet = Tweet()
msg = "d queenvictoria %s" % (state)
tweet.setMessage(msg)
tweet.send()
# update the door last as it will die if the controller board is disconnected
updateDoor(state)
def updateDoor(state):
from libs.services._door import Door
door = Door()
if state.lower() == 'locked':
door.lock()
elif state.lower() == 'unlocked':
door.unlock()
def sendSparklines():
global options, db
from libs.services._sparkline import Sparkline
print "sendSparklines"
# FOO=`sqlite3 door_wordlist.db "SELECT r1q3 from scores WHERE date > date('$UTC') ORDER BY date DESC limit 144;"`;
    # smoothed results over the last day, 7 days, 3 weeks, 3 months and a year
periods = [21, 1, 7, 91, 365]
for p in periods:
# hours ago, column
scores = db.getScores(24 * p, 3)
print "Number of scores: %d" % len(scores)
if not len(scores):
print "No results for %d" % p
continue
data = []
for a_score in scores:
data.append(a_score[0])
# if we're doing 21 days then calculate the average so we can use it everywhere
if p == 21:
mean = sum(data)/len(data)
print mean
data.reverse()
print "Data length: %d" % len(data)
# every pth item (24th etc) - is this to shorten the spark ?
# data = data[::p]
# instead return an optimal sized array
max_width = 240
interval = int(len(data) / 240)
if ( interval > 1 ):
print "Interval %d" % interval
data = data[::interval]
print "Data length: %d" % len(data)
spark = Sparkline()
spark.setFormat('png')
spark.setData(data)
spark.setOutfile('%s/spark_%dd.png' % (outpath,p))
spark.setXAxis(mean)
if p == 1:
spark.setXTicks(24)
elif p == 7:
spark.setXTicks(7)
elif p == 21:
spark.setXTicks(3)
spark.setFillColour((255,0,0,25))
im = spark.getImage()
# this one is all the results in order today
def sendRSS(state):
global options, db
# a list of days
# 1, 7, 91
print "sendRSS"
from libs.services._rss2 import RSS2
# db = dbConnector()
# happy
d7_q4 = db.getStrongestResult(7, 4)
# fear
# TO FIX why do we have to specify 2 days here ?
d1_q3 = db.getStrongestResult(2, 3)
d7_q3 = db.getStrongestResult(7, 3)
d21_q3 = db.getStrongestResult(21, 3)
d91_q3 = db.getStrongestResults(91, 3, 3)
current_state = []
current_state.append(state)
current_state.append('http://door.just1.name')
# def getScores(self, from_hours_ago=24, quality_column=3, limit=-1):
current_score = db.getCurrentScore(3)
if current_score:
current_score = current_score[0]
current_state.append('[Now] %d [3wk mean] %d' % (current_score, db.getMean(21)))
else:
current_state.append('Unknown')
current_state.append(time.strftime('%Y-%m-%d %H:%M:%S', datetime.today().timetuple()))
# print d7_q4
# print d7_q3
rss = RSS2()
rss.appendItem("Currently", current_state)
rss.appendItem("24h fear", d1_q3)
rss.appendItem("7d fear", d7_q3)
rss.appendItem("3wk fear", d21_q3)
rss.appendItem("7d happy", d7_q4)
for item in d91_q3:
rss.appendItem("3mo fear", item)
print rss.getXML()
rss.saveRSS('%s/door.rss' % outpath)
def updateTwitterFollowing():
from libs.services._twitter import Tweet
tweet = Tweet()
print tweet.updateFollowing()
def sendTweets():
global options, db
# search the db for the weeks happiest news
# $UTC = getUTCByDateAndTimezone(date('Y-m-d H:i:s', strtotime('-7 days')), "Australia/Sydney");
# $SELECT = "
# $S
# FROM articles
# LEFT JOIN ratings_1 ON articles.id = ratings_1.id
# WHERE articles.date_utc > date('$UTC')
# ORDER BY ratings_1.q4 DESC
# LIMIT 1
# ";
# $results3 = $db->query($SELECT);
# the_date = datetime.today() - timedelta(days=7)
# one_week_ago = getUTCDate(time.strftime('%Y-%m-%d %H:%M:%S', the_date.timetuple()), "Australia/Sydney")
# query = 'SELECT articles.title, articles.link, articles.id FROM articles LEFT JOIN ratings_1 ON articles.id = ratings_1.id WHERE date(articles.date_utc) > "%s" ORDER BY ratings_1.q4 DESC LIMIT 1' % (one_week_ago)
# from pysqlite2 import dbapi2 as sqlite
# db = sqlite.connect('door_wordlist.db')
# cursor = db.cursor()
# cursor.execute(query)
# item = cursor.fetchone()
# d7_q4 = db.getStrongestResult(7, 4)
d7_q4 = db.getStrongestMood(7, 'happy')
# d7_q4 = db.getStrongestMoods(7, 'happy', 3)
item = d7_q4
print item
# if we've already announced the joyous news do nothing
last_update = libs.utils.get_option('last_update')
# last_update = ''
print last_update
if last_update == item[4]:
print "Already sent"
return
else:
print "New update %d" % item[4]
# otherwise continue on and send updates
from libs.services._twitter import Tweet
tweet = Tweet()
# update our followers first
tweet.updateFollowing()
# create a short url pointing to the original article
url = tweet.ShortenUrl(item[1], 'trim').strip()
print url
# determine any hashtags to use
title = item[0]
max_length = 140
# trim the title if necessary
if len(title) + len(url) >= max_length:
print 'Trim required.'
title = title[:max_length-len(url)-1]
msg = "%s %s" % (title, url)
print "%s [%d]" % (msg, len(msg))
tweet.setMessage(msg)
tweet.send()
libs.utils.set_option('last_update', item[4])
def updateIcon(state):
# global db
from libs.services._twitter import Tweet
tweet = Tweet()
# if db.isLocked():
# tweet.setImage('http://door.just1.name/wp-content/themes/icon-locked.png')
# tweet.setImage('/home/rossetti/door/icon-locked.jpg')
# else:
# tweet.setImage('http://door.just1.name/wp-content/themes/icon-unlocked.png')
# tweet.setImage('/home/rossetti/door/icon-unlocked.jpg')
tweet.setImage("/home/rossetti/door/icon-%s.jpg" % state.lower())
def sendToServer():
# paramiko has a nice sftp.put(self, localpath, remotepath, callback=None)
# Carroll Oct 1 at 13:28
print "Sending to the server"
import paramiko
import os, glob, hashlib
host = "house.laudanum.net"
port = 2220
try:
transport = paramiko.Transport((host, port))
privatekeyfile = os.path.expanduser('~/.ssh/id_rsa')
mykey = paramiko.RSAKey.from_private_key_file(privatekeyfile)
username = 'rossetti'
transport.connect(username = username, pkey = mykey)
sftp = paramiko.SFTPClient.from_transport(transport)
glob_pattern = "*"
files_copied = 0
for fname in glob.glob(outpath + os.sep + glob_pattern):
is_up_to_date = False
local_file = os.path.join(outpath, fname)
remote_file = '/home/rossetti/door/waikato/output/' + os.path.basename(fname)
try:
if sftp.stat(remote_file):
local_file_data = open(local_file, "rb").read()
remote_file_data = sftp.open(remote_file).read()
                    md1 = hashlib.md5(local_file_data).digest()
                    md2 = hashlib.md5(remote_file_data).digest()
if md1 == md2:
is_up_to_date = True
except:
print "NEW: ", os.path.basename(fname),
if not is_up_to_date:
sftp.put(local_file, remote_file)
files_copied += 1
sftp.close()
transport.close()
except socket.error as inst:
# socket.error: (113, 'No route to host')
pass
except:
print "Couldn't send to server."
def get_int(val):
import re
m = re.match("^\d+", val)
return int(m.group(0))
if __name__ == "__main__":
main()
| gpl-2.0 | -2,251,436,603,034,342,100 | 24.167959 | 214 | 0.666016 | false | 2.707812 | false | false | false |
he0x/FIR | fir_nuggets/models.py | 3 | 1271 | # -*- coding: utf-8 -*-
import datetime
from django.db import models
from django import forms
from django.contrib.auth.models import User
from incidents.models import Incident
class Nugget(models.Model):
date = models.DateTimeField(default=datetime.datetime.now, blank=True)
raw_data = models.TextField()
source = models.TextField()
start_timestamp = models.DateTimeField(default=datetime.datetime.now, blank=True, null=True)
end_timestamp = models.DateTimeField(blank=True, null=True)
interpretation = models.TextField()
incident = models.ForeignKey(Incident)
found_by = models.ForeignKey(User)
def __unicode__(self):
return u"Nugget: {} in {} ({})".format(self.source, self.incident, self.interpretation)
class NuggetForm(forms.ModelForm):
class Meta:
model = Nugget
exclude = ('incident', 'found_by')
widgets = {
'source': forms.TextInput(attrs={'placeholder': 'NTUSER, $MFT, %APPDATA%, RAM, etc...'}),
'interpretation': forms.Textarea(attrs={'cols': 100, 'rows': 5, 'placeholder': 'What the raw data means to the case.'}),
'raw_data': forms.Textarea(attrs={'placeholder': 'Raw data: log lines, directory listings, registry keys...'}),
'end_timestamp': forms.TextInput(attrs={'placeholder': 'Leave blank if atomic event'}),
}
| gpl-3.0 | 722,873,372,476,681,100 | 33.351351 | 123 | 0.718332 | false | 3.416667 | false | false | false |
attm2x/m2x-python-mqtt | m2x_mqtt/v2/commands.py | 1 | 1545 | from m2x_mqtt.v2.resource import Resource
class Command(Resource):
""" Wrapper for AT&T M2X `Commands API <https://m2x.att.com/developer/documentation/v2/commands>`_
"""
COLLECTION_PATH = 'devices/{device_id}/commands'
ITEM_PATH = 'devices/{device_id}/commands/{id}'
ITEMS_KEY = 'commands'
def __init__(self, api, device, **data):
self.device = device
super(Command, self).__init__(api, **data)
def subpath(self, path):
return self.item_path(self.id, device_id=self.device.id) + path
def process(self, **response_data):
""" Method for `Device Marks a Command as Processed <https://m2x.att.com/developer/documentation/v2/commands#Device-Marks-a-Command-as-Processed>`_ endpoint.
:param params: Query parameters passed as keyword arguments. View M2X API Docs for listing of available parameters.
:return: The API response, see M2X API docs for details
:rtype: dict
"""
return self.api.post(self.subpath('/process'), data=response_data)
def reject(self, **response_data):
""" Method for `Device Marks a Command as Rejected <https://m2x.att.com/developer/documentation/v2/commands#Device-Marks-a-Command-as-Rejected>`_ endpoint.
:param params: Query parameters passed as keyword arguments. View M2X API Docs for listing of available parameters.
:return: The API response, see M2X API docs for details
:rtype: dict
"""
return self.api.post(self.subpath('/reject'), data=response_data)
| mit | 8,717,984,222,571,933,000 | 43.142857 | 165 | 0.671197 | false | 3.652482 | false | false | false |
cvast/arches | tests/ui/pages/form_page.py | 1 | 1876 | import re
from base_page import BasePage, script_returns_true
from page_locators import FormPageLocators as locators
from selenium.webdriver.support import expected_conditions as EC
from arches.urls import uuid_regex
class FormPage(BasePage):
"""
class to initialize the form-manager page
"""
def __init__(self, driver, live_server_url, graph_id):
super(FormPage, self).__init__(driver, live_server_url, '/graph/' + graph_id + '/form_manager')
def add_new_form(self):
"""
Clicks on the add new form button and returns a new form_id
"""
self.open()
form_id = None
self.wait.until(
EC.element_to_be_clickable(locators.ADD_FORM_BUTTON)
).click()
try:
form_id = self.wait.until(
script_returns_true('''
try{
var matches = window.location.pathname.match(/(''' + uuid_regex + ''')/i);
console.log(window.location)
if (matches && matches.length === 2){
console.log(matches)
return matches[1];
}else{
return false;
}
}catch(err){
return false;
}
''')
)
except:
pass
return form_id
def configure_form(self, form_name):
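        """ Adds a card to the form, renames the form and saves the edits. """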
self.wait.until(
EC.element_to_be_clickable(locators.ADD_FORM_CARD_BUTTON)
).click()
form_name_input = self.driver.find_element(*locators.FORM_NAME_INPUT)
form_name_input.clear()
form_name_input.send_keys(form_name)
self.wait.until(
EC.element_to_be_clickable(locators.SAVE_EDITS_BUTTON)
).click()
| agpl-3.0 | 4,862,483,298,900,809,000 | 31.344828 | 103 | 0.513859 | false | 4.234763 | false | false | false |
nuagenetworks/vspk-python | vspk/v6/nupgexpressiontemplate.py | 1 | 9684 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from bambou import NURESTObject
class NUPGExpressionTemplate(NURESTObject):
""" Represents a PGExpressionTemplate in the VSD
Notes:
Policy Group Expression Template is an expression consisting of policy groups defined at Domain Template or L2 Domain Template
"""
__rest_name__ = "pgexpressiontemplate"
__resource_name__ = "pgexpressiontemplates"
## Constants
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a PGExpressionTemplate instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> pgexpressiontemplate = NUPGExpressionTemplate(id=u'xxxx-xxx-xxx-xxx', name=u'PGExpressionTemplate')
>>> pgexpressiontemplate = NUPGExpressionTemplate(data=my_dict)
"""
super(NUPGExpressionTemplate, self).__init__()
# Read/Write Attributes
self._name = None
self._last_updated_by = None
self._last_updated_date = None
self._description = None
self._entity_scope = None
self._creation_date = None
self._owner = None
self._expression = None
self._external_id = None
self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=True, is_unique=True)
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="expression", remote_name="expression", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
self._compute_args(**kwargs)
# Properties
@property
def name(self):
""" Get name value.
Notes:
Name of the Policy Group Expression Template
"""
return self._name
@name.setter
def name(self, value):
""" Set name value.
Notes:
Name of the Policy Group Expression Template
"""
self._name = value
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def last_updated_date(self):
""" Get last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
return self._last_updated_date
@last_updated_date.setter
def last_updated_date(self, value):
""" Set last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
self._last_updated_date = value
@property
def description(self):
""" Get description value.
Notes:
Description of the Policy Group Expression Template
"""
return self._description
@description.setter
def description(self, value):
""" Set description value.
Notes:
Description of the Policy Group Expression Template
"""
self._description = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def creation_date(self):
""" Get creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
return self._creation_date
@creation_date.setter
def creation_date(self, value):
""" Set creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
self._creation_date = value
@property
def owner(self):
""" Get owner value.
Notes:
Identifies the user that has created this object.
"""
return self._owner
@owner.setter
def owner(self, value):
""" Set owner value.
Notes:
Identifies the user that has created this object.
"""
self._owner = value
@property
def expression(self):
""" Get expression value.
Notes:
Actual Policy Group Expression like (PG1 || PG2) && !PG3. Allowed operators are && (AND), ! (NOT), II (OR) and ( )
"""
return self._expression
@expression.setter
def expression(self, value):
""" Set expression value.
Notes:
Actual Policy Group Expression like (PG1 || PG2) && !PG3. Allowed operators are && (AND), ! (NOT), II (OR) and ( )
"""
self._expression = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
| bsd-3-clause | -2,133,729,240,394,213,600 | 29.07764 | 175 | 0.584056 | false | 4.747059 | false | false | false |
bepo13/destinydb-stl-generator-v0 | src/DataParse.py | 1 | 1078 | import io
import struct
import numpy as np
class DataParseClass:
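    """Sequential reader of little-endian binary data (ints, floats, UTF strings, numpy vectors) from an in-memory buffer."""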
def __init__(self, byteData):
self.data = io.BytesIO(byteData)
return
def readUTF(self):
length = int.from_bytes(self.data.read(2), byteorder='little')
return self.data.read(length).decode('utf-8')
def readInt8(self):
return int.from_bytes(self.data.read(1), byteorder='little')
def readInt16(self):
return int.from_bytes(self.data.read(2), byteorder='little')
def readInt32(self):
return int.from_bytes(self.data.read(4), byteorder='little')
def readFloat(self):
return struct.unpack('f', self.data.read(4))[0]
def readVector2D(self):
return np.array([self.readFloat(), self.readFloat()], dtype='float')
def readVector3D(self):
return np.array([self.readFloat(), self.readFloat(), self.readFloat()], dtype='float')
def readVector4D(self):
return np.array([self.readFloat(), self.readFloat(), self.readFloat(), self.readFloat()], dtype='float') | mit | -6,224,637,056,171,936,000 | 31.69697 | 112 | 0.62616 | false | 3.641892 | false | false | false |
ptphp/PyLib | src/sniffer/pyhttp/link_base.py | 1 | 3799 | #encoding=UTF-8
"""
@author [email protected]
@link http://www.ideawu.net/
"""
import new, socket
from buffer import *
LINK_ROLE_SERVER = 1
LINK_ROLE_CLIENT = 2
LINK_ROLE_ACCEPT = 3
class LinkBase:
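    """Base class for a buffered socket link (server, client or accepted connection); subclasses provide PacketClass and packet framing."""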
def __init__(self, sock=None):
self.id = -1
self.fd = None
self.sock = None
self.local_addr = '' # ip:port
self.remote_addr = '' # ip:port
self.parent = None
self.role = None
self.ptr = None
self.alive = False
self.recv_pkt = None
self.recv_buf = Buffer();
self.send_buf = Buffer();
def is_client(self):
return self.role == LINK_ROLE_CLIENT
def is_server(self):
return self.role == LINK_ROLE_SERVER
def is_accept(self):
return self.role == LINK_ROLE_ACCEPT
def listen(self, host, port, backlog=128):
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, port))
sock.listen(backlog)
except BaseException, e:
return False
self.role = LINK_ROLE_SERVER
self.set_sock(sock)
# TODO: accept_all(self):
def accept(self):
sock, addr = self.sock.accept()
link = new.instance(self.__class__)
link.__init__(sock)
link.role = LINK_ROLE_ACCEPT
link.parent = self
link.remote_addr = "%s:%d" % sock.getpeername()
return link
def connect(self, host, port):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((host, port))
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
self.role = LINK_ROLE_CLIENT
self.set_sock(sock)
self.remote_addr = "%s:%d" % sock.getpeername()
def set_sock(self, sock):
self.fd = sock.fileno()
self.sock = sock
self.alive = True
self.local_addr = "%s:%d" % sock.getsockname()
def is_alive(self):
return self.alive
def close(self):
self.alive = False
try:
self.sock.close()
self.sock = None
except:
pass
def fileno(self):
return self.fd
""" 判断是否已经读就绪 """
def recv_ready(self):
return self.recv_pkt.ready()
""" 进行一次网络读操作 """
def net_recv(self, bufsize=8192):
try:
data = self.sock.recv(bufsize)
#data = self.sock.recv(3)
#print 'link <-', repr(data)
except BaseException,e:
return -1
if not data:
return 0
self.recv_buf.append(data)
return len(data)
""" 进行一次网络写操作
@return
-1: 错误
0 : 建议调用者关闭连接
"""
def net_send(self):
try:
            sent = self.sock.send(self.send_buf.base)
            #sent = self.sock.send(self.send_buf.base[0:3])
            #print 'link ->', repr(self.send_buf.base[0:sent])
        except BaseException,e:
            return -1
        self.send_buf.consume(sent)
        return sent
""" 非阻塞发送(数据拷贝到发送缓冲) """
def async_send(self, data):
return self.send(data, urgent=False)
""" 非阻塞读取 """
def async_recv(self):
return self.recv(block=False)
""" 见 send_packet, 只传入要发送的报体 """
def send(self, data, urgent=True):
packet = self.PacketClass()
packet.set_body(data)
ret = self.send_packet(packet, urgent)
return ret
""" 见 recv_packet, 只返回报体部分 """
def recv(self, block=True):
ret = self.recv_packet(block)
if ret == -1:
return -1
elif ret == None:
return None
else:
return ret.body
""" 非阻塞的 send_packet """
def async_send_packet(self, packet):
return self.send_packet(packet, urgent=False)
""" 非阻塞的 recv_packet """
def async_recv_packet(self):
return self.recv_packet(block=False)
""" 将报文写到发送缓冲里
@param urgent: 若为True, 则等待网络发送完毕才返回. 默认等待.
@return
-1: 错误
"""
def send_packet(self, packet, urgent=True):
data = packet.encode()
self.send_buf.append(data)
if urgent:
while self.send_buf.len() > 0:
if self.net_send() == -1:
return -1
return len(data)
| apache-2.0 | 7,084,317,711,486,573,000 | 20.39521 | 61 | 0.653792 | false | 2.394772 | false | false | false |
GeographicaGS/urbo-pgsql-connector | scripts/delete_subscr_list.py | 1 | 3879 | # -*- coding: utf-8 -*-
#
# Copyright 2017 Telefónica Digital España S.L.
#
# This file is part of URBO PGSQL connector.
#
# URBO PGSQL connector is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# URBO PGSQL connector is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with URBO PGSQL connector. If not, see http://www.gnu.org/licenses/.
#
# For those usages not covered by this license please contact with
# iot_support at tid dot es
import requests
import yaml
import json
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
try:
"""
Remove InsecureRequestWarning for unverified HTTPS requests.
For Requests library version < 2.4 an error raise in this import.
"""
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
except ImportError as err:
# raise ImportError("{}\nYou need to upgrade Requests Library".format(err))
pass
class GetAuthTokenException(Exception):
pass
class DeleteSubscriptionException(Exception):
pass
def getAuthToken(url_authtk, fl_fw_auth, timeout=10, ssl=False):
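    # Reads the JSON credentials file and POSTs it to the identity endpoint;
    # the service returns the session token in the 'X-Subject-Token' response
    # header together with its expiry date in the JSON body.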
try:
headers_authtk = {'Content-Type': 'application/json'}
with open(fl_fw_auth) as fw_json_auth:
json_data = json.load(fw_json_auth)
payload = json.dumps(json_data)
resp = requests.post(url_authtk, headers=headers_authtk,
data=payload, verify=ssl, timeout=timeout)
if resp.ok:
auth_token = resp.headers.get('x-subject-token')
resp_json = resp.json().get('token')
exp_date = resp_json.get('expires_at')
return(auth_token, exp_date)
else:
raise GetAuthTokenException("Error: {}".format(resp.json()))
except Exception as err:
print("Error: {}".format(err))
def deleteSubscriptions(subs, url_subs, fiw_serv, fiw_subsserv, authtoken, timeout=10, ssl=False):
try:
headers_authtk = {
'Content-Type': 'application/json',
'Accept': 'application/json',
'Fiware-Service': fiw_serv,
'Fiware-ServicePath': fiw_subsserv,
'x-auth-token': authtoken
}
for subs_id in subs:
json_data = {
"subscriptionId": subs_id
}
payload = json.dumps(json_data)
resp = requests.post(url_subs, headers=headers_authtk,
data=payload, verify=ssl, timeout=timeout)
if resp.ok:
#print resp.json()
print("{0}. Deleted subscription: {1}".format(resp, subs_id))
else:
print(resp)
raise DeleteSubscriptionException("Error: {}".format(resp.json()))
except Exception as err:
print("Error: {}".format(err))
def main():
fl_fw_auth = "fiware_auth.json"
url_authtk = 'https://195.235.93.224:15001/v3/auth/tokens'
auth_token, exp_date = getAuthToken(url_authtk, fl_fw_auth)
print(auth_token)
url_subs = 'https://195.235.93.224:10027/v1/unsubscribeContext'
fiw_serv = 'sc_smart_region_andalucia'
fiw_subsserv = '/and_sr_torrox'
subs = ['580f43b9fdc8301538a65ab4']
deleteSubscriptions(subs, url_subs, fiw_serv, fiw_subsserv, auth_token)
if __name__ == '__main__':
main()
| agpl-3.0 | -1,041,767,896,036,808,600 | 31.308333 | 98 | 0.640186 | false | 3.695901 | false | false | false |
CenterForOpenScience/isp | scripts/drag_scripts/drag_cat9_helper.py | 1 | 4011 | import time
import pyautogui
HESITATE = 3 # time to wait before starting clickstorm
pyautogui.PAUSE = .06 # going faster may cause issues with placement
SPEED = .1
positions = []
print("If you have a dual monitor set up, make sure the browser window is "
"located on your main monitor.\n If you are using a screen and laptop, "
"the laptop is your main monitor.")
print("Please align your browser so you can see all category boxes. While you "
"input and follow these commands, do not move or scroll your browswer "
"window!")
input("Please mouse over the top most draggable option in the leftmost "
"category box. It should be labeled with 'Uncharacteristic'. Then press "
"return.")
left_position = pyautogui.position()
input("Please mouse over the top most draggable option in the middle category "
"box. It should be labeled with 'Neutral'. Then press return.")
center_position = pyautogui.position()
input("Please mouse over the top most draggable option in the rightmost "
"category box. It should be labeled with 'Characteristic'. Then press "
"return.")
right_position = pyautogui.position()
print("Now, for each of the bottom category boxes, mouse over them and press "
"return, then move onto the next box. There should be 9 boxes in total.")
for categorynum in range(9):
input(categorynum)
positions.append(pyautogui.position())
print("Please switch back to your browser window. The script will begin "
"in {} seconds".format(HESITATE))
time.sleep(HESITATE)
# The below counts in the array may look odd, but it was just the
# easiest way to break the containers. Since the first one needs 3 in
# it, second needs 6 in it etc. it adds up to 30 because at the end
# of running the script for cat 3, each container will have 30 in
# it. In the middle part, the 5 is there because that container has 5
# slots in it left over after 10 are placed in it from the first loop.
# The general idea is, starting on the left, place cards into the
# containers till full, then move onto the next container. if a
# container fills up or contains no more cards, move onto the next.
# The first array in the zip corresponds to the amount that needs to
# go in the below containers on cat9, and adds up the 30. The range(4)
# is there to grab the appropriate offset for containers 1-4 etc.
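# Sanity check on the card counts used below (each pass distributes 30 cards):
#   left-hand pass:   3 + 6 + 11 + 10 = 30
#   middle pass:      5 + 20 + 5      = 30
#   right-hand pass:  10 + 11 + 6 + 3 = 30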
for card_count, pos_count in zip([3, 6, 11, 10], range(4)):
offset = (- left_position[0] + positions[pos_count][0],
- left_position[1] + positions[pos_count][1])
for k in range(card_count):
pyautogui.moveTo(left_position[0], left_position[1], SPEED)
pyautogui.click()
pyautogui.dragRel(offset[0], offset[1], SPEED, button='left')
pyautogui.click()
# The first array in the zip corresponds to the amount that needs to
# go in the below containers on cat9, and adds up the 30. The
# range(3,6) is there to grab the appropriate offset for containers
# 3-6 etc.
for card_count, pos_count in zip([5, 20, 5], range(3, 6)):
offset = (- center_position[0] + positions[pos_count][0],
- center_position[1] + positions[pos_count][1])
for k in range(card_count):
pyautogui.moveTo(center_position[0], center_position[1], SPEED)
pyautogui.click()
pyautogui.dragRel(offset[0], offset[1], SPEED, button='left')
pyautogui.click()
# The first array in the zip corresponds to the amount that needs to
# go in the below containers on cat9, and adds up the 30. The range(4)
# is there to grab the appropriate offset for containers 5-6 etc.
for card_count, pos_count in zip([10, 11, 6, 3], range(5, 9)):
offset = (- right_position[0] + positions[pos_count][0],
- right_position[1] + positions[pos_count][1])
for k in range(card_count):
pyautogui.moveTo(right_position[0], right_position[1], SPEED)
pyautogui.click()
pyautogui.dragRel(offset[0], offset[1], SPEED, button='left')
pyautogui.click()
| apache-2.0 | 9,005,574,204,654,823,000 | 44.579545 | 79 | 0.695836 | false | 3.527704 | false | false | false |
youknowone/transtool | transtooltests/test_dictionary.py | 1 | 1718 |
#-*- coding: utf-8 -*-
from transtool.dictionary import INILoader
from transtool.dictionary.models import KoreanPackage
from transtool.dictionary.exc import WordNotFound, MultipleCandidates
def assertlog(cond, *logitems):
try:
assert cond
except AssertionError as e:
if logitems:
for item in logitems:
print item,
print ''
def test_dictionary():
l = INILoader('test.ini')
package = l.gen_package()
package.build_index()
w = package.get('list')
assertlog(package.get('list').candidate == u'리스트', package.get('list'), u'리스트')
assert package.get('tuple').candidate == u'튜플'
assert package.get('list', 'python').candidate == u'리스트'
try:
package.get('list', 'web')
except WordNotFound:
pass
try:
package.get('form')
except MultipleCandidates:
pass
return package
def test_korean_dictionary():
l = INILoader('test.ini', Package=KoreanPackage)
package = l.gen_package()
package.build_index()
assert package.get(u'list는').candidate == u'리스트는'
assert package.get(u'list가').candidate == u'리스트가'
assert package.get(u'list를').candidate == u'리스트를'
assert package.get(u'tuple은').candidate == u'튜플은'
assert package.get(u'tuple이').candidate == u'튜플이'
assert package.get(u'tuple을').candidate == u'튜플을'
assert package.get(u'list을').candidate == u'리스트를'
assert package.get(u'tuple가').candidate == u'튜플이'
assert package.get(u'dictionary는').candidate == u'딕셔너리는'
return package
if __name__ == '__main__':
test_dictionary()
| gpl-3.0 | 1,862,279,873,957,463,800 | 28.309091 | 83 | 0.640199 | false | 2.818182 | false | false | false |
rougier/dana | doc/examples/chapter-4/example-3.py | 1 | 2413 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright INRIA
# Contributors: Nicolas P. Rougier ([email protected])
#
# DANA is a computing framework for the simulation of distributed,
# asynchronous, numerical and adaptive models.
#
# This software is governed by the CeCILL license under French law and abiding
# by the rules of distribution of free software. You can use, modify and/ or
# redistribute the software under the terms of the CeCILL license as circulated
# by CEA, CNRS and INRIA at the following URL
# http://www.cecill.info/index.en.html.
#
# As a counterpart to the access to the source code and rights to copy, modify
# and redistribute granted by the license, users are provided only with a
# limited warranty and the software's author, the holder of the economic
# rights, and the successive licensors have only limited liability.
#
# In this respect, the user's attention is drawn to the risks associated with
# loading, using, modifying and/or developing or reproducing the software by
# the user in light of its specific status of free software, that may mean that
# it is complicated to manipulate, and that also therefore means that it is
# reserved for developers and experienced professionals having in-depth
# computer knowledge. Users are therefore encouraged to load and test the
# software's suitability as regards their requirements in conditions enabling
# the security of their systems and/or data to be ensured and, more generally,
# to use and operate it in the same conditions as regards security.
#
# The fact that you are presently reading this means that you have had
# knowledge of the CeCILL license and that you accept its terms.
# -----------------------------------------------------------------------------
from dana import *
n = 50
k = 2.5
G = zeros((n,n), 'dV/dt = k*(N/4-V); N')
K = np.zeros((3,3))*np.NaN
K[0,1] = K[1,0] = K[1,2] = K[2,1] = 1
print K
SparseConnection(G('V'), G('N'), K)
t, dt = 600.0, 0.1
for i in range(int(t/dt)):
G.evaluate(dt=dt)
G.V[0,:] = 0
G.V[:,n-1] = G.V[n-1,:] = G.V[:,0] = 1
fig = plt.figure(figsize=(10,7.5))
plt.imshow(G.V, cmap=plt.cm.hot, origin='lower',
interpolation='bicubic', vmin=0, vmax=1)
plt.colorbar()
CS = plt.contour(G.V, 10, colors='k')
plt.clabel(CS, inline=1, fontsize=16)
plt.grid(), plt.show()
| bsd-3-clause | 3,493,214,056,072,307,700 | 41.333333 | 79 | 0.67385 | false | 3.384292 | false | false | false |
kajackdfw/flaskirover | hardware_drivers/motor/raspirobot_board_v3.py | 1 | 1736 | from rrb3 import *
from time import sleep
class Motor:
    settings = {}
    uis = {}
try:
rr = RRB3(8, 6)
settings['drive'] = 'active'
uis['drive'] = 'active'
except NameError:
print(" - No RaspiRover library available.")
settings['drive'] = 'disabled'
uis['drive'] = 'disabled'
def __init__(self, start_settings):
self.settings['motor_mode'] = 'immobile'
self.can_rotate_in_place = False
def test(self):
track_right = 0.75
track_left = 0.75
        self.rr.set_motors(track_right, 0.5, track_left, 0.5)
        sleep(2)
        self.rr.set_motors(0, 0.5, 0, 0.5)
def set_setting(self, setting_name, new_value, category, specs):
print(str(specs))
if not setting_name in specs:
return 0
else:
old_value = self.settings[setting_name]
print(' ? Old value = ' + old_value)
if specs[setting_name]['type'] == 'int':
self.settings[setting_name] = int(new_value)
elif specs[setting_name]['type'] == 'float':
self.settings[setting_name] = float(new_value)
            elif specs[setting_name]['type'] == 'bool' and new_value.upper() == 'TRUE':
self.settings[setting_name] = True
            elif specs[setting_name]['type'] == 'bool' and new_value.upper() == 'FALSE':
self.settings[setting_name] = False
else:
self.settings[setting_name] = new_value
# Does this change need a page redraw
if 'refresh' in specs[setting_name]:
return 1
else:
return 0
def get_settings(self):
return self.settings
| gpl-3.0 | -299,814,461,353,550,800 | 31.754717 | 92 | 0.534562 | false | 3.773913 | false | false | false |
NMGRL/pychron | pychron/dvc/dvc_persister.py | 1 | 29201 | # ===============================================================================
# Copyright 2015 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import hashlib
import os
import shutil
from datetime import datetime
from apptools.preferences.preference_binding import bind_preference
from git.exc import GitCommandError
# ============= enthought library imports =======================
from sqlalchemy.exc import OperationalError, DatabaseError
from traits.api import Instance, Bool, Str
from uncertainties import std_dev, nominal_value
from yaml import YAMLError
from pychron.core.helpers.binpack import encode_blob, pack
from pychron.core.yaml import yload
from pychron.dvc import dvc_dump, analysis_path, repository_path, NPATH_MODIFIERS
from pychron.experiment.automated_run.persistence import BasePersister
from pychron.git_archive.repo_manager import GitRepoManager
from pychron.paths import paths
from pychron.pychron_constants import DVC_PROTOCOL, NULL_STR, ARGON_KEYS, ARAR_MAPPING, EXTRACTION_ATTRS, \
META_ATTRS, NULL_EXTRACT_DEVICES, POSTCLEANUP, PRECLEANUP, CLEANUP, EXTRACT_UNITS, EXTRACT_VALUE, DURATION, WEIGHT, \
CRYO_TEMP
def format_repository_identifier(project):
return project.replace('/', '_').replace('\\', '_')
def spectrometer_sha(settings, src, defl, gains):
sha = hashlib.sha1()
for d in settings + (src, defl, gains):
for k, v in sorted(d.items()):
sha.update(k.encode('utf-8'))
sha.update(str(v).encode('utf-8'))
return sha.hexdigest()
class DVCPersister(BasePersister):
active_repository = Instance(GitRepoManager)
dvc = Instance(DVC_PROTOCOL)
use_isotope_classifier = Bool(False)
use_uuid_path_name = Bool(True)
# isotope_classifier = Instance(IsotopeClassifier, ())
stage_files = Bool(True)
default_principal_investigator = Str
_positions = None
save_log_enabled = Bool(False)
arar_mapping = None
def __init__(self, bind=True, *args, **kw):
super(DVCPersister, self).__init__(*args, **kw)
if bind:
bind_preference(self, 'use_uuid_path_name', 'pychron.experiment.use_uuid_path_name')
self._load_arar_mapping()
def per_spec_save(self, pr, repository_identifier=None, commit=False, commit_tag=None, push=True):
self.per_spec = pr
if repository_identifier:
self.initialize(repository_identifier, False)
self.pre_extraction_save()
self.pre_measurement_save()
self.post_extraction_save()
self.post_measurement_save(commit=commit, commit_tag=commit_tag, push=push)
def push(self):
# push changes
self.dvc.push_repository(self.active_repository)
# push commit
self.dvc.meta_push()
def initialize(self, repository, pull=True):
"""
setup git repos.
repositories are guaranteed to exist. The automated run factory clones the required projects
on demand.
:return:
"""
self.debug('^^^^^^^^^^^^^ Initialize DVCPersister {} pull={}'.format(repository, pull))
self.dvc.initialize()
repository = format_repository_identifier(repository)
self.active_repository = repo = GitRepoManager()
root = repository_path(repository)
repo.open_repo(root)
remote = 'origin'
if repo.has_remote(remote) and pull:
self.info('pulling changes from repo: {}'.format(repository))
try:
repo.pull(remote=remote, use_progress=False, use_auto_pull=self.dvc.use_auto_pull)
except GitCommandError:
self.warning('failed pulling changes')
self.debug_exception()
def pre_extraction_save(self):
pass
def post_extraction_save(self):
self.info('================= post extraction save started =================')
per_spec = self.per_spec
rblob = per_spec.response_blob # time vs measured response
oblob = per_spec.output_blob # time vs %output
sblob = per_spec.setpoint_blob # time vs requested
gp = per_spec.grain_polygons
if rblob is not None:
rblob = encode_blob(rblob)
if oblob is not None:
oblob = encode_blob(oblob)
if sblob is not None:
sblob = encode_blob(sblob)
if gp:
gp = [encode_blob(g) for g in gp]
obj = {'measured_response': rblob,
'requested_output': oblob,
'setpoint_stream': sblob,
'snapshots': per_spec.snapshots,
'videos': per_spec.videos,
'grain_polygons': gp,
'extraction_context': per_spec.extraction_context}
pid = per_spec.pid
if pid:
obj['pid'] = pid
for e in EXTRACTION_ATTRS:
v = getattr(per_spec.run_spec, e)
obj[e] = v
if not per_spec.positions:
ps = [dict()]
else:
ps = []
for i, pp in enumerate(per_spec.positions):
pos, x, y, z = None, None, None, None
if isinstance(pp, tuple):
if len(pp) == 2:
x, y = pp
elif len(pp) == 3:
x, y, z = pp
else:
pos = pp
try:
ep = per_spec.extraction_positions[i]
x = ep[0]
y = ep[1]
if len(ep) == 3:
z = ep[2]
except IndexError:
self.debug('no extraction position for {}'.format(pp))
except TypeError:
self.debug('invalid extraction position')
try:
pos = int(pos)
except BaseException:
pos = None
pd = {'x': x, 'y': y, 'z': z, 'position': pos, 'is_degas': per_spec.run_spec.identifier == 'dg'}
ps.append(pd)
obj['positions'] = ps
self._positions = ps
hexsha = self.dvc.get_meta_head()
obj['commit'] = str(hexsha)
path = self._make_path(modifier='extraction')
dvc_dump(obj, path)
self.info('================= post extraction save finished =================')
def pre_measurement_save(self):
pass
def post_measurement_save(self, commit=True, commit_tag='COLLECTION', push=True):
"""
save
- analysis.json
- analysis.monitor.json
check if unique spectrometer.json
commit changes
push changes
:return:
"""
self.info('================= post measurement save started =================')
ret = True
ar = self.active_repository
# save spectrometer
spec_sha = self._get_spectrometer_sha()
spec_path = os.path.join(ar.path, '{}.json'.format(spec_sha))
if not os.path.isfile(spec_path):
self._save_spectrometer_file(spec_path)
# self.dvc.meta_repo.save_gains(self.per_spec.run_spec.mass_spectrometer,
# self.per_spec.gains)
# save analysis
if not self.per_spec.timestamp:
timestamp = datetime.now()
else:
timestamp = self.per_spec.timestamp
# check repository identifier before saving
# will modify repository to NoRepo if repository_identifier does not exist
self._check_repository_identifier()
self._save_analysis(timestamp)
# save monitor
self._save_monitor()
# save peak center
self._save_peak_center(self.per_spec.peak_center)
# stage files
dvc = self.dvc
if self.stage_files:
if commit:
try:
ar.smart_pull(accept_their=True)
paths = [spec_path, ] + [self._make_path(modifier=m) for m in NPATH_MODIFIERS]
for p in paths:
if os.path.isfile(p):
ar.add(p, commit=False)
else:
self.debug('not at valid file {}'.format(p))
# commit files
ar.commit('<{}>'.format(commit_tag))
# commit default data reduction
add = False
p = self._make_path('intercepts')
if os.path.isfile(p):
ar.add(p, commit=False)
add = True
p = self._make_path('baselines')
if os.path.isfile(p):
ar.add(p, commit=False)
add = True
if add:
ar.commit('<ISOEVO> default collection fits')
for pp, tag, msg in (('blanks', 'BLANKS',
'preceding {}'.format(self.per_spec.previous_blank_runid)),
('icfactors', 'ICFactor', 'default')):
p = self._make_path(pp)
if os.path.isfile(p):
ar.add(p, commit=False)
ar.commit('<{}> {}'.format(tag, msg))
if push:
# push changes
dvc.push_repository(ar)
# update meta
dvc.meta_pull(accept_our=True)
dvc.meta_commit('repo updated for analysis {}'.format(self.per_spec.run_spec.runid))
if push:
# push commit
dvc.meta_push()
except GitCommandError as e:
self.warning(e)
if self.confirmation_dialog('NON FATAL\n\n'
'DVC/Git upload of analysis not successful.'
'Do you want to CANCEL the experiment?\n',
timeout_ret=False,
timeout=30):
ret = False
with dvc.session_ctx():
try:
ret = self._save_analysis_db(timestamp) and ret
except DatabaseError as e:
self.warning_dialog('Fatal Error. Cannot save analysis to database. Cancelling '
'experiment. {}'.format(e))
ret = False
self.info('================= post measurement save finished =================')
return ret
def save_run_log_file(self, path):
if self.save_enabled and self.save_log_enabled:
self.debug('saving run log file')
npath = self._make_path('logs', '.log')
shutil.copyfile(path, npath)
ar = self.active_repository
ar.smart_pull(accept_their=True)
ar.add(npath, commit=False)
ar.commit('<COLLECTION> log')
self.dvc.push_repository(ar)
# private
def _load_arar_mapping(self):
"""
Isotope: IsotopeKey
example arar_mapping.yaml
{
Ar40: 'Ar40',
Ar39: 'Ar39',
Ar38: 'Ar38',
Ar37: 'Ar37',
Ar36: 'Ar36L1'
}
:return:
"""
p = os.path.join(paths.setup_dir, 'arar_mapping.yaml')
if os.path.isfile(p):
self.debug('loading arar mapping from {}'.format(p))
# with open(p, 'r') as rfile:
try:
obj = yload(p)
except YAMLError:
obj = {}
for k in ARGON_KEYS:
if k not in obj:
self.warning('Invalid arar_mapping.yaml file. required keys={}'.format(ARGON_KEYS))
return
self.arar_mapping = obj
def _check_repository_identifier(self):
repo_id = self.per_spec.run_spec.repository_identifier
db = self.dvc.db
repo = db.get_repository(repo_id)
if repo is None:
self.warning('No repository named ="{}" changing to NoRepo'.format(repo_id))
self.per_spec.run_spec.repository_identifier = 'NoRepo'
repo = db.get_repository('NoRepo')
if repo is None:
db.add_repository('NoRepo', self.default_principal_investigator)
def _save_analysis_db(self, timestamp):
ps = self.per_spec
rs = ps.run_spec
d = {k: getattr(rs, k) for k in ('uuid', 'analysis_type', 'aliquot',
'increment', 'mass_spectrometer',
WEIGHT,
CLEANUP, PRECLEANUP, POSTCLEANUP, CRYO_TEMP,
DURATION, EXTRACT_VALUE, EXTRACT_UNITS)}
d['comment'] = rs.comment[:200] if rs.comment else ''
ed = rs.extract_device
if ed in NULL_EXTRACT_DEVICES:
d['extract_device'] = 'No Extract Device'
else:
d['extract_device'] = ed
d['timestamp'] = timestamp
# save script names
d['measurementName'] = ps.measurement_name
d['extractionName'] = ps.extraction_name
d['experiment_type'] = self.per_spec.experiment_type
db = self.dvc.db
an = db.add_analysis(**d)
if an is None:
self.warning('Failed adding analysis to database. See full log for error')
return
# save currents
self._save_currents(an)
# for iso in ps.isotope_group.isotopes.values():
# self.add_current(iso)
# db.add_analysis_result(an, iso)
# save media
if ps.snapshots:
for p in ps.snapshots:
db.add_media(p, an)
if ps.videos:
for p in ps.videos:
db.add_media(p, an)
if self._positions:
if rs.load_name and rs.load_name != NULL_STR:
load_name = rs.load_name
load_holder = rs.load_holder
db.add_load(load_name, load_holder, rs.username)
db.flush()
db.commit()
for position in self._positions:
self.debug('adding measured position {}'.format(position))
if not db.add_measured_position(an, load=load_name, **position):
self.warning('failed adding position {}, load={}'.format(position, load_name))
# all associations are handled by the ExperimentExecutor._retroactive_experiment_identifiers
# *** _retroactive_experiment_identifiers is currently disabled ***
if ps.use_repository_association:
db.add_repository_association(rs.repository_identifier, an)
self.debug('get identifier "{}"'.format(rs.identifier))
pos = db.get_identifier(rs.identifier)
self.debug('setting analysis irradiation position={}'.format(pos))
if pos is None:
an.simple_identifier=int(rs.identifier)
else:
an.irradiation_position = pos
t = ps.tag
db.flush()
change = db.add_analysis_change(tag=t)
an.change = change
db.commit()
return True
def _save_currents(self, dban):
dvc = self.dvc
if dvc.update_currents_enabled:
ps = self.per_spec
db = dvc.db
for key, iso in ps.isotope_group.isotopes.items():
param = db.add_parameter('{}_intercept'.format(key))
db.add_current(dban, iso.value, iso.error, param, iso.units)
param = db.add_parameter('{}_blank'.format(key), iso.blank.units)
db.add_current(dban, iso.blank.value, iso.blank.error, param, iso.blank.units)
param = db.add_parameter('{}_bs_corrected'.format(key))
v = iso.get_baseline_corrected_value()
db.add_current(dban, nominal_value(v), std_dev(v), param, iso.units)
param = db.add_parameter('{}_ic_corrected'.format(key))
v = iso.get_ic_corrected_value()
db.add_current(dban, nominal_value(v), std_dev(v), param, iso.units)
param = db.add_parameter(key)
v = iso.get_non_detector_corrected_value()
db.add_current(dban, nominal_value(v), std_dev(v), param, iso.units)
param = db.add_parameter(iso.baseline.name)
db.add_current(dban, iso.baseline.value, iso.baseline.error, param, iso.baseline.units)
param = db.add_parameter('{}_n'.format(iso.baseline.name))
db.add_current(dban, iso.baseline.n, None, param, 'int')
param = db.add_parameter('{}_n'.format(iso.name))
db.add_current(dban, iso.n, None, param, 'int')
def _save_analysis(self, timestamp):
isos = {}
dets = {}
signals = []
baselines = []
sniffs = []
blanks = {}
intercepts = {}
cbaselines = {}
icfactors = {}
endianness = '>'
per_spec = self.per_spec
source = {'emission': per_spec.emission,
'trap': per_spec.trap}
clf = None
if self.use_isotope_classifier:
clf = self.application.get_service('pychron.classifier.isotope_classifier.IsotopeClassifier')
for key, iso in per_spec.isotope_group.items():
sblob = encode_blob(iso.pack(endianness, as_hex=False))
snblob = encode_blob(iso.sniff.pack(endianness, as_hex=False))
for ss, blob in ((signals, sblob), (sniffs, snblob)):
d = {'isotope': iso.name, 'detector': iso.detector, 'blob': blob}
ss.append(d)
detector = next((d for d in per_spec.active_detectors if d.name == iso.detector), None)
isod = {'detector': iso.detector, 'name': iso.name,
'serial_id': detector.serial_id if detector else '00000'}
if clf is not None:
klass, prob = clf.predict_isotope(iso)
isod.update(classification=klass,
classification_probability=prob)
isos[key] = isod
if iso.detector not in dets:
bblob = encode_blob(iso.baseline.pack(endianness, as_hex=False))
baselines.append({'detector': iso.detector, 'blob': bblob})
dets[iso.detector] = {'deflection': per_spec.defl_dict.get(iso.detector),
'gain': per_spec.gains.get(iso.detector)}
icfactors[iso.detector] = {'value': float(nominal_value(iso.ic_factor or 1)),
'error': float(std_dev(iso.ic_factor or 0)),
'fit': 'default',
'references': []}
cbaselines[iso.detector] = {'fit': iso.baseline.fit,
'error_type': iso.baseline.error_type,
'filter_outliers_dict': iso.baseline.filter_outliers_dict,
'value': float(iso.baseline.value),
'error': float(iso.baseline.error)}
intercepts[key] = {'fit': iso.fit,
'error_type': iso.error_type,
'filter_outliers_dict': iso.filter_outliers_dict,
'value': float(iso.value),
'error': float(iso.error)}
blanks[key] = {'fit': 'previous',
'error_type': '',
'references': [{'record_id': per_spec.previous_blank_runid,
'exclude': False}],
'value': float(iso.blank.value),
'error': float(iso.blank.error)}
obj = self._make_analysis_dict()
from pychron.version import __version__ as pversion
from pychron.experiment import __version__ as eversion
from pychron.dvc import __version__ as dversion
obj['timestamp'] = timestamp.isoformat()
obj['collection_version'] = '{}:{}'.format(eversion, dversion)
obj['acquisition_software'] = 'pychron {}'.format(pversion)
obj['data_reduction_software'] = 'pychron {}'.format(pversion)
obj['environmental'] = {'lab_temperatures': per_spec.lab_temperatures,
'lab_humiditys': per_spec.lab_humiditys,
'lab_pneumatics': per_spec.lab_pneumatics}
obj['laboratory'] = per_spec.laboratory
obj['instrument_name'] = per_spec.instrument_name
obj['analyst_name'] = per_spec.run_spec.username
obj['whiff_result'] = per_spec.whiff_result
obj['detectors'] = dets
obj['isotopes'] = isos
obj['spec_sha'] = self._get_spectrometer_sha()
obj['intensity_scalar'] = per_spec.intensity_scalar
obj['source'] = source
# save the conditionals
obj['conditionals'] = [c.to_dict() for c in per_spec.conditionals] if \
per_spec.conditionals else None
obj['tripped_conditional'] = per_spec.tripped_conditional.result_dict() if \
per_spec.tripped_conditional else None
# save the scripts
ms = per_spec.run_spec.mass_spectrometer
for si in ('measurement', 'extraction', 'post_measurement', 'post_equilibration', 'hops'):
name = getattr(per_spec, '{}_name'.format(si))
blob = getattr(per_spec, '{}_blob'.format(si))
if name:
self.dvc.meta_repo.update_script(ms, name, blob)
obj[si] = name
# save keys for the arar isotopes
akeys = self.arar_mapping
if akeys is None:
akeys = ARAR_MAPPING
obj['arar_mapping'] = akeys
# save experiment
self.debug('---------------- Experiment Queue saving disabled')
# self.dvc.update_experiment_queue(ms, self.per_spec.experiment_queue_name,
# self.per_spec.experiment_queue_blob)
self._save_macrochron(obj)
hexsha = str(self.dvc.get_meta_head())
obj['commit'] = hexsha
# dump runid.json
p = self._make_path()
dvc_dump(obj, p)
p = self._make_path(modifier='intercepts')
dvc_dump(intercepts, p)
# dump runid.blank.json
p = self._make_path(modifier='blanks')
dvc_dump(blanks, p)
p = self._make_path(modifier='baselines')
dvc_dump(cbaselines, p)
p = self._make_path(modifier='icfactors')
dvc_dump(icfactors, p)
# dump runid.data.json
p = self._make_path(modifier='.data')
data = {'commit': hexsha,
'encoding': 'base64',
'format': '{}ff'.format(endianness),
'signals': signals, 'baselines': baselines, 'sniffs': sniffs}
dvc_dump(data, p)
def _save_macrochron(self, obj):
pass
def _save_monitor(self):
if self.per_spec.monitor:
p = self._make_path(modifier='monitor')
checks = []
for ci in self.per_spec.monitor.checks:
data = encode_blob(pack('>ff', ci.data))
params = dict(name=ci.name,
parameter=ci.parameter, criterion=ci.criterion,
comparator=ci.comparator, tripped=ci.tripped,
data=data)
checks.append(params)
dvc_dump(checks, p)
def _save_spectrometer_file(self, path):
obj = dict(spectrometer=dict(self.per_spec.spec_dict),
gains=dict(self.per_spec.gains),
deflections=dict(self.per_spec.defl_dict),
settings=self.per_spec.settings)
# hexsha = self.dvc.get_meta_head()
# obj['commit'] = str(hexsha)
dvc_dump(obj, path)
def _save_peak_center(self, pc):
self.info('DVC saving peakcenter')
p = self._make_path(modifier='peakcenter')
if pc:
fmt = '>ff'
obj = {'reference_detector': pc.reference_detector.name,
'reference_isotope': pc.reference_isotope,
'fmt': fmt,
'interpolation': pc.interpolation_kind if pc.use_interpolation else ''}
results = pc.get_results()
if results:
for result in results:
points = encode_blob(pack(fmt, result.points))
obj[result.detector] = {'low_dac': result.low_dac,
'center_dac': result.center_dac,
'high_dac': result.high_dac,
'low_signal': result.low_signal,
'center_signal': result.center_signal,
'high_signal': result.high_signal,
'resolution': result.resolution,
'low_resolving_power': result.low_resolving_power,
'high_resolving_power': result.high_resolving_power,
'points': points}
dvc_dump(obj, p)
def _make_path(self, modifier=None, extension='.json'):
runid = self.per_spec.run_spec.runid
uuid = self.per_spec.run_spec.uuid
repository_identifier = self.per_spec.run_spec.repository_identifier
if self.use_uuid_path_name:
name = uuid, uuid
else:
name = runid, runid
return analysis_path(name, repository_identifier, modifier, extension, mode='w')
def _make_analysis_dict(self, keys=None):
if keys is None:
keys = META_ATTRS
def get(ki):
obj = self.per_spec
if not hasattr(obj, ki):
obj = self.per_spec.run_spec
try:
return getattr(obj, ki)
except AttributeError as e:
self.warning('Attribute error: attr={}, error={}'.format(ki, e))
d = {k: get(k) for k in keys}
return d
def _get_spectrometer_sha(self):
"""
return a sha-1 hash.
generate using spec_dict, defl_dict, and gains
spec_dict: source parameters, cdd operating voltage
defl_dict: detector deflections
gains: detector gains
make hash using
for key,value in dictionary:
sha1.update(key)
sha1.update(value)
to ensure consistence, dictionaries are sorted by key
for key,value in sorted(dictionary)
:return:
"""
return spectrometer_sha(self.per_spec.settings,
self.per_spec.spec_dict, self.per_spec.defl_dict, self.per_spec.gains)
# ============= EOF =============================================
# self._save_measured_positions()
#
#
# def _save_measured_positions(self):
# dvc = self.dvc
#
# load_name = self.per_spec.load_name
# for i, pp in enumerate(self.per_spec.positions):
# if isinstance(pp, tuple):
# if len(pp) > 1:
# if len(pp) == 3:
# dvc.add_measured_position('', load_name, x=pp[0], y=pp[1], z=pp[2])
# else:
# dvc.add_measured_position('', load_name, x=pp[0], y=pp[1])
# else:
# dvc.add_measured_position(pp[0], load_name)
#
# else:
# dbpos = dvc.add_measured_position(pp, load_name)
# try:
# ep = self.per_spec.extraction_positions[i]
# dbpos.x = ep[0]
# dbpos.y = ep[1]
# if len(ep) == 3:
# dbpos.z = ep[2]
# except IndexError:
# self.debug('no extraction position for {}'.format(pp))
| apache-2.0 | -5,685,082,766,605,887,000 | 36.341432 | 121 | 0.512517 | false | 4.1256 | false | false | false |
SalehHindi/DC-Killer | SMS_backend.py | 1 | 1532 | '''
This portion of the program sends an SMS
message to the given number. This is one
way the program will communicate with the
user. Another potential way is through
a push notification to a phone if this
was a phone app. I noticed that Chrome
supports browser based push notifications
that are really nice and non obtrusive.
Maybe use that instead.
'''
#TODO: 2 Figure out how Facebook sends push notification to the browser. This might go under web_frontend.py
#TODO: 1 Return the status of each message sent. This would be higher priority in product sold for money.
from twilio.rest import TwilioRestClient
class send_SMS():
"""
Uses Twilio to send an SMS to a given number
"""
def __init__(self):
"""
Intializes self.client which is responsible for sending and receiving SMS in Twilio
"""
self.account_sid = "AC7162b52b47f7383aec3ad400e9cc40e4"
self.auth_token = "c4dbfa0c3ed665fd699eac2f99d4976e"
self.client = TwilioRestClient(self.account_sid, self.auth_token)
def send(self, number, SMSbody):
"""
Sends SMS to a phone number corresponding to the number variable
takes: number in the "+1XXXXXXX" format. SMSbody which should be <140 characters
returns: nothing
"""
        self.message = self.client.messages.create(to=number, from_="+19378136340", body=SMSbody)
self.receivedSMS = self.client.messages.list(To='19378136340')
print self.receivedSMS
obj=send_SMS()
obj.send('+19373295511', 'test message') | gpl-2.0 | 4,377,318,372,772,337,000 | 36.390244 | 108 | 0.710836 | false | 3.656325 | false | false | false
lidiamcfreitas/FenixScheduleMaker | ScheduleMaker/login.py | 2 | 1507 | """this module makes the connection to fenixedu information and ScheduleMaker"""
import webbrowser
from datetime import datetime
import fenixedu
class Login():
""" Login - asks authorization from user and his code"""
def __init__(self):
config = fenixedu.FenixEduConfiguration.fromConfigFile('fenixedu.ini')
self.client = fenixedu.FenixEduClient(config)
        self.year = datetime.now().year
url = self.client.get_authentication_url()
webbrowser.open_new(url)
code = input("please insert the code that is presented to you on the browser:")
self.user = self.client.get_user_by_code(code)
def getClient(self):
"""returns login's client"""
return self.client
def getUser(self):
"""returns login's user"""
return self.user
def getCoursesUrls(self):
""" get courses list of urls and of it's ids """
data = self.client.get_person_courses(self.user)
urls = []
ids = []
for i in range(len(data['enrolments'])):
urls += [data['enrolments'][i]['url']]
ids += [data['enrolments'][i]['id']]
return (urls, ids)
def getDegrees(self):
""" returns a tuple consisting of offered degrees and it's ids """
data = self.client.get_degrees()
degrees = []
ids = []
for i in range(len(data)):
degrees += [data[i]['name']]
ids += [data[i]['id']]
return (degrees, ids)
| bsd-2-clause | 2,121,949,577,441,245,200 | 27.980769 | 87 | 0.587923 | false | 3.884021 | false | false | false |
hadim/fiji_scripts | src/main/resources/script_templates/Hadim_Scripts/Wounding/3_Wounding_Analyzer.py | 2 | 3148 | # @Float(label="Pixel Size (um)", required=false, value=1) pixel_size
# @Float(label="Duration between Two Frames", required=false, value=1) dt
# @String(label="Time unit", required=false, value='s', choices={'sec', 'min', 'hours'}) time_unit
# @Dataset data
# @ImagePlus imp
# @ImageJ ij
import os
import sys
from java.io import File
from ij.measure import ResultsTable
from fiji.plugin.trackmate.visualization.hyperstack import HyperStackDisplayer
from fiji.plugin.trackmate.io import TmXmlReader
from fiji.plugin.trackmate import Logger
from fiji.plugin.trackmate import Settings
from fiji.plugin.trackmate import SelectionModel
from fiji.plugin.trackmate.providers import DetectorProvider
from fiji.plugin.trackmate.providers import TrackerProvider
from fiji.plugin.trackmate.providers import SpotAnalyzerProvider
from fiji.plugin.trackmate.providers import EdgeAnalyzerProvider
from fiji.plugin.trackmate.providers import TrackAnalyzerProvider
from fiji.plugin.trackmate.visualization import PerTrackFeatureColorGenerator
logger = Logger.IJ_LOGGER
### Open and display tracks
dir_path = os.path.dirname(data.getSource())
trackmate_path = os.path.join(dir_path, "Trajectories.xml")
stats_path = os.path.join(dir_path, "Statistics.csv")
reader = TmXmlReader(File(trackmate_path))
if not reader.isReadingOk():
sys.exit(reader.getErrorMessage())
model = reader.getModel()
spots = model.getSpots()
trackIDs = model.getTrackModel().trackIDs(True)
settings = Settings()
detectorProvider = DetectorProvider()
trackerProvider = TrackerProvider()
spotAnalyzerProvider = SpotAnalyzerProvider()
edgeAnalyzerProvider = EdgeAnalyzerProvider()
trackAnalyzerProvider = TrackAnalyzerProvider()
reader.readSettings(settings, detectorProvider, trackerProvider,
spotAnalyzerProvider, edgeAnalyzerProvider,
trackAnalyzerProvider)
logger.log(str(settings))
sm = SelectionModel(model)
displayer = HyperStackDisplayer(model, sm, imp)
color = PerTrackFeatureColorGenerator(model, 'TRACK_INDEX')
displayer.setDisplaySettings('TrackDisplaymode', 0)
displayer.setDisplaySettings('TrackDisplayDepth', 20)
displayer.setDisplaySettings('TrackColoring', color)
displayer.render()
### Build stats table
fm = model.getFeatureModel()
table = ResultsTable()
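# TrackMate reports durations in frames and distances in pixels, so each
# feature below is converted to physical units with dt and pixel_size.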
for id in model.getTrackModel().trackIDs(True):
table.incrementCounter()
track = model.getTrackModel().trackSpots(id)
table.addValue('Track ID', id)
table.addValue('TRACK_DURATION (%s)' % time_unit, fm.getTrackFeature(id, 'TRACK_DURATION') * dt)
table.addValue('TRACK_DISPLACEMENT (um)', fm.getTrackFeature(id, 'TRACK_DISPLACEMENT') * pixel_size)
table.addValue('TRACK_MEAN_SPEED (um/%s)' % time_unit, fm.getTrackFeature(id, 'TRACK_MEAN_SPEED') * pixel_size / dt)
table.addValue('TRACK_MIN_SPEED (um/%s)' % time_unit, fm.getTrackFeature(id, 'TRACK_MIN_SPEED') * pixel_size / dt)
table.addValue('TRACK_MAX_SPEED (um/%s)' % time_unit, fm.getTrackFeature(id, 'TRACK_MAX_SPEED') * pixel_size / dt)
table.addValue('TRACK_STD_SPEED (um/%s)' % time_unit, fm.getTrackFeature(id, 'TRACK_STD_SPEED') * pixel_size / dt)
table.save(stats_path)
table.show("Statistics") | bsd-3-clause | 5,236,868,259,599,146,000 | 35.616279 | 117 | 0.773189 | false | 3.306723 | false | false | false |
PROBIC/tigreBrowser | tigreBrowser/results.py | 2 | 13555 | from tigreBrowser.database import *
import xdrlib
class Results:
"""This class is responsible for getting the results from the database
using functions in database.py.
"""
def __init__(self, db, experiment_set, regulator, target_set, experiment, filters, supplementary_options, search_genes):
"""Initialization. Does not, however, fetch the results.
Parameters:
db: database object
experiment_set: name of the experiment set
regulator: name of the regulator if it exists
target_set: name of the target set if it exists
experiment: name of the experiment, appended with '_diff'
if sorting is to be done by diff.
Can also be 'zscore' if sorting is done by z-scores
filters: filters that will be applied when fetching
and filtering results (see create_filter_query() in
database.py for explanation about the parameter type)
supplementary_options: highlights will be fetched according
to these options (see create_filter_query()
in database.py for explanation about
the parameter type)
search_genes: search these gene names or aliases (list of strings)
"""
self.__db = db
self.__set_id = db.get_experiment_set_id(experiment_set)
self.__reg_id = db.get_regulator_id(regulator)
self.__set_experiment_ids = db.get_experiment_ids_in_set_recursively(self.__set_id)
if target_set:
self.__target_ids = db.get_gene_ids_dict(target_set).values()
self.__experiment_id = db.get_experiment_id(experiment.replace('_diff', ''), self.__reg_id)
self.__sort_by_diff = experiment.endswith('_diff')
self.__sort_by_zscore = False
if experiment == 'zscore':
self.__experiment_id = self.__set_experiment_ids[0]
self.__sort_by_zscore = True
if not self.__experiment_id:
raise Exception('No experiment with the current TF')
self.__dataset_id = db.get_experiment_dataset_id(self.__experiment_id)
self.__supplementary_annotation_ids_dict = self.__create_supplementary_annotation_ids_dict(self.__reg_id)
self.__filters = filters
self.__supplementary_options = supplementary_options
regulator_experiment_ids = db.get_experiment_ids(self.__reg_id)
# If a regulator is defined (TARGET_RANKING),
# choose only the experiments in a set that have the given regulator.
# Use set intersection.
if self.__reg_id:
self.__set_experiment_ids = list(set(self.__set_experiment_ids) & set(regulator_experiment_ids))
self.__search_gene_ids = None
if search_genes:
self.__search_gene_ids = db.get_gene_ids_by_name_in_experiments(self.__set_experiment_ids, search_genes)
if not self.__set_experiment_ids:
raise Exception('No results for the given selection')
for (name, symbol, value) in supplementary_options:
if name not in self.__supplementary_annotation_ids_dict:
raise Exception('Supplementary option %s not available for the given TF' % name)
self.__results_all = {}
self.__aliases_all = {}
self.__params_all = {}
self.__supps_all = {}
self.__zscores_all = {}
self.__probe_names = {}
self.__highlights = {}
def __create_supplementary_annotation_ids_dict(self, reg_id):
d = {}
for (name, ann_id) in self.__db.get_supplementary_annotation_ids(reg_id):
d[name] = ann_id
return d
def __parse_results(self, results):
self.__experiment_parameter_names = self.__get_experiment_parameter_names()
result_gene_ids = set()
probe_names = {}
results_all = {}
params_all = {}
for row in results:
probe_name = row[0]
desc = row[1]
likelihood = row[2]
baseline_likelihood = row[3]
gene_id = row[4]
param_values = row[5]
exp_id = row[6]
result_gene_ids.add(gene_id)
results_all.setdefault(gene_id, {})[desc] = likelihood
# add results with appended '_baseline' and '_diff'
# if baseline is defined to show them in results listing
if baseline_likelihood:
results_all[gene_id][desc + '_baseline'] = baseline_likelihood
results_all[gene_id][desc + '_diff'] = likelihood - baseline_likelihood
probe_names[gene_id] = probe_name
params_all.setdefault(gene_id, {})[exp_id] = self.__map_params_to_names(exp_id, self.__experiment_parameter_names.get(exp_id, []), param_values)
return result_gene_ids, probe_names, results_all, params_all
def __query_supplementary_datas(self, gene_ids, supp_ids):
supps = self.__db.get_gene_supplementary_datas(gene_ids, supp_ids)
d = {}
for (supp_id, name, value) in supps:
d.setdefault(supp_id, {})[name] = value
return d
def __query_gene_aliases(self, gene_ids):
aliases = self.__db.get_gene_aliases(gene_ids)
d = {}
for (alias_id, alias_class, alias) in aliases:
d.setdefault(alias_id, {}).setdefault(alias_class, []).append(alias)
return d
def __query_zscores(self, gene_ids, dataset_id):
d = {}
for (gene_id, zscore) in self.__db.get_z_scores(gene_ids, dataset_id):
d[gene_id] = zscore
return d
def __parse_rdata_double_raw_vector(self, data_buffer):
# Python 2.5 and 3.x compatibility code
try:
header = [ord(x) for x in [data_buffer[0], data_buffer[1]]]
except TypeError:
header = [data_buffer[0], data_buffer[1]]
# RData binary format
if header[0] != ord('X') or header[1] != ord('\n'):
return None
xdr = xdrlib.Unpacker(data_buffer[2:])
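        # Skip the five header integers preceding the payload (presumably the
        # RData format/version fields plus the vector type flags and length);
        # only the raw doubles that follow are needed here.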
xdr.unpack_int()
xdr.unpack_int()
xdr.unpack_int()
xdr.unpack_int()
xdr.unpack_int()
data = []
while True:
try:
data.append(xdr.unpack_double())
except EOFError:
break
return data
def __map_params_to_names(self, exp_id, param_names, param_values):
if not param_values:
return {}
param_values = self.__parse_rdata_double_raw_vector(param_values)
return dict(zip(param_names, param_values))
def fetch_results(self, number_of_genes, offset):
"""Fetches the results from the database.
Parameters:
number_of_genes: fetch at most this many genes
offset: offset in the results listing
Returns: (gene ids for the result genes, total number of genes with results)
"""
fq, fa = self.__db.create_filter_query(self.__reg_id, self.__filters, [], self.__supplementary_annotation_ids_dict)
sq, sa = self.__db.create_sort_query(fq, fa, self.__experiment_id, self.__sort_by_diff, self.__sort_by_zscore)
if self.__search_gene_ids != None:
gene_ids = self.__search_gene_ids
else:
gene_ids = self.__db.get_gene_ids_for_results(sq, sa)
count = len(gene_ids) # total result count
gene_ids = gene_ids[offset:(offset + number_of_genes)] # OFFSET, LIMIT
results = self.__db.get_results_for_gene_ids(self.__set_experiment_ids, gene_ids)
result_gene_ids, self.__probe_names, self.__results_all, self.__params_all = self.__parse_results(results)
if not results: # quick fix for the result count
count = 0
# remove gene ids that are in gene_ids but not in result_gene_ids
l = gene_ids[:]
for gene_id in l:
if gene_id not in result_gene_ids:
gene_ids.remove(gene_id)
self.__supps_all = self.__query_supplementary_datas(gene_ids, self.__supplementary_annotation_ids_dict)
self.__aliases_all = self.__query_gene_aliases(gene_ids)
self.__zscores_all = self.__query_zscores(gene_ids, self.__dataset_id)
self.__highlights = self.__db.get_highlights(self.__supplementary_options, self.__supplementary_annotation_ids_dict, gene_ids)
return gene_ids, count
def get_experiment_results(self, gene_id):
"""Returns a dictionary of experiment results for the given gene_id.
Returns: {experiment_name, log_likelihood or baseline_log_likelihood}
Example:
{'GPDISIM_diff': 494.65294095332217, 'GPDISIM': 6.7597099032014309, 'GPDISIM_baseline': -487.89323105012073}
"""
return self.__results_all.get(gene_id)
def get_zscore(self, gene_id):
"""Returns z-score for the given gene_id.
"""
return self.__zscores_all.get(gene_id)
def get_supplementary_data(self, gene_id):
"""Returns a dictionary representing supplementary data
for the given gene_id.
Returns: {supplementary dataset name, value}
Example:
{'ischip': 1.0, 'chipdist': 1671.0, 'isinsitu': 0.0, 'hasinsitu': 0.0}
"""
return self.__supps_all.get(gene_id, {})
def get_parameters(self, gene_id):
"""Returns a dictionary of parameters in different experiments
for the given gene id.
Returns: {experiment id: {parameter name: parameter value}}
Example:
{1: {'rbf1_variance/disim1_rbf_variance': 0.59039292169555113,
'disim1_variance': -5.9057868788275316,
'disim1_decay': -3.9377851775471258,
'disim1_di_variance': 0.0,
'Basal1': -8.9106876980653453,
'disim1_di_decay': -4.0190835928767878,
'rbf1_inverseWidth/disim1_inverseWidth': -0.51096542027712455}
}
"""
return self.__params_all.get(gene_id, {})
def get_aliases(self, gene_id):
"""Returns a dictionary of aliases for the given gene id.
Returns: {alias class, [aliases]}
Example:
{'ENTREZID': [u'38211'],
'SYMBOL': [u'CG12011'],
'FLYBASE': [u'FBgn0035257'],
'GENENAME': [u'CG12011 gene product from transcript CG12011-RA']}
"""
return self.__aliases_all.get(gene_id, {})
def get_probe_name(self, gene_id):
"""Returns the probe name for the given gene id.
"""
return self.__probe_names.get(gene_id)
def get_highlights(self):
"""Gets a dictionary of supplementary dataset names to gene ids that
will be highlighted.
Returns: {supplementary dataset name: [gene ids]}
Example:
{'ischip': [4632, 8354, 10609], 'isinsitu': [], 'hasinsitu': [4632, 8354, 10609]}
"""
return self.__highlights
def get_dataset_figure(self):
"""Gets the template URL to the dataset figure.
Example:
http://www.something.com/something/figures/${probe_name}.png
"""
return self.__db.get_dataset_figure_filename(self.__dataset_id)
def get_experiment_figures(self):
"""Gets experiment figure annotations.
Returns: [(figure id, filename, name, description, priority)]
"""
return self.__db.get_experiment_figures(self.__set_experiment_ids)
def get_alias_annotations(self):
"""Gets all alias annotations.
Returns: [(alias annotation id, alias class, source, description)]
"""
return self.__db.get_alias_annotations(self.__dataset_id)
def get_experiment_names(self):
"""Gets a dictionary mapping from experiment ids to corresponding names.
Returns: {experiment id: experiment name}
Example:
{1: 'GPDISIM', 2: 'GPSIM'}
"""
exp_dict = {}
results = self.__db.get_experiment_ids_names(self.__set_experiment_ids)
for (exp_id, name) in results:
exp_dict[exp_id] = name
return exp_dict
def __get_experiment_parameter_names(self):
params_dict = {}
for exp_id in self.__set_experiment_ids:
names = self.__db.get_experiment_parameter_names(exp_id)
if not names:
continue
names = [name.strip() for name in names.strip().split(',')]
params_dict[exp_id] = names
return params_dict
def get_experiment_parameter_names(self):
"""Gets a dictionary mapping from experiment ids to a list of
parameter names.
Returns: {experiment id: [parameter names]}
Example:
{1: ['rbf1_inverseWidth/disim1_inverseWidth',
'rbf1_variance/disim1_rbf_variance',
'disim1_di_decay',
'disim1_di_variance',
'disim1_decay',
'disim1_variance',
'Basal1']
}
"""
return self.__experiment_parameter_names
def get_all_parameter_names(self):
"""Gets a list of parameters names in all experiments.
Returns: [parameter names]
"""
all_names = sum(self.__experiment_parameter_names.values(), [])
# remove duplicates
names = []
[names.append(name) for name in all_names if not names.count(name)]
return names
| agpl-3.0 | -7,790,328,812,109,092,000 | 37.183099 | 156 | 0.583401 | false | 3.858525 | false | false | false |
andreimaximov/algorithms | leetcode/algorithms/edit-distance/solution.py | 1 | 1443 | #!/usr/bin/env python
class Solution(object):
def minDistance(self, a, b):
"""
Returns the edit distance between strings a and b.
"""
n = len(a)
m = len(b)
# If either string is empty, we need to add all characters from other
# string.
if n == 0 or m == 0:
return max(n, m)
# n x m matrix where each dp[i][j] represents the edit distance for
# a[:i + 1] and b[:j + 1].
dp = [([0] * (m + 1)) for i in range(0, n + 1)]
for i in range(0, n + 1):
for j in range(0, m + 1):
if i == 0:
dp[i][j] = j
elif j == 0:
dp[i][j] = i
elif a[i - 1] == b[j - 1]:
# If the trailing characters are the same, we don't need to
# perform an operation to bring these characters in sync.
dp[i][j] = dp[i - 1][j - 1]
else:
dp[i][j] = 1 + \
min(dp[i - 1][j - 1], # Replace a[i] with b[j]
dp[i][j - 1], # Add a[i] to b[:j] (Insert)
dp[i - 1][j]) # Add b[j] to a[:i] (Delete)
return dp[n][m]
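    # Hand-checked example: minDistance("horse", "ros") == 3
    # ("horse" -> "rorse" (replace 'h') -> "rose" (delete 'r') -> "ros" (delete 'e')).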
def main():
print('Please run this solution on LeetCode.')
print('https://leetcode.com/problems/edit-distance/')
if __name__ == '__main__':
main()
| mit | -18,678,275,532,948,184 | 31.066667 | 79 | 0.417186 | false | 3.536765 | false | false | false |
jakesyl/pychess | lib/pychess/Players/PyChessFICS.py | 20 | 17313 | from __future__ import print_function
import email.Utils
from gi.repository import Gtk, Gdk
import math
import pychess
import random
import signal
import subprocess
from threading import Thread
from pychess.compat import urlopen, urlencode
from pychess.Players.PyChess import PyChess
from pychess.System.prefix import addDataPrefix, isInstalled
from pychess.System.repeat import repeat_sleep
from pychess.System import GtkWorker, fident
from pychess.System.Log import log
from pychess.Utils.const import *
from pychess.Utils.lutils.LBoard import LBoard
from pychess.Utils.lutils.lmove import determineAlgebraicNotation, toLAN, parseSAN
from pychess.Utils.lutils import lsearch
from pychess.Utils.repr import reprResult_long, reprReason_long
from pychess.ic.FICSConnection import FICSMainConnection
class PyChessFICS(PyChess):
def __init__ (self, password, from_address, to_address):
PyChess.__init__(self)
self.ports = (23, 5000)
if not password:
self.username = "guest"
else: self.username = "PyChess"
self.owner = "Lobais"
self.password = password
self.from_address = "The PyChess Bot <%s>" % from_address
self.to_address = "Thomas Dybdahl Ahle <%s>" % to_address
# Possible start times
self.minutes = (1,2,3,4,5,6,7,8,9,10)
self.gains = (0,5,10,15,20)
# Possible colors. None == random
self.colors = (WHITE, BLACK, None)
# The amount of random challenges, that PyChess sends with each seek
self.challenges = 10
enableEGTB()
self.sudos = set()
self.ownerOnline = False
self.waitingForPassword = None
self.log = []
self.acceptedTimesettings = []
self.worker = None
repeat_sleep(self.sendChallenges, 60*1)
def __triangular(self, low, high, mode):
"""Triangular distribution.
Continuous distribution bounded by given lower and upper limits,
and having a given mode value in-between.
http://en.wikipedia.org/wiki/Triangular_distribution
"""
u = random.random()
c = (mode - low) / (high - low)
if u > c:
u = 1 - u
c = 1 - c
low, high = high, low
tri = low + (high - low) * (u * c) ** 0.5
if tri < mode:
return int(tri)
elif tri > mode:
return int(math.ceil(tri))
return int(round(tri))
def sendChallenges(self):
if self.connection.bm.isPlaying():
return True
statsbased = ((0.39197722779282, 3, 0),
(0.59341408108783, 5, 0),
(0.77320877377846, 1, 0),
(0.8246379941394, 10, 0),
(0.87388717406441, 2, 12),
(0.91443760169489, 15, 0),
(0.9286423058163, 4, 0),
(0.93891977227793, 2, 0),
(0.94674539138335, 20, 0),
(0.95321476842423, 2, 2),
(0.9594588808257, 5, 2),
(0.96564528079889, 3, 2),
(0.97173859621034, 7, 0),
(0.97774906636184, 3, 1),
(0.98357243654425, 5, 12),
(0.98881309737017, 5, 5),
(0.99319644938247, 6, 0),
(0.99675879556023, 3, 12),
(1, 5, 3))
#n = random.random()
#for culminativeChance, minute, gain in statsbased:
# if n < culminativeChance:
# break
culminativeChance, minute, gain = random.choice(statsbased)
#type = random.choice((TYPE_LIGHTNING, TYPE_BLITZ, TYPE_STANDARD))
#if type == TYPE_LIGHTNING:
# minute = self.__triangular(0,2+1,1)
# mingain = not minute and 1 or 0
# maxgain = int((3-minute)*3/2)
# gain = random.randint(mingain, maxgain)
#elif type == TYPE_BLITZ:
# minute = self.__triangular(0,14+1,5)
# mingain = max(int((3-minute)*3/2+1), 0)
# maxgain = int((15-minute)*3/2)
# gain = random.randint(mingain, maxgain)
#elif type == TYPE_STANDARD:
# minute = self.__triangular(0,20+1,12)
# mingain = max(int((15-minute)*3/2+1), 0)
# maxgain = int((20-minute)*3/2)
# gain = self.__triangular(mingain, maxgain, mingain)
#color = random.choice(self.colors)
self.extendlog(["Seeking %d %d" % (minute, gain)])
self.connection.glm.seek(minute, gain, True)
opps = random.sample(self.connection.players.get_online_playernames(),
self.challenges)
self.extendlog("Challenging %s" % op for op in opps)
for player in opps:
self.connection.om.challenge(player, minute, gain, True)
return True
def makeReady(self):
signal.signal(signal.SIGINT, Gtk.main_quit)
PyChess.makeReady(self)
self.connection = FICSMainConnection("freechess.org", self.ports,
self.username, self.password)
self.connection.connect("connectingMsg", self.__showConnectLog)
self.connection._connect()
self.connection.glm.connect("addPlayer", self.__onAddPlayer)
self.connection.glm.connect("removePlayer", self.__onRemovePlayer)
self.connection.cm.connect("privateMessage", self.__onTell)
self.connection.alm.connect("logOut", self.__onLogOut)
self.connection.bm.connect("playGameCreated", self.__onGameCreated)
self.connection.bm.connect("curGameEnded", self.__onGameEnded)
self.connection.bm.connect("boardUpdate", self.__onBoardUpdate)
self.connection.om.connect("onChallengeAdd", self.__onChallengeAdd)
self.connection.om.connect("onOfferAdd", self.__onOfferAdd)
self.connection.adm.connect("onAdjournmentsList", self.__onAdjournmentsList)
self.connection.em.connect("onAmbiguousMove", self.__onAmbiguousMove)
self.connection.em.connect("onIllegalMove", self.__onAmbiguousMove)
self.connection.adm.queryAdjournments()
self.connection.lvm.setVariable("autoflag", 1)
self.connection.fm.setFingerNote(1,
"PyChess is the chess engine bundled with the PyChess %s " % pychess.VERSION +
"chess client. This instance is owned by %s, but acts " % self.owner +
"quite autonomously.")
self.connection.fm.setFingerNote(2,
"PyChess is 100% Python code and is released under the terms of " +
"the GPL. The evalution function is largely equal to the one of" +
"GnuChess, but it plays quite differently.")
self.connection.fm.setFingerNote(3,
"PyChess runs on an elderly AMD Sempron(tm) Processor 3200+, 512 " +
"MB DDR2 Ram, but is built to take use of 64bit calculating when " +
"accessible, through the gpm library.")
self.connection.fm.setFingerNote(4,
"PyChess uses a small 500 KB openingbook based solely on Kasparov " +
"games. The engine doesn't have much endgame knowledge, but might " +
"in some cases access an online endgamedatabase.")
self.connection.fm.setFingerNote(5,
"PyChess will allow any pause/resume and adjourn wishes, but will " +
"deny takebacks. Draw, abort and switch offers are accepted, " +
"if they are found to be an advance. Flag is auto called, but " +
"PyChess never resigns. We don't want you to forget your basic " +
"mating skills.")
def main(self):
self.connection.run()
self.extendlog([str(self.acceptedTimesettings)])
self.phoneHome("Session ended\n"+"\n".join(self.log))
print("Session ended")
def run(self):
t = Thread(target=self.main, name=fident(self.main))
t.daemon = True
t.start()
Gdk.threads_init()
Gtk.main()
#===========================================================================
# General
#===========================================================================
def __showConnectLog (self, connection, message):
print(message)
def __onLogOut (self, autoLogoutManager):
self.connection.close()
#sys.exit()
def __onAddPlayer (self, gameListManager, player):
if player["name"] in self.sudos:
self.sudos.remove(player["name"])
if player["name"] == self.owner:
self.connection.cm.tellPlayer(self.owner, "Greetings")
self.ownerOnline = True
def __onRemovePlayer (self, gameListManager, playername):
if playername == self.owner:
self.ownerOnline = False
def __onAdjournmentsList (self, adjournManager, adjournments):
for adjournment in adjournments:
if adjournment["online"]:
adjournManager.challenge(adjournment["opponent"])
def __usage (self):
return "|| PyChess bot help file || " +\
"# help 'Displays this help file' " +\
"# sudo <password> <command> 'Lets PyChess execute the given command' "+\
"# sendlog 'Makes PyChess send you its current log'"
def __onTell (self, chatManager, name, title, isadmin, text):
if self.waitingForPassword:
if text.strip() == self.password or (not self.password and text == "none"):
self.sudos.add(name)
self.tellHome("%s gained sudo access" % name)
self.connection.client.run_command(self.waitingForPassword)
else:
chatManager.tellPlayer(name, "Wrong password")
self.tellHome("%s failed sudo access" % name)
self.waitingForPassword = None
return
args = text.split()
#if args == ["help"]:
# chatManager.tellPlayer(name, self.__usage())
if args[0] == "sudo":
command = " ".join(args[1:])
if name in self.sudos or name == self.owner:
# Notice: This can be used to make nasty loops
print(command, file=self.connection.client)
else:
print(repr(name), self.sudos)
chatManager.tellPlayer(name, "Please send me the password")
self.waitingForPassword = command
elif args == ["sendlog"]:
if self.log:
# TODO: Consider email
chatManager.tellPlayer(name, "\\n".join(self.log))
else:
chatManager.tellPlayer(name, "The log is currently empty")
else:
if self.ownerOnline:
self.tellHome("%s told me '%s'" % (name, text))
else:
def onlineanswer (message):
data = urlopen("http://www.pandorabots.com/pandora/talk?botid=8d034368fe360895",
urlencode({"message":message, "botcust2":"x"}).encode("utf-8")).read().decode('utf-8')
ss = "<b>DMPGirl:</b>"
es = "<br>"
answer = data[data.find(ss)+len(ss) : data.find(es,data.find(ss))]
chatManager.tellPlayer(name, answer)
t = Thread(target=onlineanswer,
name=fident(onlineanswer),
args=(text,))
t.daemon = True
t.start()
#chatManager.tellPlayer(name, "Sorry, your request was nonsense.\n"+\
# "Please read my help file for more info")
#===========================================================================
# Challenges and other offers
#===========================================================================
def __onChallengeAdd (self, offerManager, index, match):
#match = {"tp": type, "w": fname, "rt": rating, "color": color,
# "r": rated, "t": mins, "i": incr}
offerManager.acceptIndex(index)
def __onOfferAdd (self, offerManager, offer):
if offer.type in (PAUSE_OFFER, RESUME_OFFER, ADJOURN_OFFER):
offerManager.accept(offer)
elif offer.type in (TAKEBACK_OFFER,):
offerManager.decline(offer)
elif offer.type in (DRAW_OFFER, ABORT_OFFER, SWITCH_OFFER):
if self.__willingToDraw():
offerManager.accept(offer)
else: offerManager.decline(offer)
#===========================================================================
# Playing
#===========================================================================
def __onGameCreated (self, boardManager, ficsgame):
base = int(ficsgame.minutes)*60
inc = int(ficsgame.inc)
self.clock[:] = base, base
self.increment[:] = inc, inc
self.gameno = ficsgame.gameno
self.lastPly = -1
self.acceptedTimesettings.append((base, inc))
self.tellHome("Starting a game (%s, %s) gameno: %s" %
(ficsgame.wplayer.name, ficsgame.bplayer.name, ficsgame.gameno))
if ficsgame.bplayer.name.lower() == self.connection.getUsername().lower():
self.playingAs = BLACK
else:
self.playingAs = WHITE
self.board = LBoard(NORMALCHESS)
        # Now we wait until we receive the board.
def __go (self):
if self.worker:
self.worker.cancel()
self.worker = GtkWorker(lambda worker: PyChess._PyChess__go(self, worker))
self.worker.connect("published", lambda w, msg: self.extendlog(msg))
self.worker.connect("done", self.__onMoveCalculated)
self.worker.execute()
def __willingToDraw (self):
return self.scr <= 0 # FIXME: this misbehaves in all but the simplest use cases
def __onGameEnded (self, boardManager, ficsgame):
self.tellHome(reprResult_long[ficsgame.result] + " " + reprReason_long[ficsgame.reason])
lsearch.searching = False
if self.worker:
self.worker.cancel()
self.worker = None
def __onMoveCalculated (self, worker, sanmove):
if worker.isCancelled() or not sanmove:
return
self.board.applyMove(parseSAN(self.board,sanmove))
self.connection.bm.sendMove(sanmove)
self.extendlog(["Move sent %s" % sanmove])
def __onBoardUpdate (self, boardManager, gameno, ply, curcol, lastmove, fen, wname, bname, wms, bms):
self.extendlog(["","I got move %d %s for gameno %s" % (ply, lastmove, gameno)])
if self.gameno != gameno:
return
self.board.applyFen(fen)
self.clock[:] = wms/1000., bms/1000.
if curcol == self.playingAs:
self.__go()
def __onAmbiguousMove (self, errorManager, move):
# This is really a fix for fics, but sometimes it is necessary
if determineAlgebraicNotation(move) == SAN:
self.board.popMove()
move_ = parseSAN(self.board, move)
lanmove = toLAN(self.board, move_)
self.board.applyMove(move_)
self.connection.bm.sendMove(lanmove)
else:
self.connection.cm.tellOpponent(
"I'm sorry, I wanted to move %s, but FICS called " % move +
"it 'Ambigious'. I can't find another way to express it, " +
"so you can win")
self.connection.bm.resign()
#===========================================================================
# Utils
#===========================================================================
def extendlog(self, messages):
[log.info(m+"\n") for m in messages]
self.log.extend(messages)
del self.log[:-10]
def tellHome(self, message):
print(message)
if self.ownerOnline:
self.connection.cm.tellPlayer(self.owner, message)
def phoneHome(self, message):
SENDMAIL = '/usr/sbin/sendmail'
SUBJECT = "Besked fra botten"
p = subprocess.Popen([SENDMAIL, '-f',
email.Utils.parseaddr(self.from_address)[1],
email.Utils.parseaddr(self.to_address)[1]],
stdin=subprocess.PIPE)
print("MIME-Version: 1.0", file=p.stdin)
print("Content-Type: text/plain; charset=UTF-8", file=p.stdin)
print("Content-Disposition: inline", file=p.stdin)
print("From: %s" % self.from_address, file=p.stdin)
print("To: %s" % self.to_address, file=p.stdin)
print("Subject: %s" % SUBJECT, file=p.stdin)
print(file=p.stdin)
print(message, file=p.stdin)
print("Cheers", file=p.stdin)
p.stdin.close()
p.wait()
| gpl-3.0 | -6,922,454,449,462,359,000 | 40.123515 | 121 | 0.540923 | false | 3.905482 | false | false | false |
mtu-most/MOST-delta-stage | Welder.py | 1 | 3113 | #Name: Welder
#Info: Mangle GCode to work on welder (2014-02-10)
#Depend: GCode
#Type: postprocess
#Param: speed(float:10) target extruder speed (mm/s)
#Param: mindist(float:1) minimum travel distance to switch off welder (mm)
#Param: ON1(str:G4 P0) Command to insert after travel (line 1)
#Param: ON2(str:M42 P2 Sinf) Command to insert after travel (line 2)
#Param: ON3(str:) Command to insert after travel (line 3)
#Param: ON4(str:) Command to insert after travel (line 4)
#Param: OFF1(str:G4 P0) Command to insert before travel (line 1)
#Param: OFF2(str:M42 P2 Snan) Command to insert before travel (line 2)
#Param: OFF3(str:) Command to insert before travel (line 3)
#Param: OFF4(str:) Command to insert before travel (line 4)
import sys
__author__ = 'Bas Wijnen <[email protected]>'
__date__ = '2014-02-10'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
try:
infilename = filename
outfilename = filename
ON = ''.join (['%s\n' % x for x in (ON1, ON2, ON3, ON4) if x])
OFF = ''.join (['%s\n' % x for x in (OFF1, OFF2, OFF3, OFF4) if x])
except NameError:
assert len (sys.argv) in (3, 5)
infilename = sys.argv[1]
outfilename = sys.argv[2]
speed = float (sys.argv[3]) if len (sys.argv) > 3 else 40.
mindist = float (sys.argv[4]) if len (sys.argv) > 4 else 1.
ON = 'G4 P0\nM42 P2 Sinf\n'
OFF = 'G4 P0\nM42 P2 Snan\n'
extruding = False
pos = [0., 0., 0., 0.]
erel = None
rel = False
edata = [0.,0.]
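# Added note: parse() below rewrites extruding G0/G1 moves so the wire feed
# (E axis) advances at the configured `speed` -- the feedrate F is recomputed
# from the XYZ travel distance -- and wraps travel moves longer than `mindist`
# with the OFF/ON command blocks to toggle the welder.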
def parse (line):
edata[0] = pos[3]
global rel, erel, extruding
if ';' in line:
l = line[:line.find (';')]
else:
l = line
components = l.split ()
if len (components) == 0:
return line
if components[0] == 'G90':
rel = False
if components[0] == 'G91':
rel = True
if components[0] == 'M82':
erel = False
if components[0] == 'M83':
erel = True
if components[0] == 'G92':
for w in components:
if w[0] in 'XYZ':
wh = ord (w[0]) - ord ('X')
pos[wh] = float (w[1:])
elif w[0] == 'E':
pos[3] = float (w[1:])
if components[0] not in ('G0', 'G1'):
return line
parts = {}
for p in components[1:]:
if p[0] in parts or p[0] not in 'XYZEF':
print 'warning: %s' % line
return line
parts[p[0]] = float (p[1:])
x = []
for i, c in enumerate ('XYZE'):
if c in parts:
x.append (parts[c] if (rel if i < 3 or erel is None else erel) else parts[c] - pos[i])
pos[i] += x[-1]
else:
x.append (0.)
dist = sum ([t ** 2 for t in x[:3]]) ** .5
if 'E' not in parts or x[3] <= 0:
if extruding and dist > mindist:
extruding = False
return OFF + line
return line
del parts['E']
t = x[3] / speed
parts['F'] = dist / t * 60.
ret = 'G1 ' + ' '.join (['%s%f' % (c, parts[c]) for c in parts])
if not extruding:
extruding = True
return ON + ret
edata[1] = pos[3]
return ret
try:
with open (infilename, "r") as f:
lines = f.readlines ()
with open (outfilename, "w") as f:
for line in lines:
f.write (parse (line.strip ()) + '\n')
except:
print ('something was wrong:', sys.exc_value)
| gpl-3.0 | 8,612,901,917,585,751,000 | 27.367925 | 89 | 0.596531 | false | 2.502412 | false | false | false |
MSMBA/msmba-workflow | msmba-workflow/srclib/wax/examples/more/fileviewer.py | 1 | 2245 | # fileviewer.py
# Skeleton for a simple file viewer.
from wax import *
from wax.tools.dirview import DirView
def isimage(path):
EXTENSIONS = [".jpg", ".png", ".gif", ".ico", ".bmp"]
path = path.lower()
for ext in EXTENSIONS:
if path.endswith(ext):
return True
return False
class MainFrame(Frame):
def Body(self):
self.splitter = Splitter(self)
self.dirview = DirView(self.splitter)
self.dirview.OnSelectionChanged = self.OnDirViewSelectionChanged
self.panel = self.MakeOverlayPanel(self.splitter)
self.panel.Select(0)
self.splitter.Split(self.dirview, self.panel, direction='v',
sashposition=200)
self.AddComponent(self.splitter, expand='both')
self.Pack()
self.Size = 600, 400
def MakeOverlayPanel(self, parent):
op = OverlayPanel(parent)
# window 1: a textbox
self.textbox = TextBox(op, multiline=1, wrap=0, readonly=1)
self.textbox.Font = Font("Courier New", 10)
op.AddComponent(self.textbox, expand='both')
        # window 2: a panel
self.imagepanel = Panel(op)
op.AddComponent(self.imagepanel, expand='both')
# create Bitmap control w/ dummy image
dummy = ArtProvider((16,16)).GetBitmap('error', 'other')
self.bitmap = Bitmap(self.imagepanel, dummy)
self.imagepanel.AddComponent(self.bitmap, expand='both')
self.imagepanel.Pack()
op.Pack()
return op
def OnDirViewSelectionChanged(self, event):
path = self.dirview.GetPath()
if isimage(path):
self.panel.Select(1)
try:
bitmap = Image(path).ConvertToBitmap()
except:
self.ShowText("Image could not be loaded")
else:
self.bitmap.SetBitmap(bitmap)
self.imagepanel.Repack()
else:
self.ShowText(path)
def ShowText(self, text):
self.panel.Select(0)
self.textbox.Value = text
app = Application(MainFrame, title='fileviewer')
app.Run()
| gpl-2.0 | -7,461,295,372,494,172,000 | 30.536232 | 72 | 0.565256 | false | 3.938596 | false | false | false |
benjimons/FIR | fir_relations/views.py | 4 | 1626 | from json import dumps
from django.contrib.auth.decorators import login_required, user_passes_test
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
from fir_relations.models import Relation
from incidents.views import is_incident_handler, is_incident_viewer
@login_required
@user_passes_test(is_incident_viewer)
def relations(request, content_type, object_id):
references = Relation.objects.filter(src_content_type=content_type,
src_object_id=object_id,
active=True).as_template_objects(request, relation_type='target')
referenced_by = Relation.objects.filter(tgt_content_type=content_type,
tgt_object_id=object_id,
active=True).as_template_objects(request, relation_type='source')
return render(request, "fir_relations/relations_sidebar.html",
{'references': references, 'referenced_by': referenced_by})
@login_required
@user_passes_test(is_incident_handler)
def remove_relation(request, relation_id):
if request.method == "POST":
relation = get_object_or_404(Relation, pk=relation_id)
if hasattr(relation.source, 'has_perm') and \
relation.source.has_perm(request.user, 'incidents.handle_incidents'):
relation.active = False
relation.save()
return HttpResponse(dumps({'status': 'success'}), content_type="application/json")
raise PermissionDenied
| gpl-3.0 | -7,668,384,517,257,130,000 | 45.457143 | 109 | 0.659902 | false | 4.256545 | false | false | false |
coshkun/6.00.1x-MITx-Course-Training-Lab-Notes | snippets/deep-reverse-funtion.midterm-exam-Problem7.py | 1 | 1186 | # -*- coding: utf-8 -*-
"""
Created on Mon Feb 13 19:04:45 2017
@author: coskun
Midterm Exam > Problem 6
15.0 points possible (graded)
Implement a function that meets the specifications below.
For example, if L = [[1, 2], [3, 4], [5, 6, 7]]
then deep_reverse(L) mutates L to be [[7, 6, 5], [4, 3], [2, 1]]
"""
#Sample Hand Input
L = [[1, 2], [3, 4], [5, 6, 7]]
def deep_reverse_copy(L):
""" assumes L is a list of lists whose elements are ints
    Returns a new list in which the order of L's elements is reversed and
    the int elements inside every element of L are also reversed.
    It does not mutate L.
"""
# Your code here
R = L[::-1]
mSize = len(L)
for i in range(mSize):
R[i] = R[i][::-1]
return R
def deep_reverse(L): # This will be the correct answer for test case
""" assumes L is a list of lists whose elements are ints
Mutates L such that it reverses its elements and also
reverses the order of the int elements in every element of L.
It does not return anything.
"""
# Your code here
L.reverse()
mSize = len(L)
for i in range(mSize):
L[i] = L[i][::-1]
print(L)
deep_reverse(L)
print(L) | mit | 2,050,933,538,805,303,000 | 24.804348 | 69 | 0.616358 | false | 3.137566 | false | false | false |
tjake/cassandra-dtest | paxos_tests.py | 2 | 7208 | # coding: utf-8
from dtest import Tester
from tools import since, no_vnodes
from assertions import assert_unavailable
from cassandra import ConsistencyLevel, WriteTimeout
from cassandra.query import SimpleStatement
import time
from threading import Thread
from ccmlib.cluster import Cluster
@since('2.0.6')
class TestPaxos(Tester):
def prepare(self, ordered=False, create_keyspace=True, use_cache=False, nodes=1, rf=1):
cluster = self.cluster
if (ordered):
cluster.set_partitioner("org.apache.cassandra.dht.ByteOrderedPartitioner")
if (use_cache):
cluster.set_configuration_options(values={ 'row_cache_size_in_mb' : 100 })
cluster.populate(nodes).start()
node1 = cluster.nodelist()[0]
time.sleep(0.2)
cursor = self.patient_cql_connection(node1, version="3.0.0")
if create_keyspace:
self.create_ks(cursor, 'ks', rf)
return cursor
def replica_availability_test(self):
#See CASSANDRA-8640
session = self.prepare(nodes=3, rf=3)
session.execute("CREATE TABLE test (k int PRIMARY KEY, v int)")
session.execute("INSERT INTO test (k, v) VALUES (0, 0) IF NOT EXISTS")
self.cluster.nodelist()[2].stop()
session.execute("INSERT INTO test (k, v) VALUES (1, 1) IF NOT EXISTS")
self.cluster.nodelist()[1].stop()
assert_unavailable(session.execute, "INSERT INTO test (k, v) VALUES (2, 2) IF NOT EXISTS")
self.cluster.nodelist()[1].start()
session.execute("INSERT INTO test (k, v) VALUES (3, 3) IF NOT EXISTS")
self.cluster.nodelist()[2].start()
session.execute("INSERT INTO test (k, v) VALUES (4, 4) IF NOT EXISTS")
@no_vnodes()
def cluster_availability_test(self):
#Warning, a change in partitioner or a change in CCM token allocation
#may require the partition keys of these inserts to be changed.
#This must not use vnodes as it relies on assumed token values.
session = self.prepare(nodes=3)
session.execute("CREATE TABLE test (k int PRIMARY KEY, v int)")
session.execute("INSERT INTO test (k, v) VALUES (0, 0) IF NOT EXISTS")
self.cluster.nodelist()[2].stop()
session.execute("INSERT INTO test (k, v) VALUES (1, 1) IF NOT EXISTS")
self.cluster.nodelist()[1].stop()
session.execute("INSERT INTO test (k, v) VALUES (3, 2) IF NOT EXISTS")
self.cluster.nodelist()[1].start()
session.execute("INSERT INTO test (k, v) VALUES (5, 5) IF NOT EXISTS")
self.cluster.nodelist()[2].start()
session.execute("INSERT INTO test (k, v) VALUES (6, 6) IF NOT EXISTS")
def contention_test_multi_iterations(self):
self._contention_test(8, 100)
##Warning, this test will require you to raise the open
##file limit on OSX. Use 'ulimit -n 1000'
    def contention_test_many_threads(self):
self._contention_test(300, 1)
def _contention_test(self, threads, iterations):
""" Test threads repeatedly contending on the same row """
verbose = False
cursor = self.prepare(nodes=3)
cursor.execute("CREATE TABLE test (k int, v int static, id int, PRIMARY KEY (k, id))")
cursor.execute("INSERT INTO test(k, v) VALUES (0, 0)");
class Worker(Thread):
def __init__(self, wid, cursor, iterations, query):
Thread.__init__(self)
self.wid = wid
self.iterations = iterations
self.query = query
self.cursor = cursor
self.errors = 0
self.retries = 0
def run(self):
global worker_done
i = 0
prev = 0
while i < self.iterations:
done = False
while not done:
try:
res = self.cursor.execute(self.query, (prev+1, prev, self.wid ))
if verbose:
print "[%3d] CAS %3d -> %3d (res: %s)" % (self.wid, prev, prev+1, str(res))
if res[0][0] is True:
done = True
prev = prev + 1
else:
self.retries = self.retries + 1
                            # There are 2 conditions, so 2 reasons to fail: if we failed because the row with our
                            # worker ID already exists, it means we timed out earlier but our update did go in,
                            # so we consider this a success
prev = res[0][3]
if res[0][2] is not None:
if verbose:
print "[%3d] Update was inserted on previous try (res = %s)" % (self.wid, str(res))
done = True
except WriteTimeout as e:
if verbose:
print "[%3d] TIMEOUT (%s)" % (self.wid, str(e))
# This means a timeout: just retry, if it happens that our update was indeed persisted,
# we'll figure it out on the next run.
self.retries = self.retries + 1
except Exception as e:
if verbose:
print "[%3d] ERROR: %s" % (self.wid, str(e))
self.errors = self.errors + 1
done = True
i = i + 1
# Clean up for next iteration
while True:
try:
self.cursor.execute("DELETE FROM test WHERE k = 0 AND id = %d IF EXISTS" % self.wid)
break;
except WriteTimeout as e:
pass
nodes = self.cluster.nodelist()
workers = []
c = self.patient_cql_connection(nodes[0], version="3.0.0", keyspace='ks')
q = c.prepare("""
BEGIN BATCH
UPDATE test SET v = ? WHERE k = 0 IF v = ?;
INSERT INTO test (k, id) VALUES (0, ?) IF NOT EXISTS;
APPLY BATCH
""")
for n in range(0, threads):
workers.append(Worker(n, c, iterations, q))
start = time.time()
for w in workers:
w.start()
for w in workers:
w.join()
if verbose:
runtime = time.time() - start
print "runtime:", runtime
query = SimpleStatement("SELECT v FROM test WHERE k = 0", consistency_level=ConsistencyLevel.ALL)
rows = cursor.execute(query)
value = rows[0][0]
errors = 0
retries = 0
for w in workers:
errors = errors + w.errors
retries = retries + w.retries
assert (value == threads * iterations) and (errors == 0), "value=%d, errors=%d, retries=%d" % (value, errors, retries)
| apache-2.0 | -6,002,720,541,554,961,000 | 38.604396 | 126 | 0.516232 | false | 4.355287 | true | false | false |
hpparvi/PyTransit | notebooks/contamination/src/blendlpf.py | 1 | 3425 | # PyTransit: fast and easy exoplanet transit modelling in Python.
# Copyright (C) 2010-2019 Hannu Parviainen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from numpy import ceil, sqrt, where, inf
from matplotlib.pyplot import subplots
from pytransit.contamination import TabulatedFilter, Instrument, SMContamination
from pytransit.contamination.filter import sdss_g, sdss_r, sdss_i, sdss_z
from pytransit.lpf.cntlpf import PhysContLPF
from pytransit.param import NormalPrior as NP
from .mocklc import MockLC
class MockLPF(PhysContLPF):
def __init__(self, name: str, lc: MockLC):
super().__init__(name, passbands=lc.pb_names, times=lc.npb * [lc.time],
fluxes=list(lc.flux.T), pbids=list(range(lc.npb)))
self._lc = lc
self.know_host = lc.setup.know_host
self.misidentify_host = lc.setup.misidentify_host
self.hteff = lc.hteff if not self.misidentify_host else lc.cteff
self.cteff = lc.cteff
self.t0_bjd = 0.0
self.period = lc.p
self.sma = lc.a
self.inc = lc.i
self.k_apparent = lc.k_apparent
self.b = lc.b
self.set_prior(1, NP(lc.p, 1e-7))
if lc.setup.know_orbit:
self.set_prior(2, NP(5.0, 0.05))
self.set_prior(3, NP(lc.b, 0.01))
if lc.setup.know_host:
if lc.setup.misidentify_host:
self.set_prior(6, NP(self._lc.cteff, 10))
else:
self.set_prior(6, NP(self._lc.hteff, 10))
def _init_instrument(self):
"""Set up the instrument and contamination model."""
qe = TabulatedFilter('MockQE',
[300, 350, 500, 550, 700, 800, 1000, 1050],
[0.10, 0.20, 0.90, 0.96, 0.90, 0.75, 0.11, 0.05])
self.instrument = Instrument('MockInstrument', [sdss_g, sdss_r, sdss_i, sdss_z], (qe, qe, qe, qe))
self.cm = SMContamination(self.instrument, "i'")
self.lnpriors.append(lambda pv: where(pv[:, 4] < pv[:, 5], 0, -inf))
def plot_light_curves(self, ncols: int = 2, figsize: tuple = (13, 5)):
nrows = int(ceil(self.nlc) / ncols)
fig, axs = subplots(nrows, ncols, figsize=figsize, sharex='all', sharey='all', constrained_layout=True)
fmodel = self.flux_model(self.de.population)[self.de.minimum_index]
for i, ax in enumerate(axs.flat):
ax.plot(self.times[i], self.fluxes[i], '.', alpha=0.25)
ax.plot(self.times[i], fmodel[self.lcslices[i]], 'k')
def posterior_samples(self, burn: int = 0, thin: int = 1, include_ldc: bool = False):
df = super().posterior_samples(burn, thin, include_ldc)
df['k_app'] = sqrt(df.k2_app)
df['k_true'] = sqrt(df.k2_true)
df['cnt'] = 1. - df.k2_app / df.k2_true
return df
| gpl-2.0 | -3,812,059,101,771,225,600 | 41.283951 | 111 | 0.623358 | false | 3.060769 | false | false | false |
reimandlab/ActiveDriverDB | website/imports/sites/uniprot/importer.py | 1 | 8466 | import gzip
from abc import abstractmethod
from types import SimpleNamespace
from warnings import warn
from pandas import read_table, to_numeric, DataFrame, read_csv, concat
from helpers.parsers import parse_fasta_file
import imports.protein_data as importers
from imports.sites.site_importer import SiteImporter
class UniprotToRefSeqTrait:
default_mappings_path = 'data/HUMAN_9606_idmapping.dat.gz'
def __init__(self, mappings_path=None):
if not mappings_path:
mappings_path = self.default_mappings_path
self.mappings = self.load_mappings(mappings_path)
@staticmethod
def load_mappings(mappings_path):
header = ['uniprot', 'type', 'refseq']
mappings = read_table(mappings_path, names=header, converters={
# based on observations, if an accession is primary and
# there is only one splice variant, the sequence-related
# mappings are identified just as ACCESSION; if there are many
# splice variants, the canonical variant version is appended
# after a hyphen # (e.g. ACCESSION-4).
# Following converter appends '-1' to all accessions
# that have no hyphen to make the mapping easier.
'uniprot': lambda u: u if '-' in u else u + '-1'
}).query('type == "RefSeq_NT"')
mappings = mappings[mappings.refseq.str.startswith('NM_')]
# drop refseq version
mappings['refseq'], _ = mappings['refseq'].str.split('.', 1).str
mappings.dropna(inplace=True)
mappings = mappings.drop(columns=['type'])
# after removing refseq version, we might get duplicates
mappings = mappings.drop_duplicates()
return mappings
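    # Added illustration (accession values invented): a raw row such as
    # ('Q0XXXX', 'RefSeq_NT', 'NM_0000001.2') ends up as uniprot='Q0XXXX-1',
    # refseq='NM_0000001' -- hyphen-less accessions get '-1' appended by the
    # converter above and the RefSeq version suffix is stripped.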
def add_nm_refseq_identifiers(self, sites: DataFrame):
return sites.merge(
self.mappings,
left_on='sequence_accession',
right_on='uniprot'
)
class UniprotIsoformsTrait:
default_path_canonical = 'data/uniprot_sprot.fasta.gz'
default_path_splice = 'data/uniprot_sprot_varsplic.fasta.gz'
def __init__(
self, sprot_canonical_path=None,
sprot_splice_variants_path=None,
):
self.sequences = self.load_sequences(
sprot_canonical_path or self.default_path_canonical,
sprot_splice_variants_path or self.default_path_splice
)
@staticmethod
def load_sequences(canonical_path, splice_variants_path):
all_sequences = {}
groups = {'canonical': canonical_path, 'splice': splice_variants_path}
for isoform_group, path in groups.items():
sequences = {}
def append(protein_id, line):
sequences[protein_id] += line
def on_header(header):
protein_id = header.split('|')[1]
sequences[protein_id] = ''
return protein_id
parse_fasta_file(path, append, on_header, file_opener=gzip.open, mode='rt')
all_sequences[isoform_group] = sequences
return SimpleNamespace(**all_sequences)
def is_isoform_canonical(self, isoform: str) -> bool:
if isoform in self.sequences.splice:
return False
if isoform in self.sequences.canonical or isoform.endswith('-1'):
return True
def get_sequence_of_protein(self, site):
"""Return sequence of a protein on which the site is described.
Having no information describing which isoform is canonical
the best way to determine which isoform to use is to check if
an isoform is a splice variant; if it is not a splice variant,
we know that it has to be a canonical isoform.
"""
try:
return self.sequences.splice[site.sequence_accession]
except KeyError:
if hasattr(site, 'primary_accession'):
primary_accession = site.primary_accession
elif site.sequence_accession.endswith('-1'):
primary_accession = site.sequence_accession[:-2]
else:
return
try:
return self.sequences.canonical[primary_accession]
except KeyError:
warn(f'No sequence for {site.sequence_accession} found!')
class UniprotSequenceAccessionTrait:
def add_sequence_accession(self, sites):
self.mappings['is_canonical'] = self.mappings.uniprot.apply(self.is_isoform_canonical)
canonical_mapping = self.mappings.query('is_canonical == True')
canonical_mapping['protein_accession'], _ = canonical_mapping['uniprot'].str.split('-', 1).str
canonical_mapping.rename(columns={'uniprot': 'sequence_accession'}, inplace=True)
canonical_mapping.drop(columns=['refseq'], inplace=True)
canonical_mapping = canonical_mapping.drop_duplicates()
return sites.merge(canonical_mapping, on='protein_accession')
class UniprotImporter(UniprotToRefSeqTrait, UniprotIsoformsTrait, SiteImporter):
"""UniProt/SwissProt sites importer.
The data can be exported and downloaded using sparql: http://sparql.uniprot.org,
but for convenience check the pre-baked URLS in `download.sh`
Relevant terms definition are available at: http://www.uniprot.org/docs/ptmlist
The sparql code is available in `uniprot.sparql` file.
Only reviewed entries (SwissProt) are considered.
Many thanks to the author of https://www.biostars.org/p/261823/
for describing how to use sparql to export PTM data from UniProt.
Maps sites by isoform; fallback to gene names
can be implemented by altering sparql query.
"""
requires = {importers.proteins_and_genes, importers.sequences}
requires.update(SiteImporter.requires)
source_name = 'UniProt'
@property
@abstractmethod
def default_path(self) -> str:
"""Default path to the csv file with site data"""
def __init__(self, sprot_canonical_path=None, sprot_splice_variants_path=None, mappings_path=None):
SiteImporter.__init__(self)
UniprotToRefSeqTrait.__init__(self, mappings_path)
UniprotIsoformsTrait.__init__(self, sprot_canonical_path, sprot_splice_variants_path)
@abstractmethod
def extract_site_mod_type(self, sites: DataFrame) -> DataFrame:
"""Extract site type information into additional columns.
Following columns have to be returned: mod_type, residue.
"""
def filter_sites(self, sites: DataFrame) -> DataFrame:
# remove variant-specific modifications
sites = sites[~sites['modifiers'].str.contains('in variant', na=False)]
# and those which are not common
sites = sites[~sites['modifiers'].str.contains('atypical', na=False)]
# see: http://www.uniprot.org/help/evidences
# ECO_0000269 = Experimental evidence
sites = sites[sites['eco'] == 'ECO_0000269']
return sites
def load_sites(self, path=None, **filters):
if not path:
path = self.default_path
sites = read_csv(path)
sites.columns = [column.strip() for column in sites.columns]
sites.position = to_numeric(sites.position.str.replace(r'\^.*', ''))
extracted_data = self.extract_site_mod_type(sites)
if sites.source.any():
sites['pub_med_ids'] = (
sites.source.where(sites.source.str.match(r'http://purl.uniprot.org/citations/\d+$'))
.str.replace('http://purl.uniprot.org/citations/', '')
)
sites['pub_med_ids'] = sites['pub_med_ids'].apply(lambda x: [int(x)] if x == x else None)
else:
warn('No site source data')
sites['pub_med_ids'] = None
sites.drop(columns=['data', 'source'], inplace=True)
sites = concat([sites, extracted_data], axis=1)
sites = self.filter_sites(sites)
# only chosen site types
sites = sites[sites.mod_type.isin(self.site_types)]
# map uniprot to refseq:
sites = self.add_nm_refseq_identifiers(sites)
mapped_sites = self.map_sites_to_isoforms(sites)
return self.create_site_objects(mapped_sites, ['refseq', 'position', 'residue', 'mod_type', 'pub_med_ids'])
def repr_site(self, site):
return f'{site.sequence_accession}: ' + super().repr_site(site)
@staticmethod
def split_kinases(kinases):
return kinases.str.split(' (?:and|AND|or|OR) ')
| lgpl-2.1 | -4,429,928,574,217,603,600 | 33.983471 | 115 | 0.637491 | false | 3.887052 | false | false | false |
shadyueh/pyranking | ranking/settings.py | 1 | 3310 | """
Django settings for ranking project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^@-4*3*_y!92n8*hq)!ouv0!%%crivp2ko#q))#tfi&fcb-b=3'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework'
]
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAdminUser',),
'PAGE_SIZE': 100
}
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'ranking.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ranking.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| mit | 4,699,871,452,040,584,000 | 25.062992 | 91 | 0.687009 | false | 3.525027 | false | false | false |
rokgerzelj/taskifier-cortex | model.py | 1 | 2685 | '''
Sentence model, inspired by the Keras VAE tutorial.
Reference: "Auto-Encoding Variational Bayes" https://arxiv.org/abs/1312.6114
'''
import numpy as np
import tensorflow as tf
from keras.layers import Input, Embedding, Dense, Lambda, LSTM, RepeatVector, TimeDistributed
from keras.models import Model
from keras import backend as K
from keras import objectives
from keras.preprocessing import text, sequence
from utils import load_data, to_one_hot
batch_size = 30
nb_epoch = 50
max_sequence_len = 25 # Required for tensorflow!
lstm_state_dim = 256
latent_dim = 128
intermediate_dim = 256
text_input = Input(shape=(max_sequence_len,), dtype='int32', name='text_input')
embedding = Embedding(1000, 64, input_length=max_sequence_len, mask_zero=True, dropout=0.3)(text_input)
rnn_encoded = LSTM(lstm_state_dim, dropout_W=0.3)(embedding)
h = Dense(intermediate_dim, activation='relu')(rnn_encoded)
z_mean = Dense(latent_dim, name="z_mean_dense")(h)
z_log_var = Dense(latent_dim, name="z_log_var_dense")(h)
def sampling(args):
z_mean, z_log_var = args
epsilon = K.random_normal(shape=(batch_size, latent_dim), mean=0.)
return z_mean + K.exp(z_log_var / 2) * epsilon
z = Lambda(sampling)([z_mean, z_log_var])
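# Added note: `sampling` is the VAE reparameterization trick -- z is drawn as
# z_mean + exp(z_log_var / 2) * epsilon with epsilon ~ N(0, I), so the random
# draw stays differentiable with respect to the encoder outputs.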
h_decoded = Dense(intermediate_dim, activation='relu', name="h_decoded_dense", input_shape=(latent_dim,))(z)
x_decoded_mean = Dense(lstm_state_dim, activation='relu', name="x_decoded_mean_dense")(h_decoded)
rnn_decoded = LSTM(1000, return_sequences=True, dropout_W=0.3)(RepeatVector(max_sequence_len)(x_decoded_mean))
text_output = TimeDistributed(Dense(1000, activation='softmax'))(rnn_decoded)
def vae_loss(text_true_onehot, text_predicted_onehot):
xent_loss = K.mean(objectives.categorical_crossentropy(text_true_onehot, text_predicted_onehot), axis=-1)
kl_loss = - 0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
return xent_loss + kl_loss
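# Added note: the objective is the usual VAE loss -- per-timestep categorical
# cross-entropy (reconstruction) plus the KL divergence of the approximate
# posterior N(z_mean, exp(z_log_var)) from the standard-normal prior.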
train, dev, scores = load_data()
x_train = [text.one_hot(x, n=1000, lower=True, split=" ") for x in train[0]]
x_train = sequence.pad_sequences(x_train, maxlen=max_sequence_len).astype(int)
#print(x_train[:2])
x_train_one_hot = np.asarray(list(map(to_one_hot, x_train)))
#print(x_train_one_hot[:2])
vae = Model(text_input, text_output)
vae.compile(optimizer='adam', loss=vae_loss, metrics=['accuracy'])
vae.fit(x_train, x_train_one_hot,
shuffle=True,
nb_epoch=nb_epoch,
batch_size=batch_size)
vae.save("model-latest.keras")
#vae.evaluate(x_test, x_test, batch_size=256)
# build a model to project inputs on the latent space
#encoder = Model(text_input, z_mean)
| mit | 9,200,981,815,651,468,000 | 32.87013 | 110 | 0.692365 | false | 2.853348 | false | false | false |
zygmuntz/kaggle-bestbuy_big | train.py | 1 | 1402 | 'http://fastml.com/best-buy-mobile-contest-big-data/'
import sys, csv, re
from collections import defaultdict
def prepare( query ):
query = re.sub( r'\W', '', query )
query = query.lower()
return query
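# Added note: the script counts which SKU each normalised training query led
# to, then answers each test query with its most frequent SKUs, padded with
# the popularity benchmark and truncated to five suggestions.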
input_file = sys.argv[1]
test_file = sys.argv[2]
benchmark_file = sys.argv[3]
output_file = sys.argv[4]
i = open( input_file )
reader = csv.reader( i )
t = open( test_file )
b = open( benchmark_file )
headers = reader.next()
mapping = defaultdict( lambda: {} )
counter = 0
for line in reader:
query = prepare( line[3] )
sku = line[1]
# print "%s -> %s" % ( query, sku )
try:
mapping[query][sku] += 1
except KeyError:
mapping[query][sku] = 1
counter += 1
if counter % 100000 == 0:
print counter
reader = csv.reader( t )
headers = reader.next()
bench_reader = csv.reader( b, delimiter = " " )
headers = bench_reader.next()
o = open( output_file, 'wb' )
writer = csv.writer( o, delimiter = " " )
n = 0
m = 0
for line in reader:
n += 1
query = prepare( line[2] )
popular_skus = bench_reader.next()
if query in mapping:
m += 1
skus = []
for sku in sorted( mapping[query], key=mapping[query].get, reverse = True ):
skus.append( sku )
skus.extend( popular_skus )
skus = skus[0:5]
else:
skus = popular_skus
writer.writerow( skus )
# counter
if n % 10000 == 0:
print n
print "Used mapping in %s / %s (%s)" % ( m, n, 1.0 * m / n ) | mpl-2.0 | -5,028,400,967,993,399,000 | 16.759494 | 78 | 0.616262 | false | 2.660342 | false | false | false |
NSLS-II-CHX/ipython_ophyd | startup/10-optics.py | 2 | 10609 | from ophyd import (EpicsMotor, PVPositioner, Device, EpicsSignal,
EpicsSignalRO,PVPositionerPC)
from ophyd import (Component as Cpt, FormattedComponent,
DynamicDeviceComponent as DDC)
#gap
#und_gap = 'SR:C11-ID:G1{IVU20:1-Mtr:2}' #SR:C11-ID:G1{IVU20:1-Mtr:2}Inp:Pos ??
class MotorCenterAndGap(Device):
"Center and gap using Epics Motor records"
xc = Cpt(EpicsMotor, '-Ax:XCtr}Mtr')
yc = Cpt(EpicsMotor, '-Ax:YCtr}Mtr')
xg = Cpt(EpicsMotor, '-Ax:XGap}Mtr')
yg = Cpt(EpicsMotor, '-Ax:YGap}Mtr')
@property
def hints(self):
fields = []
for name in self.component_names:
motor = getattr(self, name)
fields.extend(motor.hints['fields'])
return {'fields': fields}
class VirtualGap(PVPositioner):
readback = Cpt(EpicsSignalRO, 't2.C')
setpoint = Cpt(EpicsSignal, 'size')
done = Cpt(EpicsSignalRO, 'DMOV')
done_value = 1
class VirtualCenter(PVPositioner):
readback = Cpt(EpicsSignalRO, 't2.D')
setpoint = Cpt(EpicsSignal, 'center')
done = Cpt(EpicsSignalRO, 'DMOV')
done_value = 1
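# Added note: these PVPositioners expose the virtual slit axes through a
# read-only readback PV and a writable setpoint PV; a move is considered
# finished when the DMOV ("done moving") PV reads 1, mimicking a real motor.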
class VirtualMotorCenterAndGap(Device):
"Center and gap with virtual motors"
xc = Cpt(VirtualCenter, '-Ax:X}')
yc = Cpt(VirtualCenter, '-Ax:Y}')
xg = Cpt(VirtualGap, '-Ax:X}')
yg = Cpt(VirtualGap, '-Ax:Y}')
class Blades(Device):
top = Cpt(EpicsMotor, '-Ax:T}Mtr')
bottom = Cpt(EpicsMotor, '-Ax:B}Mtr')
outboard = Cpt(EpicsMotor, '-Ax:O}Mtr')
inboard = Cpt(EpicsMotor, '-Ax:I}Mtr')
class MotorSlits(Blades, MotorCenterAndGap):
"combine t b i o and xc yc xg yg"
pass
class VirtualMotorSlits(Blades, VirtualMotorCenterAndGap):
"combine t b i o and xc yc xg yg"
# def __init__(self, *args, **kwargs):
# super().__init__(*args, **kwargs)
# self.xc.readback.name = self.name
# self.yc.readback.name = self.name
# self.xg.readback.name = self.name
pass
class XYMotor(Device):
x = Cpt(EpicsMotor, '-Ax:X}Mtr')
y = Cpt(EpicsMotor, '-Ax:Y}Mtr')
class XYThetaMotor(XYMotor):
"used for GI mirror"
th = Cpt(EpicsMotor, '-Ax:Th}Mtr')
class HorizontalDiffractionMirror(XYMotor):
"x and y with pitch, which has different read and write PVs"
#p = FormattedComponent(EpicsSignal, read_pv='{self.prefix}-Ax:P}}E-I', write_pv='{self.prefix}-Ax:P}}E-SP', add_prefix=('read_pv', 'write_pv', 'suffix'))
p = FormattedComponent(EpicsSignal, read_pv='{self.prefix}-Ax:P}}Pos-I', write_pv='{self.prefix}-Ax:P}}PID-SP', add_prefix=('read_pv', 'write_pv', 'suffix'))
# for some reason we cannot scan on E-SP. This is the actual piezo voltage (max 100) while our 'usual values' are converted to urad by some other layer of logic in the ioc
# the current SP is the input of the PID feedback loop. This requires the feedback loop to be turned ON
class DCM(Device):
en = Cpt(EpicsMotor, '-Ax:Energy}Mtr')
b = Cpt(EpicsMotor, '-Ax:B}Mtr')
r = Cpt(EpicsMotor, '-Ax:R}Mtr')
x = Cpt(EpicsMotor, '-Ax:X}Mtr')
fp = Cpt(EpicsMotor, '-Ax:FP}Mtr')
p = Cpt(EpicsMotor, '-Ax:P}Mtr')
class SAXSBeamStop( Device):
x = Cpt( EpicsMotor, '-Ax:X}Mtr' )
y1 = Cpt( EpicsMotor, '-Ax:YFT}Mtr')
x2 = Cpt( EpicsMotor, '-Ax:XFB}Mtr')
y2 = Cpt( EpicsMotor, '-Ax:YFB}Mtr')
@property
def hints(self):
fields = []
for name in self.component_names:
motor = getattr(self, name)
fields.extend(motor.hints['fields'])
return {'fields': fields}
class DMM(Device):
# en = Cpt(EpicsMotor, '-Ax:Energy}Mtr')
b = Cpt(EpicsMotor, '-Ax:B}Mtr')
r = Cpt(EpicsMotor, '-Ax:R}Mtr')
x = Cpt(EpicsMotor, '-Ax:X}Mtr')
y = Cpt(EpicsMotor, '-Ax:Y}Mtr')
fp = Cpt(EpicsMotor, '-Ax:FP}Mtr')
class Transfocator(Device):
crl = DDC({'num%d' % i: (EpicsMotor, '%d-Ax:X}Mtr' % i, {})
for i in range(1, 9)})
x = Cpt(EpicsMotor, 'Ves-Ax:X}Mtr')
y = Cpt(EpicsMotor, 'Ves-Ax:Y}Mtr')
z = Cpt(EpicsMotor, 'Ves-Ax:Z}Mtr')
ph = Cpt(EpicsMotor, 'Ves-Ax:Ph}Mtr')
th = Cpt(EpicsMotor, 'Ves-Ax:Th}Mtr')
class Kinoform(Device):
z = Cpt(EpicsMotor, '-Ax:ZB}Mtr')
x = Cpt(EpicsMotor, '-Ax:XB}Mtr')
y = Cpt(EpicsMotor, '-Ax:YB}Mtr')
chi = Cpt(EpicsMotor, '-Ax:Ch}Mtr')
theta = Cpt(EpicsMotor, '-Ax:Th}Mtr')
phi = Cpt(EpicsMotor, '-Ax:Ph}Mtr')
lx = Cpt(EpicsMotor, '-Ax:XT}Mtr')
ly = Cpt(EpicsMotor, '-Ax:YT}Mtr')
class SmarPod_x(PVPositionerPC):
readback = Cpt(EpicsSignalRO, '-Ax:1}Pos-I')
setpoint = Cpt(EpicsSignal, '-Ax:1}Pos-SP')
actuate = Cpt(EpicsSignal, '}Move-Cmd')
actuate_value = 1
smp_x = SmarPod_x('XF:11IDB-ES{SPod:1',name='smp_x')
smp_x.readback.name = 'smp_x'
class SmarPod_y(PVPositionerPC):
readback = Cpt(EpicsSignalRO, '-Ax:3}Pos-I')
setpoint = Cpt(EpicsSignal, '-Ax:3}Pos-SP')
actuate = Cpt(EpicsSignal, '}Move-Cmd')
actuate_value = 1
smp_y = SmarPod_y('XF:11IDB-ES{SPod:1',name='smp_y')
smp_y.readback.name = 'smp_y'
class SmarPod_z(PVPositionerPC):
readback = Cpt(EpicsSignalRO, '-Ax:2}Pos-I')
setpoint = Cpt(EpicsSignal, '-Ax:2}Pos-SP')
actuate = Cpt(EpicsSignal, '}Move-Cmd')
actuate_value = 1
smp_z = SmarPod_z('XF:11IDB-ES{SPod:1',name='smp_z')
smp_z.readback.name = 'smp_z'
class SmarPod_rx(PVPositionerPC):
readback = Cpt(EpicsSignalRO, '-Ax:1}Rot-I')
setpoint = Cpt(EpicsSignal, '-Ax:1}Rot-SP')
actuate = Cpt(EpicsSignal, '}Move-Cmd')
actuate_value = 1
smp_rx = SmarPod_rx('XF:11IDB-ES{SPod:1',name='smp_rx')
smp_rx.readback.name = 'smp_rx'
class SmarPod_ry(PVPositionerPC):
readback = Cpt(EpicsSignalRO, '-Ax:3}Rot-I')
setpoint = Cpt(EpicsSignal, '-Ax:3}Rot-SP')
actuate = Cpt(EpicsSignal, '}Move-Cmd')
actuate_value = 1
smp_ry = SmarPod_ry('XF:11IDB-ES{SPod:1',name='smp_ry')
smp_ry.readback.name = 'smp_ry'
class SmarPod_rz(PVPositionerPC):
readback = Cpt(EpicsSignalRO, '-Ax:2}Rot-I')
setpoint = Cpt(EpicsSignal, '-Ax:2}Rot-SP')
actuate = Cpt(EpicsSignal, '}Move-Cmd')
actuate_value = 1
smp_rz = SmarPod_rz('XF:11IDB-ES{SPod:1',name='smp_rz')
smp_rz.readback.name = 'smp_rz'
class Diffractometer(Device):
Del= Cpt( EpicsMotor, '-Ax:Del}Mtr')
gam = Cpt(EpicsMotor, '-Ax:Gam}Mtr')
om = Cpt(EpicsMotor, '-Ax:Om}Mtr')
phi = Cpt(EpicsMotor, '-Ax:Ph}Mtr')
xb = Cpt(EpicsMotor, '-Ax:XB}Mtr')
yb = Cpt(EpicsMotor, '-Ax:YB}Mtr')
chh = Cpt(EpicsMotor, '-Ax:ChH}Mtr')
thh = Cpt(EpicsMotor, '-Ax:ThH}Mtr')
phh = Cpt(EpicsMotor, '-Ax:PhH}Mtr')
xh = Cpt(EpicsMotor, '-Ax:XH}Mtr')
yh = Cpt(EpicsMotor, '-Ax:YH2}Mtr')
zh = Cpt(EpicsMotor, '-Ax:ZH}Mtr')
chv = Cpt(EpicsMotor, '-Ax:ChV}Mtr')
thv = Cpt(EpicsMotor, '-Ax:ThV}Mtr')
xv = Cpt(EpicsMotor, '-Ax:XV}Mtr')
yv = Cpt(EpicsMotor, '-Ax:YV}Mtr')
zv = Cpt(EpicsMotor, '-Ax:ZV}Mtr')
xv2 = Cpt(EpicsMotor, '-Ax:XV2}Mtr')
@property
def hints(self):
fields = []
for name in self.component_names:
motor = getattr(self, name)
fields.extend(motor.hints['fields'])
return {'fields': fields}
class XBPM( Device):
vt = Cpt( EpicsSignal, 'CtrlDAC:BLevel-SP' )
xBPM =XBPM( 'XF:11IDB-BI{XBPM:02}', name = 'xBPM' )
diff = Diffractometer('XF:11IDB-ES{Dif', name='diff')
# sample beamstop
#sambst = XYMotor('XF:11IDB-OP{BS:Samp', name='sambst')
s1 = MotorCenterAndGap('XF:11IDB-OP{Slt:1', name='s1')
k1 = Kinoform('XF:11IDB-OP{Lens:1', name='k1') # upstream
k2 = Kinoform('XF:11IDB-OP{Lens:2', name='k2') # downstream
gi = XYThetaMotor('XF:11IDB-OP{Mir:GI', name='gi') # GI-mirror
s2 = MotorCenterAndGap('XF:11IDB-OP{Slt:2', name='s2') #Beam-defining (large JJ) slits
pbs = MotorSlits('XF:11IDA-OP{Slt:PB', name='pbs') # pink beam slits
flt_y = EpicsMotor('XF:11IDA-OP{Flt:1-Ax:Y}Mtr', name='flt_y') # filters
dcm = DCM('XF:11IDA-OP{Mono:DCM', name='dcm') #, check position, e.g., by dcm.b.user_readback.value
dmm = DMM('XF:11IDA-OP{Mono:DMM', name='dmm')
mbs = VirtualMotorSlits('XF:11IDA-OP{Slt:MB', name='mbs') # Mono-beam Slits, check position, e.g., by mbs.xc.readback.value
tran= Transfocator('XF:11IDA-OP{Lens:', name='tran') # Transfocator
s4 = MotorCenterAndGap('XF:11IDB-ES{Slt:4', name='s4') # temp guard slits
fsh_x=EpicsMotor('XF:11IDB-OP{FS:1-Ax:X}Mtr', name='fsh_x') # fast shutter positioner: X
fsh_y=EpicsMotor('XF:11IDB-OP{FS:1-Ax:Y}Mtr', name='fsh_y') # fast shutter positioner: Y
#smp =SmarPod('XF:11IDB-ES{SPod:1-',name='smp') # SmarPod
# Diagnostic Manipulators
foil_y = EpicsMotor('XF:11IDA-BI{Foil:Bpm-Ax:Y}Mtr', name='foil_y')
# foil_x for DBPM (note foil_y is for a different device, perhaps we should rename ...)
foil_x = EpicsMotor('XF:11IDB-OP{Mon:Foil-Ax:X}Mtr', name='foil_x')
#Sample chamber smaract linear stages
# Note crazy names only for Julien!!!
#amp = XYMotor('XF:11IDB-OP{BS:Sam', name='amp')
class amp_motor(Device):
#x = EpicsMotor('XF:11IDB-OP{BS:Sam-Ax:X}Mtr')
ampx = EpicsSignal('XF:11IDB-OP{BS:Samp-Ax:X}Mtr.VAL', name='ampx')
ampy = EpicsSignal('XF:11IDB-OP{Stg:Samp-Ax:Phi}Mtr.VAL', name='ampy')
ampz = EpicsSignal('XF:11IDB-OP{BS:Samp-Ax:Y}Mtr.VAL', name='ampz')
#caput('XF:11IDB-ES{Det:Eig4M}cam1:NumImages', fnum )
# SAXS table: WAXS section rotation
SAXS_x1 = EpicsMotor('XF:11IDB-ES{Tbl:SAXS-Ax:X1}Mtr',name='SAXS_x1')
SAXS_x2 = EpicsMotor('XF:11IDB-ES{Tbl:SAXS-Ax:X2}Mtr',name='SAXS_x2')
# Note inconsistency in capitalization of Bpm/BPM below.
bpm1 = XYMotor('XF:11IDA-BI{Bpm:1', name='bpm1')
bpm2 = XYMotor('XF:11IDB-BI{BPM:2', name='bpm2')
w1 = XYMotor('XF:11IDB-OP{Win:1', name='w1') # window positioners
hdm = HorizontalDiffractionMirror('XF:11IDA-OP{Mir:HDM', name='hdm')
gsl = VirtualMotorCenterAndGap('XF:11IDB-OP{Slt:Guard', name='gsl') #Guard rSlits (SmarAct)
#gsl = VirtualMotorSlits('XF:11IDB-OP{Slt:Guard', name='gsl') #Guard rSlits (SmarAct)
#SAXS beam stop
saxs_bst = SAXSBeamStop( 'XF:11IDB-ES{BS:SAXS', name = 'saxs_bst' )
#To solve the "KeyError Problem" when doing dscan and trying to save to a spec file, Y.G., 20170110
gsl.xc.readback.name = 'gsl_xc'
gsl.yc.readback.name = 'gsl_yc'
gsl.xg.readback.name = 'gsl_xg'
gsl.yg.readback.name = 'gsl_yg'
mbs.xc.readback.name = 'mbs_xc'
mbs.yc.readback.name = 'mbs_yc'
mbs.xg.readback.name = 'mbs_xg'
mbs.yg.readback.name = 'mbs_yg'
fe = VirtualMotorCenterAndGap('FE:C11A-OP{Slt:12', name='fe') # Front End Slits (Primary Slits)
fe.xc.readback.name = 'fe_xc'
fe.yc.readback.name = 'fe_yc'
fe.xg.readback.name = 'fe_xg'
fe.yg.readback.name = 'fe_yg'
| bsd-2-clause | -1,814,696,963,466,167,600 | 33.898026 | 175 | 0.639551 | false | 2.339876 | false | false | false |
zergov/flashcards | tests/test_study.py | 1 | 2997 | import unittest
import mock
from flashcards.sets import StudySet
from flashcards.cards import StudyCard
from flashcards import study
from flashcards.study import BaseStudySession
from flashcards.study import ShuffledStudySession
def create_study_set():
""" Create a simple study set for test purposes. """
cards = [
StudyCard('2 + 2 = ?', '4'),
StudyCard('2 + 3 = ?', '5'),
StudyCard('2 + 4 = ?', '6'),
StudyCard('2 + 5 = ?', '7')
]
study_set = StudySet('Basic Maths')
study_set._cards = cards
return study_set
def create_cards_list():
""" Create a simple list of cards for test purposes. """
cards = [
StudyCard('2 + 2 = ?', '4'),
StudyCard('2 + 3 = ?', '5'),
StudyCard('2 + 4 = ?', '6'),
StudyCard('2 + 5 = ?', '7')
]
return cards
class TestGetStudySessionTemplate(unittest.TestCase):
def test_get_study_session_template_default(self):
        mode = 'awdiowad' # Something nonsensical that is not in the mode options
session = study.get_study_session_template(mode)
self.assertIsInstance(session, BaseStudySession)
def test_get_study_session_template_None_input(self):
mode = None # user did not supply any option --mode'
session = study.get_study_session_template(mode)
self.assertIsInstance(session, BaseStudySession)
def test_get_study_session_template_basic(self):
mode = 'linear' # user entered `linear` as --mode option.'
session = study.get_study_session_template(mode)
self.assertIsInstance(session, BaseStudySession)
def test_get_study_session_template_shuffled(self):
mode = 'shuffled' # user entered `shuffled` as --mode option.
session = study.get_study_session_template(mode)
self.assertIsInstance(session, ShuffledStudySession)
class TestBasicStudyStrategy(unittest.TestCase):
def test_studySession_start(self):
mock_show_question = mock.Mock()
mock_show_answer = mock.Mock()
study_set = create_study_set()
session = BaseStudySession()
session.show_question = mock_show_question
session.show_answer = mock_show_answer
session.start(study_set)
self.assertEqual(4, mock_show_question.call_count)
self.assertEqual(4, mock_show_answer.call_count)
class TestShuffledStudyStrategy(unittest.TestCase):
@mock.patch('flashcards.study.random.shuffle')
def test_cards_are_shuffled(self, mock_shuffle):
mock_show_question = mock.Mock()
mock_show_answer = mock.Mock()
study_set = create_study_set()
session = ShuffledStudySession()
session.show_question = mock_show_question
session.show_answer = mock_show_answer
session.start(study_set)
self.assertEqual(1, mock_shuffle.call_count)
self.assertEqual(4, mock_show_question.call_count)
self.assertEqual(4, mock_show_answer.call_count)
| mit | 3,765,280,307,964,986,000 | 27.817308 | 79 | 0.650651 | false | 3.628329 | true | false | false |
VitalPet/c2c-rd-addons | survey_multi_lang/survey.py | 4 | 3889 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 ChriCar Beteiligungs- und Beratungs- GmbH (<http://www.camptocamp.at>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
def _lang_get(self, cr, uid, context=None):
lang_pool = self.pool.get('res.lang')
ids = lang_pool.search(cr, uid, [], context=context)
res = lang_pool.read(cr, uid, ids, ['code', 'name'], context)
return [(r['code'], r['name']) for r in res]
class survey_survey(osv.osv):
_inherit = 'survey.survey'
_columns = {
'title': fields.char('Survey Title', size=255, required=1, translate=True),
'note': fields.text('Description', translate=True),
'lang': fields.selection(_lang_get, 'Language to print',
help="If the selected language is loaded in the system, the survey will be printed in this language."),
}
survey_survey()
class survey_page(osv.osv):
_inherit = 'survey.page'
_columns = {
'title': fields.char('Page Title', size=255, required=1, translate=True),
'note': fields.text('Description', translate=True),
}
survey_page()
class survey_question(osv.osv):
_inherit = 'survey.question'
_columns = {
'question': fields.char('Question', size=255, required=1, translate=True),
'req_error_msg': fields.text('Error Message', translate=True),
'descriptive_text': fields.text('Descriptive Text', size=255, translate=True),
'comment_label': fields.char('Field Label', size = 255, translate=True),
'comment_valid_err_msg': fields.text('Error message', translate=True),
'make_comment_field_err_msg': fields.text('Error message', translate=True),
'validation_valid_err_msg': fields.text('Error message', translate=True),
'numeric_required_sum_err_msg': fields.text('Error message', translate=True),
'column_name': fields.char('Column Name',size=256, translate=True),
}
survey_question()
# FIXME 20140923 - class missing
#class survey_question_column_heading(osv.osv):
# _inherit = 'survey.question.column.heading'
#
# _columns = {
# 'title': fields.char('Column Heading', size=128, required=1, translate=True),
# 'menu_choice': fields.text('Menu Choice', translate=True),
# }
#
#survey_question_column_heading()
#class survey_answer(osv.osv):
# _inherit = 'survey.answer'
#
# _columns = {
# 'answer': fields.char('Answer', size=255, required=1, translate=True),
# 'menu_choice': fields.text('Menu Choices', translate=True),
# 'question_answer_int': fields.integer('Question Answer ID unique'),
# }
#
#survey_answer()
#class survey_response_line(osv.osv):
# _inherit = 'survey.response.line'
#
# _columns = {
# 'comment': fields.text('Notes', translate=True),
# 'single_text': fields.char('Text', size=255, translate=True),
# }
#
#survey_response_line()
| agpl-3.0 | -3,906,373,471,924,969,000 | 38.282828 | 115 | 0.620468 | false | 3.648218 | false | false | false |
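Illustration (not part of the dataset rows above or below): in the survey_multi_lang row, _lang_get is the callable handed to fields.selection, so the 'lang' dropdown is built from (code, name) pairs read from res.lang. A minimal, hedged sketch of that pattern using stubbed records instead of a real OpenERP pool — the function name and sample data below are hypothetical:
def _lang_get_sketch(records):
    # Mirrors the list comprehension in _lang_get above: selection pairs of (code, name).
    return [(r['code'], r['name']) for r in records]
# Stubbed res.lang records -- purely hypothetical sample data.
langs = [{'code': 'en_US', 'name': 'English'},
         {'code': 'de_DE', 'name': 'German / Deutsch'}]
print(_lang_get_sketch(langs))  # [('en_US', 'English'), ('de_DE', 'German / Deutsch')]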
angr/angr | angr/analyses/decompiler/optimization_passes/__init__.py | 1 | 1733 | # pylint:disable=import-outside-toplevel
from typing import Optional, Union
from archinfo import Arch
from .optimization_pass import OptimizationPassStage
from .stack_canary_simplifier import StackCanarySimplifier
from .base_ptr_save_simplifier import BasePointerSaveSimplifier
from .multi_simplifier import MultiSimplifier
from .div_simplifier import DivSimplifier
from .mod_simplifier import ModSimplifier
from .eager_returns import EagerReturnsSimplifier
from .const_derefs import ConstantDereferencesSimplifier
from .register_save_area_simplifier import RegisterSaveAreaSimplifier
_all_optimization_passes = [
(RegisterSaveAreaSimplifier, True),
(StackCanarySimplifier, True),
(BasePointerSaveSimplifier, True),
(EagerReturnsSimplifier, True),
(DivSimplifier, True),
(MultiSimplifier, True),
(ModSimplifier, True),
(ConstantDereferencesSimplifier, True),
]
def get_optimization_passes(arch, platform):
if isinstance(arch, Arch):
arch = arch.name
if platform is not None:
platform = platform.lower()
passes = [ ]
for pass_, _ in _all_optimization_passes:
if arch in pass_.ARCHES and (platform is None or platform in pass_.PLATFORMS):
passes.append(pass_)
return passes
def get_default_optimization_passes(arch: Union[Arch,str], platform: Optional[str]):
if isinstance(arch, Arch):
arch = arch.name
if platform is not None:
platform = platform.lower()
passes = [ ]
for pass_, default in _all_optimization_passes:
if not default:
continue
if arch in pass_.ARCHES and (platform is None or platform in pass_.PLATFORMS):
passes.append(pass_)
return passes
| bsd-2-clause | -8,471,061,842,043,066,000 | 28.372881 | 86 | 0.720138 | false | 3.687234 | false | false | false |
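Illustration (not part of the dataset rows): the two helpers in the angr optimization_passes row above filter passes by each pass class's ARCHES and PLATFORMS attributes. A hedged usage sketch, assuming angr and archinfo are installed and importable:
import archinfo
from angr.analyses.decompiler.optimization_passes import (
    get_optimization_passes,
    get_default_optimization_passes,
)
arch = archinfo.ArchAMD64()                          # or simply the string "AMD64"
all_passes = get_optimization_passes(arch, "Linux")  # platform is lower-cased before matching
default_passes = get_default_optimization_passes("AMD64", "linux")
print([p.__name__ for p in default_passes])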
edx/ecommerce-worker | ecommerce_worker/cache.py | 1 | 1699 | """
This file contains a primitive cache
"""
from __future__ import absolute_import
import threading
import time
lock = threading.Lock() # pylint: disable=invalid-name
class CacheObject:
"""Object saved in cache"""
def __init__(self, value, duration):
self.value = value
self.expire = time.time() + duration
class Cache(dict):
"""
Primitive key/value cache. Entries are kept in a dict with an expiration.
When a get of an expired entry is done, the cache is cleaned of all expired entries.
Locking is used for thread safety
"""
def get(self, key):
"""Get an object from the cache
Arguments:
key (str): Cache key
Returns:
Cached object
"""
lock.acquire()
try:
if key not in self:
return None
current_time = time.time()
if self[key].expire > current_time:
return self[key].value
# expired key, clean out all expired keys
deletes = []
for k, val in self.items():
if val.expire <= current_time:
deletes.append(k)
for k in deletes:
del self[k]
return None
finally:
lock.release()
def set(self, key, value, duration):
"""Save an object in the cache
Arguments:
key (str): Cache key
value (object): object to cache
duration (int): time in seconds to keep object in cache
"""
lock.acquire()
try:
self[key] = CacheObject(value, duration)
finally:
lock.release()
| agpl-3.0 | -3,115,333,913,301,735,400 | 24.358209 | 88 | 0.537964 | false | 4.591892 | false | false | false |
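Illustration (not part of the dataset rows): a hedged usage sketch of the Cache class from the ecommerce_worker/cache.py row above, exercising the set/get round trip and the expiry-on-read behaviour its docstring describes; the key and value are hypothetical:
import time
from ecommerce_worker.cache import Cache
cache = Cache()
cache.set('jwt', 'abc123', duration=2)  # keep the value for ~2 seconds
assert cache.get('jwt') == 'abc123'     # fresh entries are returned
time.sleep(2.1)
assert cache.get('jwt') is None         # expired entries are evicted on read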