import time
# from mufsim.errors import MufRuntimeError
from mufsim.insts.base import Instruction, instr
@instr("date")
class InstDate(Instruction):
def execute(self, fr):
when = time.localtime()
fr.data_push(int(when.tm_mday))
fr.data_push(int(when.tm_mon))
fr.data_push(int(when.tm_year))
@instr("time")
class InstTime(Instruction):
def execute(self, fr):
when = time.localtime()
fr.data_push(int(when.tm_sec))
fr.data_push(int(when.tm_min))
fr.data_push(int(when.tm_hour))
@instr("gmtoffset")
class InstGmtOffset(Instruction):
def execute(self, fr):
fr.data_push(-time.timezone)
@instr("timesplit")
class InstTimeSplit(Instruction):
def execute(self, fr):
secs = fr.data_pop(int)
when = time.localtime(secs)
fr.data_push(int(when.tm_sec))
fr.data_push(int(when.tm_min))
fr.data_push(int(when.tm_hour))
fr.data_push(int(when.tm_mday))
fr.data_push(int(when.tm_mon))
fr.data_push(int(when.tm_year))
fr.data_push(int(when.tm_wday) + 1)
fr.data_push(int(when.tm_yday))
@instr("timefmt")
class InstTimeFmt(Instruction):
def execute(self, fr):
fr.check_underflow(2)
when = fr.data_pop(int)
fmt = fr.data_pop(str)
when = time.localtime(when)
fr.data_push(time.strftime(fmt, when))
@instr("systime")
class InstSysTime(Instruction):
def execute(self, fr):
fr.data_push(int(time.time()))
@instr("systime_precise")
class InstSysTimePrecise(Instruction):
def execute(self, fr):
fr.data_push(float(time.time()))
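# Illustrative stack effects (not part of the original file), assuming a
# MufSim frame `fr` with a standard data stack:
#   "date"      ( -- day month year )
#   "time"      ( -- secs mins hours )
#   "timesplit" ( systime -- secs mins hours day month year weekday yearday )
#   "timefmt"   ( fmt systime -- str )   e.g. "%Y-%m-%d" 0 timefmt
#   "systime"   ( -- epoch-seconds int ); "systime_precise" pushes a float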
# vim: expandtab tabstop=4 shiftwidth=4 softtabstop=4 nowrap
| {
"content_hash": "53144d39a2c3329aa194949830f1afd5",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 60,
"avg_line_length": 25.235294117647058,
"alnum_prop": 0.6317016317016317,
"repo_name": "revarbat/mufsim",
"id": "f56dcdf173ea668782ad8e708aae523b012338d0",
"size": "1716",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mufsim/insts/timedate.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Forth",
"bytes": "30297"
},
{
"name": "Makefile",
"bytes": "1027"
},
{
"name": "Python",
"bytes": "471958"
},
{
"name": "Shell",
"bytes": "606"
}
],
"symlink_target": ""
} |
"""Template tags and filters for Zinnia"""
import re
from hashlib import md5
from datetime import date
try:
from urllib.parse import urlencode
except ImportError: # Python 2
from urllib import urlencode
from django.db.models import Q
from django.db.models import Count
from django.conf import settings
from django.utils import timezone
from django.template import Library
from django.utils.encoding import smart_text
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.utils.html import conditional_escape
from django.template.defaultfilters import stringfilter
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django_comments.models import CommentFlag
from django_comments import get_model as get_comment_model
from tagging.models import Tag
from tagging.utils import calculate_cloud
from ..models.entry import Entry
from ..models.author import Author
from ..models.category import Category
from ..managers import DRAFT
from ..managers import tags_published
from ..flags import PINGBACK, TRACKBACK
from ..settings import PROTOCOL
from ..comparison import EntryPublishedVectorBuilder
from ..calendar import Calendar
from ..breadcrumbs import retrieve_breadcrumbs
WIDONT_REGEXP = re.compile(
r'\s+(\S+\s*)$')
DOUBLE_SPACE_PUNCTUATION_WIDONT_REGEXP = re.compile(
r'\s+([-+*/%=;:!?]+ \S+\s*)$')
END_PUNCTUATION_WIDONT_REGEXP = re.compile(
r'\s+([?!]+\s*)$')
register = Library()
@register.inclusion_tag('zinnia/tags/dummy.html', takes_context=True)
def get_categories(context, template='zinnia/tags/categories.html'):
"""
Return the published categories.
"""
return {'template': template,
'categories': Category.published.all().annotate(
count_entries_published=Count('entries')),
'context_category': context.get('category')}
@register.inclusion_tag('zinnia/tags/dummy.html', takes_context=True)
def get_categories_tree(context, template='zinnia/tags/categories_tree.html'):
"""
Return the categories as a tree.
"""
return {'template': template,
'categories': Category.objects.all(),
'context_category': context.get('category')}
@register.inclusion_tag('zinnia/tags/dummy.html', takes_context=True)
def get_authors(context, template='zinnia/tags/authors.html'):
"""
Return the published authors.
"""
return {'template': template,
'authors': Author.published.all().annotate(
count_entries_published=Count('entries')),
'context_author': context.get('author')}
@register.inclusion_tag('zinnia/tags/dummy.html')
def get_recent_entries(number=5, template='zinnia/tags/entries_recent.html'):
"""
Return the most recent entries.
"""
return {'template': template,
'entries': Entry.published.all()[:number]}
@register.inclusion_tag('zinnia/tags/dummy.html')
def get_featured_entries(number=5,
template='zinnia/tags/entries_featured.html'):
"""
Return the featured entries.
"""
return {'template': template,
'entries': Entry.published.filter(featured=True)[:number]}
@register.inclusion_tag('zinnia/tags/dummy.html')
def get_draft_entries(number=5,
template='zinnia/tags/entries_draft.html'):
"""
Return the latest draft entries.
"""
return {'template': template,
'entries': Entry.objects.filter(status=DRAFT)[:number]}
@register.inclusion_tag('zinnia/tags/dummy.html')
def get_random_entries(number=5, template='zinnia/tags/entries_random.html'):
"""
Return random entries.
"""
return {'template': template,
'entries': Entry.published.order_by('?')[:number]}
@register.inclusion_tag('zinnia/tags/dummy.html')
def get_popular_entries(number=5, template='zinnia/tags/entries_popular.html'):
"""
Return popular entries.
"""
return {'template': template,
'entries': Entry.published.filter(
comment_count__gt=0).order_by(
'-comment_count', '-publication_date')[:number]}
@register.inclusion_tag('zinnia/tags/dummy.html', takes_context=True)
def get_similar_entries(context, number=5,
template='zinnia/tags/entries_similar.html'):
"""
Return similar entries.
"""
entry = context.get('entry')
if not entry:
return {'template': template, 'entries': []}
vectors = EntryPublishedVectorBuilder()
entries = vectors.get_related(entry, number)
return {'template': template,
'entries': entries}
@register.inclusion_tag('zinnia/tags/dummy.html')
def get_archives_entries(template='zinnia/tags/entries_archives.html'):
"""
Return archives entries.
"""
return {'template': template,
'archives': Entry.published.datetimes(
'publication_date', 'month', order='DESC')}
@register.inclusion_tag('zinnia/tags/dummy.html')
def get_archives_entries_tree(
template='zinnia/tags/entries_archives_tree.html'):
"""
Return archives entries as a tree.
"""
return {'template': template,
'archives': Entry.published.datetimes(
'publication_date', 'day', order='ASC')}
@register.inclusion_tag('zinnia/tags/dummy.html', takes_context=True)
def get_calendar_entries(context, year=None, month=None,
template='zinnia/tags/entries_calendar.html'):
"""
Return an HTML calendar of entries.
"""
if not (year and month):
day_week_month = (context.get('day') or
context.get('week') or
context.get('month'))
publication_date = getattr(context.get('object'),
'publication_date', None)
if day_week_month:
current_month = day_week_month
elif publication_date:
if settings.USE_TZ:
publication_date = timezone.localtime(publication_date)
current_month = publication_date.date()
else:
today = timezone.now()
if settings.USE_TZ:
today = timezone.localtime(today)
current_month = today.date()
current_month = current_month.replace(day=1)
else:
current_month = date(year, month, 1)
dates = list(map(
lambda x: settings.USE_TZ and timezone.localtime(x).date() or x.date(),
Entry.published.datetimes('publication_date', 'month')))
if current_month not in dates:
dates.append(current_month)
dates.sort()
index = dates.index(current_month)
previous_month = index > 0 and dates[index - 1] or None
next_month = index != len(dates) - 1 and dates[index + 1] or None
calendar = Calendar()
return {'template': template,
'next_month': next_month,
'previous_month': previous_month,
'calendar': calendar.formatmonth(
current_month.year,
current_month.month,
previous_month=previous_month,
next_month=next_month)}
@register.inclusion_tag('zinnia/tags/dummy.html')
def get_recent_comments(number=5, template='zinnia/tags/comments_recent.html'):
"""
Return the most recent comments.
"""
    # Using map(smart_text, ...) fixes a bug related to issue #8554
entry_published_pks = map(smart_text,
Entry.published.values_list('id', flat=True))
content_type = ContentType.objects.get_for_model(Entry)
comments = get_comment_model().objects.filter(
Q(flags=None) | Q(flags__flag=CommentFlag.MODERATOR_APPROVAL),
content_type=content_type, object_pk__in=entry_published_pks,
is_public=True).order_by('-pk')[:number]
comments = comments.prefetch_related('content_object')
return {'template': template,
'comments': comments}
@register.inclusion_tag('zinnia/tags/dummy.html')
def get_recent_linkbacks(number=5,
template='zinnia/tags/linkbacks_recent.html'):
"""
Return the most recent linkbacks.
"""
entry_published_pks = map(smart_text,
Entry.published.values_list('id', flat=True))
content_type = ContentType.objects.get_for_model(Entry)
linkbacks = get_comment_model().objects.filter(
content_type=content_type,
object_pk__in=entry_published_pks,
flags__flag__in=[PINGBACK, TRACKBACK],
is_public=True).order_by('-pk')[:number]
linkbacks = linkbacks.prefetch_related('content_object')
return {'template': template,
'linkbacks': linkbacks}
@register.inclusion_tag('zinnia/tags/dummy.html', takes_context=True)
def zinnia_pagination(context, page, begin_pages=1, end_pages=1,
before_pages=2, after_pages=2,
template='zinnia/tags/pagination.html'):
"""
    Return a Digg-like pagination
    by splitting a long list of pages into 3 blocks of pages.
"""
GET_string = ''
for key, value in context['request'].GET.items():
if key != 'page':
GET_string += '&%s=%s' % (key, value)
begin = list(page.paginator.page_range[:begin_pages])
end = list(page.paginator.page_range[-end_pages:])
middle = list(page.paginator.page_range[
max(page.number - before_pages - 1, 0):page.number + after_pages])
if set(begin) & set(middle): # [1, 2, 3], [2, 3, 4], [...]
begin = sorted(set(begin + middle)) # [1, 2, 3, 4]
middle = []
elif begin[-1] + 1 == middle[0]: # [1, 2, 3], [4, 5, 6], [...]
begin += middle # [1, 2, 3, 4, 5, 6]
middle = []
elif middle[-1] + 1 == end[0]: # [...], [15, 16, 17], [18, 19, 20]
end = middle + end # [15, 16, 17, 18, 19, 20]
middle = []
elif set(middle) & set(end): # [...], [17, 18, 19], [18, 19, 20]
end = sorted(set(middle + end)) # [17, 18, 19, 20]
middle = []
if set(begin) & set(end): # [1, 2, 3], [...], [2, 3, 4]
begin = sorted(set(begin + end)) # [1, 2, 3, 4]
middle, end = [], []
elif begin[-1] + 1 == end[0]: # [1, 2, 3], [...], [4, 5, 6]
begin += end # [1, 2, 3, 4, 5, 6]
middle, end = [], []
return {'template': template,
'page': page,
'begin': begin,
'middle': middle,
'end': end,
'GET_string': GET_string}
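# Illustrative example (not in the original source): with 20 pages, the
# current page at 9 and the default arguments, the blocks come out as
#   begin=[1]  middle=[7, 8, 9, 10, 11]  end=[20]
# which the template renders as "1 ... 7 8 9 10 11 ... 20".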
@register.inclusion_tag('zinnia/tags/dummy.html', takes_context=True)
def zinnia_breadcrumbs(context, root_name=_('Blog'),
template='zinnia/tags/breadcrumbs.html',):
"""
Return a breadcrumb for the application.
"""
path = context['request'].path
context_object = context.get('object') or context.get('category') or \
context.get('tag') or context.get('author')
context_page = context.get('page_obj')
breadcrumbs = retrieve_breadcrumbs(path, context_object,
context_page, root_name)
return {'template': template,
'breadcrumbs': breadcrumbs}
@register.simple_tag
def get_gravatar(email, size=80, rating='g', default=None,
protocol=PROTOCOL):
"""
Return url for a Gravatar.
"""
GRAVATAR_PROTOCOLS = {'http': 'http://www',
'https': 'https://secure'}
url = '%s.gravatar.com/avatar/%s' % (
GRAVATAR_PROTOCOLS[protocol],
md5(email.strip().lower().encode('utf-8')).hexdigest())
options = {'s': size, 'r': rating}
if default:
options['d'] = default
url = '%s?%s' % (url, urlencode(options))
    return url.replace('&', '&amp;')
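# Illustrative example (hypothetical address, not in the original source):
#   get_gravatar('jane@example.com')
# returns, assuming PROTOCOL is 'https', something like
#   https://secure.gravatar.com/avatar/<md5 of the address>?s=80&amp;r=g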
@register.assignment_tag
def get_tags():
"""
Return the published tags.
"""
return Tag.objects.usage_for_queryset(
Entry.published.all())
@register.inclusion_tag('zinnia/tags/dummy.html', takes_context=True)
def get_tag_cloud(context, steps=6, min_count=None,
template='zinnia/tags/tag_cloud.html'):
"""
Return a cloud of published tags.
"""
tags = Tag.objects.usage_for_queryset(
Entry.published.all(), counts=True,
min_count=min_count)
return {'template': template,
'tags': calculate_cloud(tags, steps),
'context_tag': context.get('tag')}
@register.filter(needs_autoescape=True)
@stringfilter
def widont(value, autoescape=None):
"""
Add an HTML non-breaking space between the final
two words of the string to avoid "widowed" words.
"""
esc = autoescape and conditional_escape or (lambda x: x)
def replace(matchobj):
        return '&nbsp;%s' % matchobj.group(1)
value = END_PUNCTUATION_WIDONT_REGEXP.sub(replace, esc(smart_text(value)))
value = WIDONT_REGEXP.sub(replace, value)
value = DOUBLE_SPACE_PUNCTUATION_WIDONT_REGEXP.sub(replace, value)
return mark_safe(value)
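# Illustrative example (not in the original source):
#   {{ "Hello the wonderful world"|widont }}
# renders as "Hello the wonderful&nbsp;world", keeping the last two words
# glued together so the final word is never widowed onto its own line.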
@register.filter
def week_number(date):
"""
Return the Python week number of a date.
    The django |date:"W" filter returns a value
    incompatible with the view implementation.
"""
week_number = date.strftime('%W')
if int(week_number) < 10:
week_number = week_number[-1]
return week_number
@register.filter
def comment_admin_urlname(action):
"""
Return the admin URLs for the comment app used.
"""
comment = get_comment_model()
return 'admin:%s_%s_%s' % (
comment._meta.app_label, comment._meta.model_name,
action)
@register.filter
def user_admin_urlname(action):
"""
Return the admin URLs for the user app used.
"""
user = get_user_model()
return 'admin:%s_%s_%s' % (
user._meta.app_label, user._meta.model_name,
action)
@register.inclusion_tag('zinnia/tags/dummy.html')
def zinnia_statistics(template='zinnia/tags/statistics.html'):
"""
Return statistics on the content of Zinnia.
"""
content_type = ContentType.objects.get_for_model(Entry)
discussions = get_comment_model().objects.filter(
content_type=content_type)
entries = Entry.published
categories = Category.objects
tags = tags_published()
authors = Author.published
replies = discussions.filter(
flags=None, is_public=True)
pingbacks = discussions.filter(
flags__flag=PINGBACK, is_public=True)
trackbacks = discussions.filter(
flags__flag=TRACKBACK, is_public=True)
rejects = discussions.filter(is_public=False)
entries_count = entries.count()
replies_count = replies.count()
pingbacks_count = pingbacks.count()
trackbacks_count = trackbacks.count()
if entries_count:
first_entry = entries.order_by('publication_date')[0]
last_entry = entries.latest()
months_count = (last_entry.publication_date -
first_entry.publication_date).days / 31.0
entries_per_month = entries_count / (months_count or 1.0)
comments_per_entry = float(replies_count) / entries_count
linkbacks_per_entry = float(pingbacks_count + trackbacks_count) / \
entries_count
total_words_entry = 0
for e in entries.all():
total_words_entry += e.word_count
words_per_entry = float(total_words_entry) / entries_count
words_per_comment = 0.0
if replies_count:
total_words_comment = 0
for c in replies.all():
total_words_comment += len(c.comment.split())
words_per_comment = float(total_words_comment) / replies_count
else:
words_per_entry = words_per_comment = entries_per_month = \
comments_per_entry = linkbacks_per_entry = 0.0
return {'template': template,
'entries': entries_count,
'categories': categories.count(),
'tags': tags.count(),
'authors': authors.count(),
'comments': replies_count,
'pingbacks': pingbacks_count,
'trackbacks': trackbacks_count,
'rejects': rejects.count(),
'words_per_entry': words_per_entry,
'words_per_comment': words_per_comment,
'entries_per_month': entries_per_month,
'comments_per_entry': comments_per_entry,
'linkbacks_per_entry': linkbacks_per_entry}
| {
"content_hash": "16ffd5c9296d3c568e996ae8d32f6e09",
"timestamp": "",
"source": "github",
"line_count": 488,
"max_line_length": 79,
"avg_line_length": 33.618852459016395,
"alnum_prop": 0.6169694014384981,
"repo_name": "marctc/django-blog-zinnia",
"id": "00511a4fceb5ea7e46b2eaba456dccaea490b84f",
"size": "16406",
"binary": false,
"copies": "6",
"ref": "refs/heads/develop",
"path": "zinnia/templatetags/zinnia.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "77438"
},
{
"name": "HTML",
"bytes": "76134"
},
{
"name": "JavaScript",
"bytes": "235617"
},
{
"name": "Makefile",
"bytes": "1789"
},
{
"name": "Python",
"bytes": "524720"
}
],
"symlink_target": ""
} |
from ...models import Judge
from ...utils.logging import logger
import click
import requests
import json
from .utils import get_data, login_oauth
from beautifultable import BeautifulTable
from ...utils.Errors import AuthenticationError
from .models import Problem, Contest, Testcase
from ...utils import style
class Codechef(Judge):
def __init__(self, session_data=None):
logger.debug("Initializing class Codechef with session_data:\n%s"
% session_data)
self.name = "codechef"
self.url = "https://www.codechef.com"
self.api_url = "https://api.codechef.com"
self.session_data = session_data
if(session_data is not None):
self._update_session()
def check_login(self):
logger.debug("Checking Login")
if(self.session is None):
logger.debug("No session object initialized")
return False
me_url = self._make_url(self.api_url, 'users/me')
try:
r = self._request_api(me_url)
logger.debug(r)
return True
except requests.RequestException:
return False
def login(self):
token = login_oauth()
self.session_data = token
self._update_session()
assert(self.check_login())
def logout(self):
logger.warning("Logout of CodeChef.")
click.confirm("Are you sure?", default=True, abort=True)
self.session_data = None
def get_running_contests(self):
logger.debug('get running contests')
contests = get_data.running_contests(self)
table = BeautifulTable()
table.width_exceed_policy = BeautifulTable.WEP_WRAP
# TODO: use map style.headers instead of str
# requires change with beautifultable. we may try dev version
# TODO: sort running and upcoming
table.column_headers = list(
map(style.header, ['code', 'name', 'end', 'start']))
for contest in contests:
table.append_row(
[
contest['code'], contest['name'],
str(contest['startDate']), str(contest['endDate'])
]
)
return table
def get_problem(self, problem_code, contest_code):
logger.info('fetching problem %s' % problem_code)
if(problem_code is not None):
if(contest_code is None):
contest_code = 'PRACTICE'
contest_code = contest_code.upper()
problem_code = problem_code.upper()
problem_url = self._make_url(
self.api_url, 'contests', contest_code, 'problems', problem_code)
problem_data = self._request_api(problem_url)
problem = Problem(data=problem_data)
problem.judge_name = self.name
problem.contest_code = contest_code
return problem
def get_contest(self, contest_code):
logger.info('fetching contest %s' % contest_code)
contest_code = contest_code.upper()
contest_url = self._make_url(self.api_url, 'contests', contest_code)
contest_data = self._request_api(contest_url)
contest = Contest(data=contest_data)
logger.debug(contest.problem_codes)
for problem_code in contest.problem_codes:
contest.problems.append(
self.get_problem(
contest_code=contest.code, problem_code=problem_code))
contest.judge_name = self.name
return contest
def get_problem_url(self, problem_code, contest_code):
if (contest_code is None):
contest_code = 'PRACTICE'
contest_code = contest_code.upper()
problem_code = problem_code.upper()
if(contest_code == 'PRACTICE'):
return self._make_url(self.url, 'problems', problem_code)
return self._make_url(self.url, contest_code,
'problems', problem_code)
def submit(self, problem, code_text, extension):
# TODO : correct this
logger.warn("Codechef api doesn't support the submissions yet.\n"
"Please contact them to support this.\n"
"You can also try codechef-web implementation.\n"
"https://github.com/termicoder/termicoder-codechef-web\n"
)
import pyperclip
pyperclip.copy(code_text)
if(problem.contest_code != 'PRACTICE'):
url = self._make_url(
self.url, problem.contest_code, 'submit', problem.code)
else:
url = self._make_url(
self.url, 'submit', problem.code)
logger.warn("By the time,"
"We have copied the code to clipboard "
"and are launching the submit url\n%s\n"
"Please paste your code there and submit" % url)
click.confirm("Continue?", default=True)
click.launch(url)
def get_contest_url(self, contest_code):
if (contest_code is None):
contest_code = 'PRACTICE'
contest_code = contest_code.upper()
if(contest_code == 'PRACTICE'):
return self._make_url(self.url, 'problems', 'school')
return self._make_url(self.url, contest_code)
def get_contests_list_url(self):
return self._make_url(self.url, 'contests')
def _update_session(self):
self.session = requests.Session()
def debug_url(r, *args, **kwargs):
logger.debug('Getting url %s' % r.url)
def debug_data(r, *args, **kwargs):
try:
response = json.dumps(r.json(), indent=1)
except json.JSONDecodeError:
response = r.text
logger.debug('Response %s' % response)
self.session.hooks['response'].append(debug_url)
self.session.hooks['response'].append(debug_data)
token = self.session_data['result']['data']['access_token']
logger.debug('Token: ' + token)
OAuth2_Header = {
'Authorization': 'Bearer %s' % token
}
self.session.headers.update(OAuth2_Header)
def get_testcase(self, inp, ans, code):
return Testcase(inp=inp, ans=ans, code=code)
def _make_url(self, base_url, *rel_urls):
logger.debug(base_url)
logger.debug(rel_urls)
base_url = base_url.strip('/')
join_urls = [base_url]
for rel_url in rel_urls:
join_urls.append(rel_url.strip('/'))
return "/".join(join_urls)
def _request_api(self, url):
logger.debug('fetching url %s' % url)
try:
with self.session as s:
r = s.get(url)
if(r.status_code == 401):
logger.error("Authentication failed trying refreshing token")
self.refresh_login()
logger.debug(r)
# s is still bound to older session object so use self.session
r = self.session.get(url)
r.raise_for_status()
return r.json()
except AttributeError:
raise AuthenticationError(
                'The endpoint %s requires authorization\n'
                'Try `termicoder setup --login -j codechef` first\n'
                'and grant appropriate rights' % url)
def refresh_login(self):
logger.debug('refreshing token')
url = 'http://termicoder.diveshuttam.me/refresh_token'
# TODO implement this on server side
logger.debug(self.session_data)
try:
r = requests.get(url, params={'data': json.dumps(self.session_data)})
logger.debug("response for refresh:")
logger.debug(r.text)
r.raise_for_status()
self.session_data = r.json()
self._update_session()
logger.debug(r.text)
except requests.exceptions.HTTPError:
self.session_data = None
logger.error("Refreshing token failed, please try :"
"`termicoder setup -j codechef --login`")
| {
"content_hash": "faa397e2e86d0eb90d65f52d44158d90",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 82,
"avg_line_length": 38.9375,
"alnum_prop": 0.5711816273614027,
"repo_name": "termicoder/termicoder",
"id": "dc0025936bd34c1234f3e3fc4e018d13c947a496",
"size": "8264",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "termicoder/judges/codechef/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1748"
},
{
"name": "Python",
"bytes": "99801"
},
{
"name": "Roff",
"bytes": "14761"
}
],
"symlink_target": ""
} |
"""
Created on Wed Mar 16 15:40:29 2016
@author: okada
$Id: signature.py 205 2017-08-08 06:25:59Z aokada $
"""
########### js template
js_header = """(function() {
sig_data = {};
"""
js_footer = """
})();
Object.freeze(sig_data);
"""
js_dataset = """
sig_data.tooltip_format = {{
signature_title:{signature_title},
signature_partial:{signature_partial},
mutation_title:{mutation_title},
mutation_partial:{mutation_partial},
}};
sig_data.signatures = [{signatures}];
sig_data.sig_colors = [{colors}];
sig_data.dataset_sig = [{dataset_sig}];
sig_data.dataset_sig_max = {dataset_sig_max};
sig_data.route_id = [{route_id}];
sig_data.substitution = [{substruction}];
// [ID, signature, value]
sig_data.mutations = [{mutations}];
sig_data.mutation_count = [{mutation_count}];
sig_data.Ids = [{Ids}];
"""
js_substruction_template = "{{name: '{name}', color: '{color}', route: [{route}],}},"
########### HTML template
html_integral_template = """<table>
<tr>
<td style="vertical-align: top;" ><div style="float: left;" id="div_rate"></div></td>
<td style="vertical-align: top;><!-- legend --> <div style="float: left;" id='div_rate_legend_html'></div><div style="float: left;" id='div_rate_legend_svg'></div></td>
</tr>
<tr>
<td style="vertical-align: top;><div style="float: left;" id="div_integral"></div></td>
<td style="vertical-align: top;><!-- legend --> <div style="float: left;" id='div_integral_legend_html'></div><div style="float: left;" id='div_integral_legend_svg'></div></td>
</tr>
<tr>
<td colspan=2 style="padding-top: 20px;">
<p>View mode: <select id="chart_mode"></select></p>
<p>Sort by: <select id="chart_sort"></select></p>
</td>
</tr>
</table>
"""
########### functions
def output_html(params, config):
dataset = convert_tojs(params, config)
if dataset != None and dataset != {}:
create_html(dataset, params, config)
return dataset
def convert_tojs(params, config):
import os
import json
import math
import itertools
import paplot.subcode.tools as tools
import paplot.convert as convert
import paplot.color as color
# data read
try:
json_data = json.load(open(params["data"]))
except Exception as e:
print ("failure open data %s, %s" % (params["data"], e.message))
return None
key_ids = tools.config_getstr(config, "result_format_signature", "key_id")
key_signature = tools.config_getstr(config, "result_format_signature", "key_signature")
key_mutations = tools.config_getstr(config, "result_format_signature", "key_mutation")
key_mutation_count = tools.config_getstr(config, "result_format_signature", "key_mutation_count")
sig_num = len(json_data[key_signature])
if sig_num == 0:
print ("no data %s" % params["data"])
return {}
# signature names
signature_list = []
for s in range(sig_num):
signature_list.append("Signature %d" % (s+1))
# each signature colors
sig_color_list = color.create_color_array(sig_num, color.r_set2)
# use background?
if tools.config_getboolean(config, "result_format_signature", "background"):
signature_list.append("Background ")
sig_color_list.append(color.r_set2_gray)
# axis-y max
sig_y_max = tools.config_getint(config, "signature", "signature_y_max")
if (sig_y_max < 0):
for sig in json_data[key_signature]:
for sub in sig:
m = max(sub)
if sig_y_max < m:
sig_y_max = m
# route list
sub_num = len(json_data[key_signature][0][0])
log = math.log(sub_num, 4)
if log % 1 > 0:
print ("substitution's list length is invalid (%d, not number 4^N)" % sub_num)
return None
route_id = []
route_list = []
for p in itertools.product(("A","C","G","T"), repeat = int(log)):
route_id.append("".join(p))
route_list.append(p)
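    # Illustrative example (not in the original source): with sub_num == 16,
    # log == 2 and the loop above yields route_id == ["AA", "AC", ..., "TT"],
    # i.e. the 16 flanking-base contexts around each substitution type.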
# substruction
sub_di = [
{"name":"C > A", "ref":"C", "color":tools.config_getstr(config, "signature", "alt_color_CtoA")},
{"name":"C > G", "ref":"C", "color":tools.config_getstr(config, "signature", "alt_color_CtoG")},
{"name":"C > T", "ref":"C", "color":tools.config_getstr(config, "signature", "alt_color_CtoT")},
{"name":"T > A", "ref":"T", "color":tools.config_getstr(config, "signature", "alt_color_TtoA")},
{"name":"T > C", "ref":"T", "color":tools.config_getstr(config, "signature", "alt_color_TtoC")},
{"name":"T > G", "ref":"T", "color":tools.config_getstr(config, "signature", "alt_color_TtoG")},
]
substruction = ""
for sub in sub_di:
route = []
for r in route_list:
route.append("p".join(r[0:int(log/2)]) + "p" + sub["ref"] + "p" + "p".join(r[int(log/2):]))
substruction += js_substruction_template.format(name = sub["name"], color = sub["color"], route = convert.list_to_text(route))
# Id list
id_txt = ""
if key_ids in json_data:
id_txt = convert.list_to_text(json_data[key_ids])
# mutations
mutations_txt = ""
if key_mutations in json_data:
for m in json_data[key_mutations]:
mutations_txt += "[%d,%d,%f]," % (m[0],m[1],m[2])
# signature
dataset_sig = ""
for sig in json_data[key_signature]:
tmp = ""
for sub in sig:
tmp += "[" + ",".join(map(str, sub)) + "],"
dataset_sig += ("[" + tmp + "],")
mutation_count_txt = ""
if (key_mutation_count != "") and (key_mutation_count in json_data.keys()):
for v in json_data[key_mutation_count]:
mutation_count_txt += "%d," % v
# output
sig_num_sift = 0
if tools.config_getboolean(config, "result_format_signature", "background"):
sig_num_sift = 1
ellipsis = "%s%d" % (params["ellipsis"], (sig_num + sig_num_sift))
js_file = "data_%s.js" % ellipsis
html_file = "graph_%s.html" % ellipsis
keys_di = {"sig":"", "route":"", "id":""}
f = open(params["dir"] + "/" + js_file, "w")
f.write(js_header \
+ js_dataset.format(Ids = id_txt, \
signatures = convert.list_to_text(signature_list), \
colors = convert.list_to_text(sig_color_list), \
dataset_sig_max = sig_y_max, \
mutations = mutations_txt, \
dataset_sig = dataset_sig, \
route_id = convert.list_to_text(route_id), \
substruction = substruction, \
signature_title = convert.pyformat_to_jstooltip_text(keys_di, config, "signature", "", "tooltip_format_signature_title"), \
signature_partial = convert.pyformat_to_jstooltip_text(keys_di, config, "signature", "", "tooltip_format_signature_partial"), \
mutation_title = convert.pyformat_to_jstooltip_text(keys_di, config, "signature", "", "tooltip_format_mutation_title"), \
mutation_partial = convert.pyformat_to_jstooltip_text(keys_di, config, "signature", "", "tooltip_format_mutation_partial"), \
mutation_count = mutation_count_txt, \
)
)
f_template = open(os.path.dirname(os.path.abspath(__file__)) + "/templates/data_signature.js")
js_function = f_template.read()
f_template.close()
f.write(js_function)
f.write(js_footer)
f.close()
integral = True
if key_ids == "" or key_mutations == "" or key_mutation_count == "":
integral = False
return {"sig_num": sig_num,
"js": js_file,
"html": html_file,
"intergral": integral,
}
def create_html(dataset, params, config):
import os
import paplot.subcode.tools as tools
import paplot.prep as prep
html_div_template = "<div style='float: left;' id='div_pm{id}'></div>\n"
html_add_template = "sig_draw.add_div('div_pm{id}');\n"
div_text = ""
add_text = ""
for i in range(dataset["sig_num"]):
div_text += html_div_template.format(id = i)
add_text += html_add_template.format(id = i)
integral_text = ""
if dataset["intergral"] == True:
integral_text = html_integral_template
f_template = open(os.path.dirname(os.path.abspath(__file__)) + "/templates/graph_signature.html")
html_template = f_template.read()
f_template.close()
sig_num_sift = 0
if tools.config_getboolean(config, "result_format_signature", "background"):
sig_num_sift = 1
f_html = open(params["dir"] + "/" + dataset["html"], "w")
f_html.write(
html_template.format(project = params["project"],
title = "%s(#sig %d)" % (params["title"], dataset["sig_num"] + sig_num_sift),
data_js = dataset["js"],
version = prep.version_text(),
date = tools.now_string(),
divs = div_text,
add_divs = add_text,
integral = integral_text,
style = "../style/%s" % os.path.basename(tools.config_getpath(config, "style", "path", "default.js")),
))
f_html.close()
| {
"content_hash": "114ea6b2e88b13003142c474e18ac163",
"timestamp": "",
"source": "github",
"line_count": 257,
"max_line_length": 176,
"avg_line_length": 35.68093385214008,
"alnum_prop": 0.5735005452562705,
"repo_name": "Genomon-Project/paplot",
"id": "d0bf131cfe23e230013b8aeca123af12e2a3b595",
"size": "9194",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/paplot/signature.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5539"
},
{
"name": "Dockerfile",
"bytes": "3287"
},
{
"name": "HTML",
"bytes": "51141"
},
{
"name": "JavaScript",
"bytes": "451406"
},
{
"name": "Python",
"bytes": "171399"
},
{
"name": "Shell",
"bytes": "3022"
},
{
"name": "TeX",
"bytes": "5133"
}
],
"symlink_target": ""
} |
"""
Utility functions from 2to3, 3to2 and python-modernize (and some home-grown
ones).
Licences:
2to3: PSF License v2
3to2: Apache Software License (from 3to2/setup.py)
python-modernize licence: BSD (from python-modernize/LICENSE)
"""
from lib2to3.fixer_util import (FromImport, Newline, is_import, find_root,
                                does_tree_import, Comma, Call, Name, Number)
from lib2to3.pytree import Leaf, Node
from lib2to3.pygram import python_symbols as syms, python_grammar
from lib2to3.pygram import token
import re
def canonical_fix_name(fix, avail_fixes):
"""
Examples:
>>> canonical_fix_name('fix_wrap_text_literals')
'libfuturize.fixes.fix_wrap_text_literals'
>>> canonical_fix_name('wrap_text_literals')
'libfuturize.fixes.fix_wrap_text_literals'
>>> canonical_fix_name('wrap_te')
ValueError("unknown fixer name")
>>> canonical_fix_name('wrap')
ValueError("ambiguous fixer name")
"""
if ".fix_" in fix:
return fix
else:
if fix.startswith('fix_'):
fix = fix[4:]
# Infer the full module name for the fixer.
# First ensure that no names clash (e.g.
# lib2to3.fixes.fix_blah and libfuturize.fixes.fix_blah):
found = [f for f in avail_fixes
if f.endswith('fix_{0}'.format(fix))]
if len(found) > 1:
raise ValueError("Ambiguous fixer name. Choose a fully qualified "
"module name instead from these:\n" +
"\n".join(" " + myf for myf in found))
elif len(found) == 0:
raise ValueError("Unknown fixer. Use --list-fixes or -l for a list.")
return found[0]
## These functions are from 3to2 by Joe Amenta:
def Star(prefix=None):
return Leaf(token.STAR, u'*', prefix=prefix)
def DoubleStar(prefix=None):
return Leaf(token.DOUBLESTAR, u'**', prefix=prefix)
def Minus(prefix=None):
return Leaf(token.MINUS, u'-', prefix=prefix)
def commatize(leafs):
"""
Accepts/turns: (Name, Name, ..., Name, Name)
Returns/into: (Name, Comma, Name, Comma, ..., Name, Comma, Name)
"""
new_leafs = []
for leaf in leafs:
new_leafs.append(leaf)
new_leafs.append(Comma())
del new_leafs[-1]
return new_leafs
def indentation(node):
"""
Returns the indentation for this node
Iff a node is in a suite, then it has indentation.
"""
while node.parent is not None and node.parent.type != syms.suite:
node = node.parent
if node.parent is None:
return u""
# The first three children of a suite are NEWLINE, INDENT, (some other node)
# INDENT.value contains the indentation for this suite
# anything after (some other node) has the indentation as its prefix.
if node.type == token.INDENT:
return node.value
elif node.prev_sibling is not None and node.prev_sibling.type == token.INDENT:
return node.prev_sibling.value
elif node.prev_sibling is None:
return u""
else:
return node.prefix
def indentation_step(node):
"""
Dirty little trick to get the difference between each indentation level
Implemented by finding the shortest indentation string
(technically, the "least" of all of the indentation strings, but
tabs and spaces mixed won't get this far, so those are synonymous.)
"""
r = find_root(node)
# Collect all indentations into one set.
all_indents = set(i.value for i in r.pre_order() if i.type == token.INDENT)
if not all_indents:
# nothing is indented anywhere, so we get to pick what we want
return u" " # four spaces is a popular convention
else:
return min(all_indents)
def suitify(parent):
"""
Turn the stuff after the first colon in parent's children
into a suite, if it wasn't already
"""
for node in parent.children:
if node.type == syms.suite:
            # already in the preferred format, do nothing
return
# One-liners have no suite node, we have to fake one up
for i, node in enumerate(parent.children):
if node.type == token.COLON:
break
else:
raise ValueError(u"No class suite and no ':'!")
# Move everything into a suite node
suite = Node(syms.suite, [Newline(), Leaf(token.INDENT, indentation(node) + indentation_step(node))])
one_node = parent.children[i+1]
one_node.remove()
one_node.prefix = u''
suite.append_child(one_node)
parent.append_child(suite)
def NameImport(package, as_name=None, prefix=None):
"""
Accepts a package (Name node), name to import it as (string), and
optional prefix and returns a node:
import <package> [as <as_name>]
"""
if prefix is None:
prefix = u""
children = [Name(u"import", prefix=prefix), package]
if as_name is not None:
children.extend([Name(u"as", prefix=u" "),
Name(as_name, prefix=u" ")])
return Node(syms.import_name, children)
_compound_stmts = (syms.if_stmt, syms.while_stmt, syms.for_stmt, syms.try_stmt, syms.with_stmt)
_import_stmts = (syms.import_name, syms.import_from)
def import_binding_scope(node):
"""
Generator yields all nodes for which a node (an import_stmt) has scope
The purpose of this is for a call to _find() on each of them
"""
# import_name / import_from are small_stmts
assert node.type in _import_stmts
test = node.next_sibling
# A small_stmt can only be followed by a SEMI or a NEWLINE.
while test.type == token.SEMI:
nxt = test.next_sibling
# A SEMI can only be followed by a small_stmt or a NEWLINE
if nxt.type == token.NEWLINE:
break
else:
yield nxt
# A small_stmt can only be followed by either a SEMI or a NEWLINE
test = nxt.next_sibling
# Covered all subsequent small_stmts after the import_stmt
# Now to cover all subsequent stmts after the parent simple_stmt
parent = node.parent
assert parent.type == syms.simple_stmt
test = parent.next_sibling
while test is not None:
# Yes, this will yield NEWLINE and DEDENT. Deal with it.
yield test
test = test.next_sibling
context = parent.parent
# Recursively yield nodes following imports inside of a if/while/for/try/with statement
if context.type in _compound_stmts:
# import is in a one-liner
c = context
while c.next_sibling is not None:
yield c.next_sibling
c = c.next_sibling
context = context.parent
# Can't chain one-liners on one line, so that takes care of that.
p = context.parent
if p is None:
return
# in a multi-line suite
while p.type in _compound_stmts:
if context.type == syms.suite:
yield context
context = context.next_sibling
if context is None:
context = p.parent
p = context.parent
if p is None:
break
def ImportAsName(name, as_name, prefix=None):
new_name = Name(name)
new_as = Name(u"as", prefix=u" ")
new_as_name = Name(as_name, prefix=u" ")
new_node = Node(syms.import_as_name, [new_name, new_as, new_as_name])
if prefix is not None:
new_node.prefix = prefix
return new_node
def is_docstring(node):
"""
Returns True if the node appears to be a docstring
"""
return (node.type == syms.simple_stmt and
len(node.children) > 0 and node.children[0].type == token.STRING)
def future_import(feature, node):
"""
This seems to work
"""
root = find_root(node)
if does_tree_import(u"__future__", feature, node):
return
# Look for a shebang or encoding line
shebang_encoding_idx = None
for idx, node in enumerate(root.children):
# Is it a shebang or encoding line?
if is_shebang_comment(node) or is_encoding_comment(node):
shebang_encoding_idx = idx
if is_docstring(node):
# skip over docstring
continue
names = check_future_import(node)
if not names:
# not a future statement; need to insert before this
break
if feature in names:
# already imported
return
import_ = FromImport(u'__future__', [Leaf(token.NAME, feature, prefix=" ")])
if shebang_encoding_idx == 0 and idx == 0:
# If this __future__ import would go on the first line,
# detach the shebang / encoding prefix from the current first line.
# and attach it to our new __future__ import node.
import_.prefix = root.children[0].prefix
root.children[0].prefix = u''
# End the __future__ import line with a newline and add a blank line
# afterwards:
children = [import_ , Newline()]
root.insert_child(idx, Node(syms.simple_stmt, children))
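# Illustrative example (not in the original source): applied to a tree parsed
# from "x = 1", future_import(u'print_function', node) rewrites the module to
#     from __future__ import print_function
#     x = 1
# and is a no-op if the feature is already imported.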
def future_import2(feature, node):
"""
An alternative to future_import() which might not work ...
"""
root = find_root(node)
if does_tree_import(u"__future__", feature, node):
return
insert_pos = 0
for idx, node in enumerate(root.children):
if node.type == syms.simple_stmt and node.children and \
node.children[0].type == token.STRING:
insert_pos = idx + 1
break
for thing_after in root.children[insert_pos:]:
if thing_after.type == token.NEWLINE:
insert_pos += 1
continue
prefix = thing_after.prefix
thing_after.prefix = u""
break
else:
prefix = u""
import_ = FromImport(u"__future__", [Leaf(token.NAME, feature, prefix=u" ")])
children = [import_, Newline()]
root.insert_child(insert_pos, Node(syms.simple_stmt, children, prefix=prefix))
def parse_args(arglist, scheme):
u"""
Parse a list of arguments into a dict
"""
arglist = [i for i in arglist if i.type != token.COMMA]
ret_mapping = dict([(k, None) for k in scheme])
for i, arg in enumerate(arglist):
if arg.type == syms.argument and arg.children[1].type == token.EQUAL:
# argument < NAME '=' any >
slot = arg.children[0].value
ret_mapping[slot] = arg.children[2]
else:
slot = scheme[i]
ret_mapping[slot] = arg
return ret_mapping
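# Illustrative example (not in the original source): for the argument list of
# a call like f(1, end=2) and scheme ['start', 'end'], parse_args returns
# {'start': <node for 1>, 'end': <node for 2>} -- keyword arguments fill the
# slot they name, positionals fill slots in scheme order.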
# def is_import_from(node):
# """Returns true if the node is a statement "from ... import ..."
# """
# return node.type == syms.import_from
def is_import_stmt(node):
return (node.type == syms.simple_stmt and node.children and
is_import(node.children[0]))
def touch_import_top(package, name_to_import, node):
"""Works like `does_tree_import` but adds an import statement at the
top if it was not imported (but below any __future__ imports) and below any
comments such as shebang lines).
Based on lib2to3.fixer_util.touch_import()
Calling this multiple times adds the imports in reverse order.
Also adds "standard_library.install_aliases()" after "from future import
standard_library". This should probably be factored into another function.
"""
root = find_root(node)
if does_tree_import(package, name_to_import, root):
return
# Ideally, we would look for whether futurize --all-imports has been run,
# as indicated by the presence of ``from builtins import (ascii, ...,
# zip)`` -- and, if it has, we wouldn't import the name again.
# Look for __future__ imports and insert below them
found = False
for name in ['absolute_import', 'division', 'print_function',
'unicode_literals']:
if does_tree_import('__future__', name, root):
found = True
break
if found:
# At least one __future__ import. We want to loop until we've seen them
# all.
start, end = None, None
for idx, node in enumerate(root.children):
if check_future_import(node):
start = idx
# Start looping
idx2 = start
while node:
node = node.next_sibling
idx2 += 1
if not check_future_import(node):
end = idx2
break
break
assert start is not None
assert end is not None
insert_pos = end
else:
# No __future__ imports.
# We look for a docstring and insert the new node below that. If no docstring
# exists, just insert the node at the top.
for idx, node in enumerate(root.children):
if node.type != syms.simple_stmt:
break
if not is_docstring(node):
# This is the usual case.
break
insert_pos = idx
if package is None:
import_ = Node(syms.import_name, [
Leaf(token.NAME, u"import"),
Leaf(token.NAME, name_to_import, prefix=u" ")
])
else:
import_ = FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")])
if name_to_import == u'standard_library':
# Add:
# standard_library.install_aliases()
# after:
# from future import standard_library
install_hooks = Node(syms.simple_stmt,
[Node(syms.power,
[Leaf(token.NAME, u'standard_library'),
Node(syms.trailer, [Leaf(token.DOT, u'.'),
Leaf(token.NAME, u'install_aliases')]),
Node(syms.trailer, [Leaf(token.LPAR, u'('),
Leaf(token.RPAR, u')')])
])
]
)
children_hooks = [install_hooks, Newline()]
else:
children_hooks = []
# FromImport(package, [Leaf(token.NAME, name_to_import, prefix=u" ")])
children_import = [import_, Newline()]
old_prefix = root.children[insert_pos].prefix
root.children[insert_pos].prefix = u''
root.insert_child(insert_pos, Node(syms.simple_stmt, children_import, prefix=old_prefix))
if len(children_hooks) > 0:
root.insert_child(insert_pos + 1, Node(syms.simple_stmt, children_hooks))
## The following functions are from python-modernize by Armin Ronacher:
# (a little edited).
def check_future_import(node):
"""If this is a future import, return set of symbols that are imported,
else return None."""
# node should be the import statement here
savenode = node
if not (node.type == syms.simple_stmt and node.children):
return set()
node = node.children[0]
# now node is the import_from node
if not (node.type == syms.import_from and
# node.type == token.NAME and # seems to break it
hasattr(node.children[1], 'value') and
node.children[1].value == u'__future__'):
return set()
if node.children[3].type == token.LPAR:
node = node.children[4]
else:
node = node.children[3]
# now node is the import_as_name[s]
# print(python_grammar.number2symbol[node.type]) # breaks sometimes
if node.type == syms.import_as_names:
result = set()
for n in node.children:
if n.type == token.NAME:
result.add(n.value)
elif n.type == syms.import_as_name:
n = n.children[0]
assert n.type == token.NAME
result.add(n.value)
return result
elif node.type == syms.import_as_name:
node = node.children[0]
assert node.type == token.NAME
return set([node.value])
elif node.type == token.NAME:
return set([node.value])
else:
# TODO: handle brackets like this:
# from __future__ import (absolute_import, division)
assert False, "strange import: %s" % savenode
SHEBANG_REGEX = r'^#!.*python'
ENCODING_REGEX = r"^#.*coding[:=]\s*([-\w.]+)"
def is_shebang_comment(node):
"""
Comments are prefixes for Leaf nodes. Returns whether the given node has a
prefix that looks like a shebang line or an encoding line:
#!/usr/bin/env python
#!/usr/bin/python3
"""
return bool(re.match(SHEBANG_REGEX, node.prefix))
def is_encoding_comment(node):
"""
Comments are prefixes for Leaf nodes. Returns whether the given node has a
prefix that looks like an encoding line:
# coding: utf-8
# encoding: utf-8
# -*- coding: <encoding name> -*-
# vim: set fileencoding=<encoding name> :
"""
return bool(re.match(ENCODING_REGEX, node.prefix))
def wrap_in_fn_call(fn_name, args, prefix=None):
"""
Example:
>>> wrap_in_fn_call("oldstr", (arg,))
oldstr(arg)
>>> wrap_in_fn_call("olddiv", (arg1, arg2))
olddiv(arg1, arg2)
>>> wrap_in_fn_call("olddiv", [arg1, comma, arg2, comma, arg3])
olddiv(arg1, arg2, arg3)
"""
assert len(args) > 0
if len(args) == 2:
expr1, expr2 = args
newargs = [expr1, Comma(), expr2]
else:
newargs = args
return Call(Name(fn_name), newargs, prefix=prefix)
| {
"content_hash": "ee3b0bf4a13fddf3684abca1efbd0742",
"timestamp": "",
"source": "github",
"line_count": 520,
"max_line_length": 105,
"avg_line_length": 33.64230769230769,
"alnum_prop": 0.5923745284097405,
"repo_name": "ryfeus/lambda-packs",
"id": "48e4689db96917b39586f1f939142741dd46203d",
"size": "17494",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "H2O/ArchiveH2O/libfuturize/fixer_util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9768343"
},
{
"name": "C++",
"bytes": "76566960"
},
{
"name": "CMake",
"bytes": "191097"
},
{
"name": "CSS",
"bytes": "153538"
},
{
"name": "Cuda",
"bytes": "61768"
},
{
"name": "Cython",
"bytes": "3110222"
},
{
"name": "Fortran",
"bytes": "110284"
},
{
"name": "HTML",
"bytes": "248658"
},
{
"name": "JavaScript",
"bytes": "62920"
},
{
"name": "MATLAB",
"bytes": "17384"
},
{
"name": "Makefile",
"bytes": "152150"
},
{
"name": "Python",
"bytes": "549307737"
},
{
"name": "Roff",
"bytes": "26398"
},
{
"name": "SWIG",
"bytes": "142"
},
{
"name": "Shell",
"bytes": "7790"
},
{
"name": "Smarty",
"bytes": "4090"
},
{
"name": "TeX",
"bytes": "152062"
},
{
"name": "XSLT",
"bytes": "305540"
}
],
"symlink_target": ""
} |
"""Default Hyperparameter configuration."""
import ml_collections
def get_config():
"""Get the default hyperparameter configuration."""
config = ml_collections.ConfigDict()
config.learning_rate = 0.1
config.momentum = 0.9
config.batch_size = 128
config.num_epochs = 10
return config
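# Illustrative usage (not part of the original file):
#   config = get_config()
#   config.batch_size            # -> 128
#   config.learning_rate = 0.01  # ConfigDict supports attribute-style updates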
| {
"content_hash": "b21250a2fdaf79635009a8242d647516",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 53,
"avg_line_length": 21.5,
"alnum_prop": 0.7209302325581395,
"repo_name": "google/flax",
"id": "822bf7fb8d382bd8bfc8f06761f2c8107d4c8cb2",
"size": "883",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "examples/mnist/configs/default.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2643"
},
{
"name": "Jupyter Notebook",
"bytes": "12612"
},
{
"name": "Python",
"bytes": "956526"
},
{
"name": "Shell",
"bytes": "3995"
}
],
"symlink_target": ""
} |
"""Models of the aps_purchasing app."""
from django.db import models
from django.utils.translation import ugettext_lazy as _
class AML(models.Model):
"""
Approved Manufacturer List.
List of IPNs that may only be purchased through approved manufacturers.
:ipn: The IPN instance.
:manufacturer: A manufacturer who is approved for the given IPN.
"""
ipn = models.ForeignKey(
'aps_bom.IPN',
verbose_name=_('Internal Part Number'),
)
manufacturer = models.ForeignKey(
'aps_purchasing.Manufacturer',
        verbose_name=_('Manufacturer'),
)
class Meta:
verbose_name = _('AML')
verbose_name_plural = _('AMLs')
def __unicode__(self):
return u'{0} - {1}'.format(self.ipn, self.manufacturer)
class Currency(models.Model):
"""
Currency.
:iso_code: The ISO code of this currency.
:name: The name of this currency.
:sign: The sign of this currency.
"""
iso_code = models.CharField(
verbose_name=_('ISO code'),
max_length=3,
)
name = models.CharField(
verbose_name=_('Name'),
max_length=128,
)
sign = models.CharField(
verbose_name=_('Sign'),
max_length=1,
)
class Meta:
verbose_name = _('Currency')
verbose_name_plural = _('Currencies')
def __unicode__(self):
return self.iso_code
class Distributor(models.Model):
"""
Distributor.
:name: Name of this distributor.
:questionnaire_form: Name of the form that was used for this distributor.
:supplier_form: TODO: Describe this field.
:min_order_value: Minimum order value for this distributor.
:currency: Default currency for this distributor.
:payment_terms: Payment terms for this distributor.
:is_approved: If ``True``, this distributor is approved for business.
:is_active: If ``False``, this distributor cannot be used, even if it was
approved before.
"""
name = models.CharField(
verbose_name=_('Name'),
max_length=128,
)
questionnaire_form = models.CharField(
verbose_name=_('Questionnaire form'),
max_length=128,
)
supplier_form = models.CharField(
verbose_name=_('Supplier form'),
max_length=128,
)
min_order_value = models.DecimalField(
verbose_name=_('Minimum order value'),
max_digits=11,
decimal_places=5,
)
currency = models.ForeignKey(
'aps_purchasing.Currency',
verbose_name=_('Currency'),
)
payment_terms = models.ForeignKey(
'aps_purchasing.PaymentTerm',
verbose_name=_('Payment terms'),
related_name='distributors',
)
is_approved = models.BooleanField(
verbose_name=_('Is approved'),
default=False,
)
is_active = models.BooleanField(
verbose_name=_('Is active'),
default=True,
)
def __unicode__(self):
return self.name
class DPN(models.Model):
"""
Distributor Part Number.
:code: The code of this DPN.
:ipn: The internal part number this DPN is mapped to.
:distributor: The distributor who sells this part.
:name: The name of this part.
:mpn: The manufacturer part number for this part.
"""
code = models.CharField(
verbose_name=_('Code'),
max_length=50,
)
ipn = models.ForeignKey(
'aps_bom.IPN',
verbose_name=_('Internal Part Number'),
related_name='dpns',
null=True, blank=True,
)
distributor = models.ForeignKey(
'aps_purchasing.Distributor',
verbose_name=_('Distributor'),
related_name='dpns',
)
name = models.CharField(
verbose_name=_('Name'),
max_length=128,
)
mpn = models.ForeignKey(
'aps_purchasing.MPN',
verbose_name=_('Manufacturer Part Number'),
related_name='dpns',
)
class Meta:
verbose_name = _('DPN')
verbose_name_plural = _('DPNs')
def __unicode__(self):
return self.code
class Manufacturer(models.Model):
"""
Manufacturer.
:code: Internal code for this manufacturer.
:name: Name of this manufacturer.
"""
code = models.CharField(
verbose_name=_('Code'),
max_length=50,
)
name = models.CharField(
verbose_name=_('Name'),
max_length=128,
)
def __unicode__(self):
return self.name
class MPN(models.Model):
"""
Manufacturer Part Number.
:code: The code of this MPN.
:manufacturer: The manufacturer of this part.
:name: The name of this part.
    :pku: The amount of parts in one package unit.
:unit: The package unit of this part.
"""
code = models.CharField(
verbose_name=_('Code'),
max_length=50,
)
manufacturer = models.ForeignKey(
'aps_purchasing.Manufacturer',
verbose_name=_('Manufacturer'),
)
name = models.CharField(
verbose_name=_('Name'),
max_length=128,
)
pku = models.FloatField(
verbose_name=_('Amount per packaging unit'),
)
unit = models.ForeignKey(
'aps_purchasing.PackagingUnit',
verbose_name=_('PackagingUnit'),
)
class Meta:
verbose_name = _('MPN')
verbose_name_plural = _('MPNs')
def __unicode__(self):
return self.code
class PackagingUnit(models.Model):
"""
Packaging Unit.
:name: The name of this unit.
"""
name = models.CharField(
verbose_name=_('Name'),
max_length=128,
)
def __unicode__(self):
return self.name
class PaymentTerm(models.Model):
"""
Payment term.
:code: Internal code for this payment term.
:description: Description of this payment term.
"""
code = models.CharField(
verbose_name=_('Code'),
max_length=50,
)
description = models.CharField(
verbose_name=_('Description'),
max_length=128,
)
def __unicode__(self):
return self.code
class Price(models.Model):
"""
Price.
:quotation_item: The quotation item this price belongs to.
    :moq: The minimum order quantity for this price.
:price: The price.
:currency: The currency for this price.
"""
quotation_item = models.ForeignKey(
'aps_purchasing.QuotationItem',
verbose_name=_('Quotation item'),
related_name='prices',
)
moq = models.DecimalField(
verbose_name=_('MOQ'),
max_digits=11,
decimal_places=5,
)
price = models.DecimalField(
verbose_name=_('price'),
max_digits=11,
decimal_places=5,
)
currency = models.ForeignKey(
Currency,
verbose_name=_('Currency'),
related_name='prices',
)
class Meta:
ordering = ('price', )
def __unicode__(self):
return u'{0}: {1} {2}'.format(
self.quotation_item, self.price, self.currency)
def get_ipn(self):
distributor = self.quotation_item.quotation.distributor
        qs = Price.objects.filter(
            quotation_item__mpn__dpns__distributor=distributor)
        qs = qs.values_list(
            'quotation_item__mpn__dpns__ipn__code').distinct()[0][0]
return qs
class Quotation(models.Model):
"""
Quotation.
:distributor: The distributor offering this quotation.
:ref_number: Reference number for this quotation.
:issuance_date: Issuance date of this quotation.
:expiry_date: Expiry date for this quotation.
    :is_completed: If ``True``, this quotation has been completed.
"""
distributor = models.ForeignKey(
Distributor,
verbose_name=_('Distributor'),
related_name='quotations',
)
ref_number = models.CharField(
verbose_name=_('Reference number'),
max_length=128
)
issuance_date = models.DateTimeField(
verbose_name=_('Issuance date'),
)
expiry_date = models.DateTimeField(
verbose_name=_('Expiry date'),
)
is_completed = models.BooleanField(
verbose_name=_('Completed'),
)
def __unicode__(self):
return self.ref_number
class QuotationItem(models.Model):
"""
Quotation item.
:quotation: The quotation this item belongs to.
    :manufacturer: The manufacturer manufacturing this product.
    :mpn: The MPN this quotation item refers to.
:min_lead_time: TODO: Describe this field.
:max_lead_time: TODO: Describe this field.
"""
quotation = models.ForeignKey(
Quotation,
verbose_name=_('Quotation'),
related_name='quotation_items',
)
manufacturer = models.ForeignKey(
Manufacturer,
verbose_name=_('Manufacturer'),
related_name='quotations',
blank=True, null=True,
)
mpn = models.ForeignKey(
MPN,
verbose_name=_('Manufacturer Part Number'),
related_name='quotation_items',
)
min_lead_time = models.PositiveIntegerField(
verbose_name=_('Minimum lead time'),
)
max_lead_time = models.PositiveIntegerField(
verbose_name=_('Maximum lead time'),
)
def __unicode__(self):
return u'{0} - {1}'.format(self.quotation, self.mpn)
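# Illustrative query (not part of the original file; assumes data exists):
#   Price.objects.filter(
#       quotation_item__quotation__distributor__is_approved=True)
# yields all prices quoted by approved distributors, cheapest first thanks
# to Price.Meta.ordering.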
| {
"content_hash": "ef1d1e735fa58db2d9dd9c488f89cd2b",
"timestamp": "",
"source": "github",
"line_count": 405,
"max_line_length": 77,
"avg_line_length": 22.94814814814815,
"alnum_prop": 0.592317624273725,
"repo_name": "bitmazk/django-aps-purchasing",
"id": "56f4ed51a73d8c22f66836bfe0ac66ee4e30f1a1",
"size": "9294",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aps_purchasing/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4554"
},
{
"name": "Python",
"bytes": "75047"
},
{
"name": "Shell",
"bytes": "5131"
}
],
"symlink_target": ""
} |
import tensorflow as tf
import numpy as np
import threading
# Generating some simple data
r = np.arange(0.0,100003.0)
raw_data = np.dstack((r,r,r,r))[0]
raw_target = np.array([[1,0,0]] * 100003)
# are used to feed data into our queue
queue_input_data = tf.placeholder(tf.float32, shape=[20, 4])
queue_input_target = tf.placeholder(tf.float32, shape=[20, 3])
queue = tf.FIFOQueue(capacity=50, dtypes=[tf.float32, tf.float32], shapes=[[4], [3]])
enqueue_op = queue.enqueue_many([queue_input_data, queue_input_target])
dequeue_op = queue.dequeue()
# tensorflow recommendation:
# capacity = min_after_dequeue + (num_threads + a small safety margin) * batch_size
data_batch, target_batch = tf.train.batch(dequeue_op, batch_size=15, capacity=40)
# use this to shuffle batches:
# data_batch, target_batch = tf.train.shuffle_batch(dequeue_op, batch_size=15, capacity=40, min_after_dequeue=5)
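# Worked example of the recommendation above (illustrative numbers): with
# min_after_dequeue=5, one enqueue thread and batch_size=15, capacity should be
# at least 5 + (1 + a margin of ~1) * 15 = 35, so the 40 used here is safe.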
def enqueue(sess):
    """Iterates over our data and puts small chunks into our queue."""
    under = 0
    data_len = len(raw_data)  # named so we don't shadow the built-in max()
    while True:
        print("starting to write into queue")
        upper = under + 20
        print("try to enqueue ", under, " to ", upper)
        if upper <= data_len:
            curr_data = raw_data[under:upper]
            curr_target = raw_target[under:upper]
            under = upper
        else:
            # wrap around: take the tail of the data plus the head
            rest = upper - data_len
            curr_data = np.concatenate((raw_data[under:data_len], raw_data[0:rest]))
            curr_target = np.concatenate((raw_target[under:data_len], raw_target[0:rest]))
            under = rest
sess.run(enqueue_op, feed_dict={queue_input_data: curr_data,
queue_input_target: curr_target})
print("added to the queue")
print("finished enqueueing")
# start the threads for our FIFOQueue and batch
sess = tf.Session()
enqueue_thread = threading.Thread(target=enqueue, args=[sess])
enqueue_thread.daemon = True  # daemonize so the enqueue thread does not block interpreter exit
enqueue_thread.start()
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord, sess=sess)
# Fetch the data from the pipeline and put it where it belongs (into your model)
for i in range(5):
run_options = tf.RunOptions(timeout_in_ms=4000)
curr_data_batch, curr_target_batch = sess.run([data_batch, target_batch], options=run_options)
print(curr_data_batch)
# shutdown everything to avoid zombies
sess.run(queue.close(cancel_pending_enqueues=True))
coord.request_stop()
coord.join(threads)
sess.close()
| {
"content_hash": "62f36c093c129929f311e1f9fbad9783",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 112,
"avg_line_length": 34.838235294117645,
"alnum_prop": 0.6956521739130435,
"repo_name": "kafkasl/contextualLSTM",
"id": "b9ce68ac88dfac0076bdb0e0a3542f2fc6b585cc",
"size": "2469",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/lstm/input_pipeline.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "127138"
},
{
"name": "Shell",
"bytes": "8976"
}
],
"symlink_target": ""
} |
import serial
import create
# Create the Create!
robot = create.Create("/dev/ttyUSB0")
# Safe Mode keeps the Create from moving while plugged in or driving off cliffs, and it stops and adjusts when bumped.
# Full Mode ignores all that
#robot.toSafeMode()
robot.toFullMode()
# Move forward in millimeters; optional second parameters is speed (cm/s)
robot.move(6)
# Stop
robot.stop()
# Play a C chord
robot.playSong( [(60,8),(64,8),(67,8),(72,8)] )
# Get current position of Create (x,y,theta),
# where theta is the angle the Create is facing.
pose = robot.getPose()
print "My X is %s" % pose[0]
print "My Y is %s" % pose[1]
print "My Theta is %s" % pose[2]
| {
"content_hash": "b175817ad0c48d6c0a90d847e0efee83",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 108,
"avg_line_length": 24.185185185185187,
"alnum_prop": 0.6983154670750383,
"repo_name": "Kaceykaso/design_by_roomba",
"id": "346bd8093bb39906100372db4f0e902e5c123ac2",
"size": "835",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "python/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9377"
},
{
"name": "PHP",
"bytes": "24947"
},
{
"name": "Python",
"bytes": "76133"
}
],
"symlink_target": ""
} |
from models.scf import Frame, database
import warnings, MySQLdb
warnings.filterwarnings("ignore", category=MySQLdb.Warning)
## This class calculates the log-likelihood ratio for each frame
# @author Adriano Zanette
# @version 1.0
class LogLikelihood:
    ## Calculates the log-likelihood ratio for each frame
# @author Adriano Zanette
# @version 1.0
@staticmethod
def calculate():
print 'Calculating frame loglikelihood...'
sql = """ UPDATE """+Frame._meta.db_table+""" AS f
JOIN (
SELECT """+Frame.id.db_column+""",
"""+Frame.relativeFrequency.db_column+"""+0.0 as p1,
"""+Frame.frequency.db_column+"""+0.0 as k1,
"""+Frame.verbFrequency.db_column+"""+0.0 as n1,
("""+Frame.frameFrequency.db_column+""" - """+Frame.frequency.db_column+""")+0.0 as k2,
(framesTotal - """+Frame.verbFrequency.db_column+""")+0.0 as n2,
(("""+Frame.frameFrequency.db_column+""" - """+Frame.frequency.db_column+""")/(framesTotal - """+Frame.verbFrequency.db_column+"""))+0.0 as p2 ,
(("""+Frame.frameFrequency.db_column+""")/framesTotal)+0.0 as p
FROM """+Frame._meta.db_table+"""
JOIN (
SELECT SUM("""+Frame.frequency.db_column+""") AS framesTotal
FROM """+Frame._meta.db_table+""") AS total
) AS faux
ON faux."""+Frame.id.db_column+""" = f."""+Frame.id.db_column+"""
SET """+Frame.logLikelihoodRatio.db_column+""" =
2 * ( (k1*LOG10(p1)+(n1-k1)*LOG10(1-p1))
+(k2*LOG10(p2)+(n2-k2)*LOG10(1-p2))
-(k1*LOG10(p)+(n1-k1)*LOG10(1-p))
-(k2*LOG10(p)+(n2-k2)*LOG10(1-p)))"""
query = database.execute_sql(sql)
sql = """ UPDATE """+Frame._meta.db_table+""" AS f
JOIN (
SELECT MAX("""+Frame.logLikelihoodRatio.db_column+""") as maxLLR
FROM """+Frame._meta.db_table+"""
) AS faux
SET """+Frame.logLikelihoodRatio.db_column+""" = """+Frame.logLikelihoodRatio.db_column+""" / maxLLR"""
query = database.execute_sql(sql)
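# Minimal pure-Python sketch of the binomial log-likelihood ratio the SQL above
# computes per frame. It mirrors the query's use of LOG10; the counts below are
# hypothetical and must satisfy 0 < k < n so the logarithms stay defined.
import math

def llr_sketch(k1, n1, k2, n2):
    k1, n1, k2, n2 = float(k1), float(n1), float(k2), float(n2)
    p1 = k1 / n1               # frame frequency within the verb
    p2 = k2 / n2               # frame frequency outside the verb
    p = (k1 + k2) / (n1 + n2)  # pooled frame frequency
    def term(k, n, x):
        return k * math.log10(x) + (n - k) * math.log10(1 - x)
    return 2 * (term(k1, n1, p1) + term(k2, n2, p2)
                - term(k1, n1, p) - term(k2, n2, p))

# e.g. llr_sketch(30, 100, 70, 900) yields a positive score for an attracted frame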
| {
"content_hash": "cc35791bdbf5d67156d4a510a75cb647",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 176,
"avg_line_length": 52.6875,
"alnum_prop": 0.4665875840253064,
"repo_name": "adzanette/scf-extractor",
"id": "0b08661dbd6aaf939f9c2979f2061f990e687a37",
"size": "2529",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scf-extractor/statistics/LogLikelihood.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "51698"
},
{
"name": "PHP",
"bytes": "131430"
},
{
"name": "Python",
"bytes": "423162"
}
],
"symlink_target": ""
} |
import os
import webapp2
import jinja2
import config
from app.utils.compressor import WEBASSETS_ENV
JINJA_ENV = jinja2.Environment(
autoescape=lambda x: True,
extensions=['jinja2.ext.autoescape',
'webassets.ext.jinja2.AssetsExtension'],
loader=jinja2.FileSystemLoader(
os.path.join(config.PROJECT_ROOT, 'templates')),
)
JINJA_ENV.globals.update({'uri_for': webapp2.uri_for})
JINJA_ENV.assets_environment = WEBASSETS_ENV
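# Example usage (hypothetical template name, shown only as a sketch):
#
#     template = JINJA_ENV.get_template('home.html')
#     html = template.render(page_title='Bedrock')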
| {
"content_hash": "d277049cb87f8d25862156972dc54ebd",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 56,
"avg_line_length": 23.1,
"alnum_prop": 0.7229437229437229,
"repo_name": "ronbeltran/webapp2-bedrock",
"id": "1486390989e81040ce5dfa57404584bdd0aad30e",
"size": "462",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bedrock/app/utils/jinja.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "32"
},
{
"name": "HTML",
"bytes": "2437"
},
{
"name": "Python",
"bytes": "4772"
}
],
"symlink_target": ""
} |
import unittest
import re
from unittest.mock import patch
from flask import current_app, url_for, g
from flask_login import current_user
from webapp import dbsql, mail
from webapp.utils import create_user
from webapp.admin import forms
from webapp.controllers import get_user_by_email
from webapp.notifications import send_confirmation_email
from .base import BaseTestCase
from tests.utils import (
makeOnePage,
makeOneJournal,
makeOneIssue,
makeOneArticle,
makeOneCollection, makeOneSponsor
)
reset_pwd_url_pattern = re.compile('href="(.*)">')
email_confirm_url_pattern = re.compile('href="(.*)">')
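# e.g. (a sketch): both patterns capture everything between href=" and the final ">,
# so reset_pwd_url_pattern.findall('<a href="http://x/reset/abc">go</a>')
# returns ['http://x/reset/abc'].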
class AdminViewsTestCase(BaseTestCase):
def test_unauthorized_access_to_admin_index_must_redirect(self):
"""
Quando:
acessamos a pagina o admin/index, sem ter feito login.
Verificamos:
que é feito um redirect para admin/login
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
admin_index_url = url_for('admin.index')
expected_login_url = url_for('admin.login_view')
# when
response = c.get(admin_index_url, follow_redirects=False)
# then
self.assertStatus(response, 302)
self.assertEqual('text/html; charset=utf-8', response.content_type)
self.assertRedirects(response, expected_login_url)
def test_access_to_admin_index_must_redirect_to_login_form(self):
"""
Quando:
acessamos a pagina o admin/index, sem ter feito login.
Verificamos:
que é feito um redirect para admin/login
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
admin_index_url = url_for('admin.index')
# when
response = c.get(admin_index_url, follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertEqual('text/html; charset=utf-8', response.content_type)
self.assertTemplateUsed('admin/auth/login.html')
def test_invalid_credentials_login_must_show_validation_error(self):
"""
Com:
dados válido para fazer login, de um usuário que *NÃO* existe.
Quando:
tentamos fazer login com esses dados.
Verificamos:
- a pagina visualizada corresponde ao login.
- a pagina visualizada contem uma mensagem indicando: usuário inválido.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
login_credentials = {
'email': '[email protected]',
'password': '123'
}
expected_errors_msg = {
'password': u'<span class="help-block">Usuário inválido</span>',
}
# when
response = c.post(login_url, data=login_credentials)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
self.assertIn(expected_errors_msg['password'], response.data.decode('utf-8'))
def test_invalid_user_login_must_show_validation_error(self):
"""
Com:
dados para fazer login: email inválida, senha válida.
Quando:
tentamos fazer login com esses dados.
Verificamos:
- a pagina visualizada corresponde ao login.
- a pagina visualizada contem uma mensagem indicando:
email inválido e usuário inválido.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
login_credentials = {
                        'email': 'foo',  # invalid email
'password': '123'
}
expected_errors_msg = {
'email': u'<span class="help-block">Invalid email address.</span>',
'password': u'<span class="help-block">Usuário inválido</span>',
}
# when
response = c.post(login_url, data=login_credentials)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
self.assertIn(expected_errors_msg['email'], response.data.decode('utf-8'))
self.assertIn(expected_errors_msg['password'], response.data.decode('utf-8'))
def test_invalid_password_login_must_show_validation_error(self):
"""
Com:
dados para fazer login: email válido, senha inválida.
Quando:
tentamos fazer login com esses dados.
Verificamos:
- a pagina visualizada corresponde ao login.
- a pagina visualizada contem uma mensagem indicando senha requerida.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
login_credentials = {
'email': '[email protected]',
                        'password': '',  # invalid password
}
expected_errors_msg = {
'password': u'<span class="help-block">This field is required.</span>',
}
# when
response = c.post(login_url, data=login_credentials)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
self.assertIn(expected_errors_msg['password'], response.data.decode('utf-8'))
def test_login_successfully(self):
"""
Com:
um novo usuário (com email confirmado)
Quando:
o novo usuário faz login
Verificamos:
- a página visualizada corresponde ao admin/index
- a página visualizada contem link para fazer logout.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
credentials = {
'email': '[email protected]',
'password': '123',
}
expected_page_header = u'<h1>OPAC Admin <small>da coleção: %s</small></h1>' % \
current_app.config['OPAC_COLLECTION'].upper()
expected_logout_url = url_for('admin.logout_view')
# when
create_user(
credentials['email'],
credentials['password'],
True)
                    # now log in as that user:
response = c.post(login_url, data=credentials, follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertIn(expected_page_header, response.data.decode('utf-8'))
self.assertIn(expected_logout_url, response.data.decode('utf-8'))
def test_login_valid_user_with_invalid_password_raise_error_msg(self):
"""
Com:
um novo usuário (com email confirmado)
Quando:
o novo usuário faz login, mas com a senha errada
Verificamos:
- a página visualizada corresponde ao admin/index
- a página visualizada deve informar de senha inválida
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
credentials = {
'email': '[email protected]',
'password': '123',
}
logged_page_header = u'<h1>OPAC Admin <small>da coleção: %s</small></h1>' % \
current_app.config['OPAC_COLLECTION'].upper()
logout_url = url_for('admin.logout_view')
# when
create_user(
credentials['email'],
credentials['password'],
True)
                    # try to log in with the wrong password:
response = c.post(
login_url,
data={
'email': credentials['email'],
'password': 'foo.bar',
},
follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
self.assertNotIn(logged_page_header, response.data.decode('utf-8'))
self.assertNotIn(logout_url, response.data.decode('utf-8'))
def test_login_page_must_have_link_to_password_reset(self):
"""
Quando:
acessamos a pagina de login
Verificamos:
na pagina aparece os link para: recuperar a senha
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
expected_reset_pwd_link = url_for('admin.reset')
# when
response = c.get(login_url, follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
self.assertIn(expected_reset_pwd_link, response.data.decode('utf-8'))
def test_login_page_must_have_set_language_links(self):
"""
Com:
a lista de idiomas suportados pela app
Quando:
acesso a pagina de login
Verificamos:
na pagina aparecem os links para trocar de idioma
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
languages = current_app.config['LANGUAGES']
lang_urls = {}
for lang_code, lang_name in languages.items():
lang_urls[lang_code] = {
'url': url_for('main.set_locale', lang_code=lang_code),
'name': lang_name,
}
# when
response = c.get(login_url, follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
for lang_code, lang_data in lang_urls.items():
lang_url = lang_data['url']
lang_name = lang_data['name']
self.assertIn(lang_url, response.data.decode('utf-8'))
self.assertIn(lang_name, response.data.decode('utf-8'))
@unittest.skip("Falhou na chamada: get_context_variable depois de adicionar os withs")
def test_login_with_unconfirmed_user_must_not_proceed(self):
"""
Com:
um novo usuário (com email NÃO confirmado)
Quando:
o novo usuário faz login, com os dados certos
Verificamos:
- a página visualizada corresponde a admin/auth/unconfirm_email.html.
- a página visualizada deve informar do erro.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
credentials = {
'email': '[email protected]',
'password': '123',
}
expected_form_error = {'password': [u'Senha inválida']}
expected_error_msgs = [
u"Email não confirmado!",
u"Você <strong>deve</strong> confirmar seu email.<br>",
u"<strong>Por favor entre em contato com o administrador.</strong>"]
create_user(
credentials['email'],
credentials['password'],
False)
# when
# create new user:
response = c.post(
login_url,
data=credentials,
follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/unconfirm_email.html')
for msg in expected_error_msgs:
self.assertIn(msg, response.data.decode('utf-8'))
context_form = self.get_context_variable('form')
self.assertIsInstance(context_form, forms.LoginForm)
self.assertEqual(expected_form_error, context_form.errors)
def test_logout_successfully(self):
"""
Com:
um novo usuário (com email confirmado).
Quando:
usuario faz login, e depois logout
Verificamos:
a operação (logout) é realizada com sucesso
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
login_url = url_for('admin.login_view')
logout_url = url_for('admin.logout_view')
credentials = {
'email': '[email protected]',
'password': '123',
}
# when
create_user(credentials['email'], credentials['password'], True)
login_response = c.post(login_url, data=credentials, follow_redirects=True)
self.assertStatus(login_response, 200)
logout_response = c.get(logout_url, follow_redirects=True)
# then
self.assertStatus(logout_response, 200)
self.assertTemplateUsed('admin/auth/login.html')
def test_reset_password_has_form_as_expected(self):
"""
Com:
um novo usuário (com email confirmado).
Quando:
solicitamos a recuperação de senha.
Verificamos:
a pagina carregada é a esperad com o formulario esperado
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
# when
response = c.get(reset_pwd_url)
# then
self.assertStatus(response, 200)
self.assertEqual('text/html; charset=utf-8', response.content_type)
self.assertTemplateUsed('admin/auth/reset.html')
context_form = self.get_context_variable('form')
self.assertIsInstance(context_form, forms.EmailForm)
def test_reset_password_of_invalid_user_raise_404(self):
"""
Com:
um novo usuário (com email confirmado).
Quando:
solicitamos a recuperação de senha.
Verificamos:
deve mostrar uma pagina 404 com o aviso
de usuário não encontrado.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
user_email = '[email protected]'
expected_errors_msg = u'<p>Usuário não encontrado</p>'
# when
response = c.post(reset_pwd_url, data={'email': user_email})
# then
self.assertStatus(response, 404)
self.assertEqual('text/html; charset=utf-8', response.content_type)
self.assertTemplateUsed('errors/404.html')
error_msg = self.get_context_variable('message')
self.assertEqual(error_msg, expected_errors_msg)
def test_reset_password_of_valid_user_proceed_ok(self):
"""
Com:
um novo usuário (com email confirmado).
Quando:
solicitamos a recuperação de senha.
Verificamos:
A notifiação (flash) na página de que foram enviadas
as instruções para o email do novo usuário.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
credentials = {
'email': '[email protected]',
'password': '123'
}
# with
reset_pwd_url = url_for('admin.reset')
expected_msg = u'Enviamos as instruções para recuperar a senha para: %s' % \
credentials['email']
# when
create_user(credentials['email'], credentials['password'], True)
response = c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/login.html')
self.assertIn(expected_msg, response.data.decode('utf-8'))
def test_reset_password_of_valid_user_email_sent(self):
"""
Com:
um novo usuário (com email confirmado)
Quando:
solicitar a recuperação de senha
Verificamos:
Que a mensagem no email enviado contém o
link para continuar a operação.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
credentials = {
'email': '[email protected]',
'password': '123'
}
with patch.dict(current_app.config, {'SERVER_NAME': 'localhost'}):
expected_email = {
'subject': u'Instruções para recuperar sua senha',
'recipients': [credentials['email'], ],
'body_has_link': u'<a href="http://%s%s' % (
current_app.config['SERVER_NAME'],
reset_pwd_url
)
}
# when
create_user(credentials['email'], credentials['password'], True)
with mail.record_messages() as outbox:
response = c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
# then
self.assertStatus(response, 200)
self.assertEqual(1, len(outbox))
email_msg = outbox[0]
self.assertEqual(expected_email['subject'], email_msg.subject)
self.assertEqual(expected_email['recipients'], email_msg.recipients)
self.assertIn(expected_email['body_has_link'], email_msg.html)
def test_reset_password_send_valid_link_via_email(self):
"""
Com:
um novo usuário (com email confirmado),
Quando:
solicitamos recuperar senha, e obtemos o email com
a url necessária para concluir a operação.
Verificamos:
- o email enviado contém um link para recupear senha.
- a pagina de recuparar senha com token seja a correta.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
credentials = {
'email': '[email protected]',
'password': '123'
}
# when
create_user(credentials['email'], credentials['password'], True)
with mail.record_messages() as outbox:
c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
                        # then
                        self.assertEqual(1, len(outbox))
                        email_msg = outbox[0]
                        # extract the links from the email
                        links_found = reset_pwd_url_pattern.findall(email_msg.html)
                        # there is at least 1 link, and only one link to reset/password with a token
                        self.assertGreaterEqual(len(links_found), 1)
                        reset_url_with_token = [url for url in links_found if reset_pwd_url in url]
                        self.assertEqual(1, len(reset_url_with_token))
                        reset_url_with_token = reset_url_with_token[0]
                        # request the password reset with the token
                        reset_pwd_response = c.get(
                            reset_url_with_token,
                            follow_redirects=True)
                        self.assertStatus(reset_pwd_response, 200)
                        self.assertTemplateUsed('admin/auth/reset_with_token.html')
                        context_form = self.get_context_variable('form')
                        self.assertIsInstance(context_form, forms.PasswordForm)
def test_link_sent_via_email_to_reset_password_works_fine(self):
"""
Com:
um novo usuário (com email confirmado),
Quando:
1. solicitamos recuperar senha.
2. obtemos o email com a url necessária para recuperação.
3. e solicitamos uma nova senha, com o link do email.
4. inserimos uma nova senha para o úsuario.
Verificamos:
- a pagina de recuperar senha tenha o form esperado.
- a senha do usuário deve ser atualizada.
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
credentials = {
'email': '[email protected]',
'password': '123'
}
# when
create_user(credentials['email'], credentials['password'], True)
with mail.record_messages() as outbox:
response = c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
                        # then
                        self.assertEqual(1, len(outbox))
                        email_msg = outbox[0]
                        # extract the links from the email
                        links_found = reset_pwd_url_pattern.findall(email_msg.html)
                        # there is at least 1 link, and only one link to reset/password with a token
                        self.assertGreaterEqual(len(links_found), 1)
                        reset_url_with_token = [url for url in links_found if reset_pwd_url in url][0]
                        new_password = 'blaus'
                        response = c.post(
                            reset_url_with_token,
                            data={'password': new_password},
                            follow_redirects=True)
                        self.assertStatus(response, 200)
                        # verify the user's new password
                        user = get_user_by_email(credentials['email'])
                        self.assertTrue(user.is_correct_password(new_password))
def test_reset_password_with_invalid_password_raise_validation_error(self):
"""
Com:
um novo usuário (com email confirmado),
Quando:
1. solicitamos recuperar senha.
2. obtemos o email com a url necessária para recuperação.
3. e solicitamos uma nova senha, com o link do email.
4. inserimos uma senha inválida ('')
Verificamos:
- a pagina deve informar de que senha é requerida
- a senha do usuário não deve ser modificada
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
credentials = {
'email': '[email protected]',
'password': '123'
}
# when
create_user(credentials['email'], credentials['password'], True)
with mail.record_messages() as outbox:
response = c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
                        # then
                        self.assertEqual(1, len(outbox))
                        email_msg = outbox[0]
                        # extract the links from the email
                        links_found = reset_pwd_url_pattern.findall(email_msg.html)
                        # there is at least 1 link, and only one link to reset/password with a token
                        self.assertGreaterEqual(len(links_found), 1)
                        reset_url_with_token = [url for url in links_found if reset_pwd_url in url][0]
                        invalid_password = ''
                        response = c.post(
                            reset_url_with_token,
                            data={'password': invalid_password},
                            follow_redirects=True)
self.assertStatus(response, 200)
context_form = self.get_context_variable('form')
expected_form_error = {'password': [u'This field is required.']}
self.assertEqual(expected_form_error, context_form.errors)
self.assertIn(expected_form_error['password'][0], response.data.decode('utf-8'))
user = get_user_by_email(credentials['email'])
self.assertFalse(user.is_correct_password(invalid_password))
def test_reset_password_with_unconfirmed_email_shows_unconfirm_email_error(self):
"""
Com:
um novo usuário (com email NÃO confirmado),
Quando:
1. solicitamos recuperar senha.
2. obtemos o email com a url necessária para recuperação.
3. e solicitamos uma nova senha, com o link (token) do email.
Verificamos:
- a pagina deve informar que é necessário confirmar o email.
- a troca de senha não procede.
- a pagina deve mostrar o template admin/auth/unconfirm_email.html
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
credentials = {
'email': '[email protected]',
'password': '123'
}
# when
create_user(credentials['email'], credentials['password'], False)
with mail.record_messages() as outbox:
response = c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
# then
                        # no email was sent
self.assertEqual(0, len(outbox))
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/unconfirm_email.html')
user = get_user_by_email(credentials['email'])
self.assertTrue(user.is_correct_password(credentials['password']))
def test_reset_password_with_unconfirmed_email_raise_validation_error_2(self):
"""
Com:
um novo usuário (com email confirmado),
Quando:
1. solicitamos recuperar senha.
2. obtemos o email com a url necessária para recuperação.
3. mudamos o usuário para ter seu email como NÃO confirmado.
4. e solicitamos uma nova senha, com o link (token) do email.
Verificamos:
- a pagina deve informar que é necessário confirmar o email.
- a troca de senha não procede.
- a pagina deve mostrar o template admin/auth/unconfirm_email.html
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
reset_pwd_url = url_for('admin.reset')
credentials = {
'email': '[email protected]',
'password': '123'
}
# when
create_user(credentials['email'], credentials['password'], True)
with mail.record_messages() as outbox:
response = c.post(
reset_pwd_url,
data={'email': credentials['email']},
follow_redirects=True)
                        # then
                        self.assertEqual(1, len(outbox))
                        email_msg = outbox[0]
                        # extract the links from the email
                        links_found = reset_pwd_url_pattern.findall(email_msg.html)
                        # there is at least 1 link, and only one link to reset/password with a token
                        self.assertGreaterEqual(len(links_found), 1)
                        reset_url_with_token = [url for url in links_found if reset_pwd_url in url][0]
                        # now mark the user's email as NOT confirmed.
                        user = get_user_by_email(credentials['email'])
                        user.email_confirmed = False
                        dbsql.session.add(user)
                        dbsql.session.commit()
                        # try to reset the password with the link/token from the email
                        new_password = '321'
                        response = c.post(
                            reset_url_with_token,
                            data={'password': new_password},
                            follow_redirects=True)
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/auth/unconfirm_email.html')
user = get_user_by_email(credentials['email'])
self.assertTrue(user.is_correct_password(credentials['password']))
def test_reset_password_with_invalid_token_raise_404_error_page(self):
"""
Com:
- token inválido
Quando:
solicitar a recuperação de senha com token inválido
Verificamos:
mostra uma pagina de erro 404 com a mensagem de erro
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
invalid_token = 'foo.123.faketoken'
reset_with_token_url = url_for('admin.reset_with_token', token=invalid_token)
expected_errors_msg = u'<p>The requested URL was not found on the server. If you entered the URL manually please check your spelling and try again.</p>'
# when
response = c.get(reset_with_token_url, follow_redirects=True)
# then
self.assertStatus(response, 404)
self.assertTemplateUsed('errors/404.html')
error_message = self.get_context_variable('message')
self.assertEqual(expected_errors_msg, error_message)
def test_confirm_email_with_invalid_token_raise_404_message(self):
"""
Com:
- token inválido
Quando:
solicitar a confirmação de email com token inválido
Verificamos:
mostra uma pagina de erro 404 com a mensagem de erro
"""
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
# with
invalid_token = 'foo.123.faketoken'
confirm_email_url = url_for('admin.confirm_email', token=invalid_token)
expected_errors_msg = u'<p>The requested URL was not found on the server. If you entered the URL manually please check your spelling and try again.</p>'
# when
response = c.get(confirm_email_url, follow_redirects=True)
# then
self.assertStatus(response, 404)
self.assertTemplateUsed('errors/404.html')
error_message = self.get_context_variable('message')
self.assertEqual(expected_errors_msg, error_message)
def test_confirmation_email_send_email_with_token(self):
"""
Com:
- o usuário 'administrador' logado (email confirmado)
- um novo usuário, com email NÃO confirmado
Quando:
1. enviamos emails de confirmação (utilizando a ação do admin/user)
2.
Verificamos:
- que o email enviado contem um link para confirmar email.
- o email é enviado para o destinatario certo.
- após a operação, a página é a correta.
- as notifiação para usuário deve ser mostrada na página.
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
normal_user = {
'email': '[email protected]',
'password': '123'
}
create_user(normal_user['email'], normal_user['password'], False)
login_url = url_for('admin.login_view')
action_payload = {
'action': 'confirm_email',
'rowid': get_user_by_email(normal_user['email']).id,
'url': '/admin/user/'
}
expected_email_sent_notifications = [
u"Enviamos o email de confirmação para: %s" % normal_user['email'],
u"1 usuários foram notificados com sucesso!",
]
expected_email = {
'subject': u'Confirmação de email',
'recipients': [normal_user['email'], ],
}
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
                    # log in as the admin user
login_response = c.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
                    # request the action that sends the confirmation email
with mail.record_messages() as outbox:
action_response = c.post(
'/admin/user/action/',
data=action_payload,
follow_redirects=True)
# then
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
for msg in expected_email_sent_notifications:
self.assertIn(msg, action_response.data.decode('utf-8'))
                        # exactly one email was sent
self.assertEqual(1, len(outbox))
email_msg = outbox[0]
                        # email sent to the right recipient, with the right subject
self.assertEqual(expected_email['recipients'], email_msg.recipients)
# print "expected_email['subject']: ", expected_email['subject']
# print "email_msg.subject.decode('utf-8')", email_msg.subject
self.assertEqual(expected_email['subject'], email_msg.subject)
                        # grab the link with the token
                        links_found = email_confirm_url_pattern.findall(email_msg.html)
                        # there is at least 1 link, and only one link to /admin/confirm/ with a token
                        self.assertGreaterEqual(len(links_found), 1)
                        email_confirmation_url_with_token = [url for url in links_found if '/admin/confirm/' in url]
                        # we have the URL with the token
                        self.assertEqual(1, len(email_confirmation_url_with_token))
email_confirmation_url_with_token = email_confirmation_url_with_token[0]
self.assertIsNotNone(email_confirmation_url_with_token)
self.assertFalse(email_confirmation_url_with_token == '')
def test_open_confirm_url_with_token_sent_via_email_open_the_correct_page(self):
"""
Com:
- o usuário 'administrador' logado (email confirmado)
- um novo usuário, com email NÃO confirmado
Quando:
1. enviamos emails de confirmação (utilizando a ação do admin/user)
2. acesssamos o link enviado por email
Verificamos:
- que o email enviado contem um link para confirmar email.
- após acessar o link, a página é a correta.
- após acessar o link, a págian mostra a notificação de operação ok.
- após acessar o link, o usuário tem seu email confirmado.
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
normal_user = {
'email': '[email protected]',
'password': '123'
}
create_user(normal_user['email'], normal_user['password'], False)
login_url = url_for('admin.login_view')
action_payload = {
'action': 'confirm_email',
'rowid': get_user_by_email(normal_user['email']).id,
'url': '/admin/user/'
}
expected_msg = u'Email: %s confirmado com sucesso!' % normal_user['email']
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
                    # log in as the admin user
login_response = c.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # request the action that sends the confirmation email
with mail.record_messages() as outbox:
action_response = c.post(
'/admin/user/action/',
data=action_payload,
follow_redirects=True)
# then
self.assertStatus(action_response, 200)
                        # exactly one email was sent
                        self.assertEqual(1, len(outbox))
                        email_msg = outbox[0]
                        # grab the link with the token
                        links_found = email_confirm_url_pattern.findall(email_msg.html)
                        # there is at least 1 link, and only one link to /admin/confirm/ with a token
                        self.assertGreaterEqual(len(links_found), 1)
                        email_confirmation_url_with_token = [url for url in links_found if '/admin/confirm/' in url]
                        # we have the URL with the token
                        self.assertEqual(1, len(email_confirmation_url_with_token))
email_confirmation_url_with_token = email_confirmation_url_with_token[0]
                        # follow the link from the email
confirmation_response = c.get(email_confirmation_url_with_token, follow_redirects=True)
self.assertStatus(confirmation_response, 200)
self.assertTemplateUsed('admin/index.html')
                        # confirmation succeeded
self.assertIn(expected_msg, confirmation_response.data.decode('utf-8'))
                        # confirm the user record was updated
user = get_user_by_email(normal_user['email'])
self.assertTrue(user.email_confirmed)
    def test_email_confirmation_token_of_invalid_user_raise_404_error_message(self):
"""
Com:
- email de usuário que não existe no sistema.
Quando:
1. enviamos emails de confirmação (utilizando diretamente notifications.py)
2. acesssamos o link enviado por email
Verificamos:
- que o email enviado contem um link para confirmar email.
- após acessar o link, a página mostra o erro 404 com a mensagem certa.
"""
# with
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as c:
fake_user_email = u'[email protected]'
# when
with mail.record_messages() as outbox:
send_confirmation_email(fake_user_email)
                        # then
                        # exactly one email was sent
                        self.assertEqual(1, len(outbox))
                        email_msg = outbox[0]
                        # grab the link with the token
                        links_found = email_confirm_url_pattern.findall(email_msg.html)
                        # there is at least 1 link, and only one link to /admin/confirm/ with a token
                        self.assertGreaterEqual(len(links_found), 1)
                        email_confirmation_url_with_token = [url for url in links_found if '/admin/confirm/' in url]
                        # we have the URL with the token
                        self.assertEqual(1, len(email_confirmation_url_with_token))
                        email_confirmation_url_with_token = email_confirmation_url_with_token[0]
                        # follow the link from the email
confirmation_response = c.get(email_confirmation_url_with_token, follow_redirects=True)
self.assertStatus(confirmation_response, 404)
self.assertTemplateUsed('errors/404.html')
                        error_msg = self.get_context_variable('message')
                        self.assertIsNotNone(error_msg)
@unittest.skip("Login form no lugar de um UserForm, pq?")
def test_create_user_from_admin_page_creates_a_new_user(self):
"""
Com:
- usuario administrador (com email confirmado)
Quando:
1. acessamos /admin e cadastramos um novo usuário
2. acesssamos o link enviado por email
Verificamos:
- o usuário é criado.
- o usuário administrador é notificodo do sucesso da operação.
- o novo usuário não tem email confirmado.
- o novo usuário é notificado por email para confirmar email.
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
new_user = {
'email': '[email protected]',
'password': '123'
}
login_url = url_for('admin.login_view')
create_user_url = '/admin/user/new/'
# expected_msgs = [
# u'Enviamos o email de confirmação para: %s' % new_user['email'],
# u'Registro criado com sucesso.',
# ]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with mail.record_messages() as outbox:
with self.client as client:
                        # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
                        # submit the new-user form (which triggers the confirmation email)
create_user_response = client.post(
create_user_url,
data={'email': new_user['email']},
follow_redirects=True)
# then
self.assertStatus(create_user_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# for msg in expected_msgs:
# self.assertIn(msg, action_response.data.decode('utf-8'))
                        # exactly one email was sent
                        self.assertEqual(1, len(outbox))
                        email_msg = outbox[0]
                        # grab the link with the token
                        links_found = email_confirm_url_pattern.findall(email_msg.html)
                        # there is at least 1 link, and only one link to /admin/confirm/ with a token
                        self.assertGreaterEqual(len(links_found), 1)
                        email_confirmation_url_with_token = [url for url in links_found if '/admin/confirm/' in url]
                        # we have the URL with the token
                        self.assertEqual(1, len(email_confirmation_url_with_token))
                        email_confirmation_url_with_token = email_confirmation_url_with_token[0]
self.assertIsNotNone(email_confirmation_url_with_token)
self.assertFalse(email_confirmation_url_with_token == '')
                        # follow the link from the email
user = get_user_by_email(new_user['email'])
confirmation_response = self.client.get(email_confirmation_url_with_token, follow_redirects=True)
self.assertStatus(confirmation_response, 200)
self.assertTemplateUsed('admin/index.html')
                        # confirmation succeeded
                        # self.assertIn(expected_msg, confirmation_response.data.decode('utf-8'))
                        # confirm the user record was updated
self.assertTrue(user.email_confirmed)
@unittest.skip("Login form no lugar de um UserForm, pq?")
def test_try_to_create_user_without_email_must_raise_error_notification(self):
"""
Com:
- usuario administrador (com email confirmado)
Quando:
1. acessamos /admin
2. tentamos cadastrar um novo usuário, ** sem inserir email **
Verificamos:
- o usuário não é criado.
- o usuário administrado é notificodo do erro da operação.
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
# new_user = {
# 'email': '[email protected]',
# 'password': '123'
# }
login_url = url_for('admin.login_view')
create_user_url = '/admin/user/new/'
expected_form_error = {'email': [u'This field is required.']}
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with mail.record_messages() as outbox:
with self.client as client:
                        # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# "preencher" from sem o email do novo usuário
create_user_response = client.post(
create_user_url,
data={'email': ''},
follow_redirects=True)
# then
self.assertStatus(create_user_response, 200)
self.assertTemplateUsed('admin/model/create.html')
                        # the form has an error
context_form = self.get_context_variable('form')
self.assertEqual(expected_form_error, context_form.errors)
                        # no email was sent
self.assertEqual(0, len(outbox))
# TEST ADMIN INDEX #
def test_admin_index_content_counts_is_ok(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina /admin
Verificamos:
- que a contagem de documentos (periódicos, números e artigos) totais esta certa.
- que a contagem de documentos (periódicos, números e artigos) publicadas esta certa.
"""
# with
j_pub = makeOneJournal({'is_public': True})
makeOneJournal({'is_public': False})
i_pub = makeOneIssue({'is_public': True, 'journal': j_pub})
makeOneIssue({'is_public': False, 'journal': j_pub})
makeOneArticle({'is_public': True, 'journal': j_pub, 'issue': i_pub})
makeOneArticle({'is_public': False, 'journal': j_pub, 'issue': i_pub})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# then
counts = self.get_context_variable('counts')
count_keys = [
'journals_total_count',
'journals_public_count',
'issues_total_count',
'issues_public_count',
'articles_total_count',
'articles_public_count',
]
                    for k in count_keys:
                        self.assertIn(k, counts)
                    # journal counts
journals_total_count = counts['journals_total_count']
self.assertEqual(2, journals_total_count)
journals_public_count = counts['journals_public_count']
self.assertEqual(1, journals_public_count)
                    # issue counts
issues_total_count = counts['issues_total_count']
self.assertEqual(2, issues_total_count)
issues_public_count = counts['issues_public_count']
self.assertEqual(1, issues_public_count)
                    # article counts
articles_total_count = counts['articles_total_count']
self.assertEqual(2, articles_total_count)
articles_public_count = counts['articles_public_count']
self.assertEqual(1, articles_public_count)
class JournalAdminViewTests(BaseTestCase):
def test_admin_journal_list_records(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Journal no banco
Quando:
- fazemos login e
- acessamos a pagina /admin/journal/
Verificamos:
- o Journal criado deve estar listado nessa página
- e o template utilizado é o esperado
"""
# with
journal = makeOneJournal()
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
                    # access the journals tab
journal_list_response = client.get(url_for('journal.index_view'))
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # then
                    # check the response:
                    # it contains the id used to access the journal
self.assertIn(journal.id, journal_list_response.data.decode('utf-8'))
def test_admin_journal_details(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Journal no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do periódico: /admin/journal/details/
Verificamos:
- a pagina mostra o periódico certo
"""
# with
journal = makeOneJournal()
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_detail_url = url_for('journal.details_view', id=journal.id)
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as client:
                    # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
                    # access the journals tab
journal_detail_response = client.get(journal_detail_url)
self.assertStatus(journal_detail_response, 200)
self.assertTemplateUsed('admin/model/details.html')
                    # then
                    # check the response:
                    # it contains the id used to access the journal
self.assertIn(journal.id, journal_detail_response.data.decode('utf-8'))
def test_admin_journal_search_by_id(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Journal no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do periódico: /admin/journal/details/
- realizamos uma busca pelo id do periódico
Verificamos:
- a página mostra o periódico certo
"""
# with
journal = makeOneJournal()
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as client:
                    # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    journal_search_response = client.get(journal_index_url, data={'search': journal.id})
                    self.assertStatus(journal_search_response, 200)
                    # it contains the id used to access the journal
                    self.assertIn(journal.id, journal_search_response.data.decode('utf-8'))
def test_admin_journal_check_column_filters(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
Verificamos:
- que contém todos os column_filters esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
expected_col_filters = [
'current_status',
'index_at',
'is_public',
'unpublish_reason',
'scimago_id',
]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # check the view's filters
column_filters = self.get_context_variable('admin_view').column_filters
self.assertEqual(len(expected_col_filters), len(column_filters))
for expected_col_filter in expected_col_filters:
self.assertIn(expected_col_filter, column_filters)
def test_admin_journal_check_searchable_columns(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
Verificamos:
- que contém todos os campos de busca esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
expected_column_searchable_list = [
'_id', 'title', 'title_iso', 'short_title',
'print_issn', 'eletronic_issn', 'acronym',
]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # check the view's searchable columns
column_searchable_list = self.get_context_variable('admin_view').column_searchable_list
for expected_searchable_field in expected_column_searchable_list:
self.assertIn(expected_searchable_field, column_searchable_list)
def test_admin_journal_check_column_exclude_list(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
Verificamos:
- que contém todos os campos excluidos da listagem são os esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
expected_column_exclude_list = [
'_id', 'jid', 'title_slug', 'timeline', 'subject_categories',
'study_areas', 'social_networks', 'title_iso', 'short_title',
'subject_descriptors', 'copyrighter', 'online_submission_url',
'cover_url', 'logo_url', 'previous_journal_ref',
'publisher_name', 'publisher_country', 'publisher_state',
'publisher_city', 'publisher_address', 'publisher_telephone',
'mission', 'index_at', 'sponsors', 'issue_count', 'other_titles',
'print_issn', 'eletronic_issn', 'unpublish_reason', 'url_segment',
]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # check the view's excluded columns
column_exclude_list = self.get_context_variable('admin_view').column_exclude_list
for expected_excluded_field in expected_column_exclude_list:
self.assertIn(expected_excluded_field, column_exclude_list)
def test_admin_journal_check_column_formatters(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
Verificamos:
- que contém todos os formatadores de campos como são os esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
expected_column_formatters = [
'created',
'updated',
]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # check the view's column formatters
column_formatters = self.get_context_variable('admin_view').column_formatters
for expected_column_formatter in expected_column_formatters:
self.assertIn(expected_column_formatter, column_formatters.keys())
def test_admin_journal_check_column_labels_defined(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
Verificamos:
- que contém todas as etiquetas de campos esperadas
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
expected_column_labels = [
'jid',
'collection',
'timeline',
'subject_categories',
'study_areas',
'social_networks',
'title',
'title_iso',
'short_title',
'created',
'updated',
'acronym',
'scielo_issn',
'print_issn',
'eletronic_issn',
'subject_descriptors',
'online_submission_url',
'cover_url',
'logo_url',
'other_titles',
'publisher_name',
'publisher_country',
'publisher_state',
'publisher_city',
'publisher_address',
'publisher_telephone',
'mission',
'index_at',
'sponsors',
'previous_journal_ref',
'current_status',
'issue_count',
'is_public',
'unpublish_reason',
'url_segment',
]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # check the view's column labels
column_labels = self.get_context_variable('admin_view').column_labels
for expected_column_label in expected_column_labels:
self.assertIn(expected_column_label, column_labels.keys())
def test_admin_journal_check_can_create_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
Verificamos:
- que não permite criar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # check the view's can_create flag
can_create = self.get_context_variable('admin_view').can_create
self.assertFalse(can_create)
def test_admin_journal_check_can_edit_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as client:
                    # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # check the view's can_edit flag
                    can_edit = self.get_context_variable('admin_view').can_edit
                    self.assertFalse(can_edit)
def test_admin_journal_check_can_delete_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
Verificamos:
- que não permite apagar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # check the view's can_delete flag
can_delete = self.get_context_variable('admin_view').can_delete
self.assertFalse(can_delete)
def test_admin_journal_check_create_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # check the view's create_modal flag
create_modal = self.get_context_variable('admin_view').create_modal
self.assertTrue(create_modal)
def test_admin_journal_check_edit_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # check the view's edit_modal flag
edit_modal = self.get_context_variable('admin_view').edit_modal
self.assertTrue(edit_modal)
def test_admin_journal_check_can_view_details_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # check the view's can_view_details flag
can_view_details = self.get_context_variable('admin_view').can_view_details
self.assertTrue(can_view_details)
def test_admin_journal_check_actions_defined(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
Verificamos:
- que contém todas as etiquetas de campos esperadas
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
expected_actions = [
'publish',
'unpublish_default',
]
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journals tab
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # check the actions defined on the view
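                    # 'actions' in the template context is a list of
                    # (name, text) tuples, so a[0] extracts the action name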
actions = [a[0] for a in self.get_context_variable('actions')]
self.assertEqual(len(expected_actions), len(actions))
for expected_action in expected_actions:
self.assertIn(expected_action, actions)
def test_admin_journal_action_publishing_an_unpublished_journal(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Journal no banco (is_public=False)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
- realizamos a ação de pubilcar
Verificamos:
- o periódico deve ficar como público
- o usuario é notificado do resultado da operação
"""
# with
journal = makeOneJournal({'is_public': False})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
publish_action_url = '%saction/' % journal_index_url
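        # Note: Flask-Admin exposes bulk actions at '<index_url>action/';
        # the POST form carries 'action' (handler name) and 'rowid' (record id)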
expected_msg = u'Periódico(s) publicado(s) com sucesso!!'
# when
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
with self.client as client:
                    # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journal list
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                    # run the publish action:
action_response = client.post(
publish_action_url,
data={
'url': journal_index_url,
'action': 'publish',
'rowid': journal.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
journal.reload()
self.assertTrue(journal.is_public)
def test_admin_journal_action_publishing_a_public_journal(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Journal no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
- realizamos a ação de pubilcar
Verificamos:
- o periódico deve ficar como público
- o usuario é notificado do resultado da operação
"""
# with
journal = makeOneJournal({'is_public': True})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
action_url = '%saction/' % journal_index_url
expected_msg = u'Periódico(s) publicado(s) com sucesso!!'
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journal list
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
                    # run the publish action:
action_response = client.post(
action_url,
data={
'url': journal_index_url,
'action': 'publish',
'rowid': journal.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
journal.reload()
self.assertTrue(journal.is_public)
def test_admin_journal_action_unpublish_default_a_public_journal(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Journal no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
- realizamos a ação de despublicar (unpublish_default)
Verificamos:
- o periódico deve ficar despublicado
- o motivo de despublicação deve ser por: 'Conteúdo temporariamente indisponível'
- o usuario é notificado do resultado da operação
"""
# with
journal = makeOneJournal({'is_public': True})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
action_url = '%saction/' % journal_index_url
expected_msg = 'Periódico(s) despublicado com sucesso!!'
expected_reason = 'Conteúdo temporariamente indisponível'
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journal list
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
                    # run the unpublish action:
action_response = client.post(
action_url,
data={
'url': journal_index_url,
'action': 'unpublish_default',
'rowid': journal.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
journal.reload()
self.assertFalse(journal.is_public)
self.assertEqual(expected_reason, journal.unpublish_reason)
def test_admin_journal_action_publish_with_exception_raised_must_be_consistent(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Journal no banco (is_public=False)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
- realizamos a ação de publicacar, mas é levantada uma exceção no processo
Verificamos:
- o periódico deve ficar como não público (is_public=False)
- o usuario é notificado que houve um erro na operação
"""
# with
journal = makeOneJournal({'is_public': False})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
action_url = '%saction/' % journal_index_url
expected_msg = u'Ocorreu um erro tentando publicar o(s) periódico(s)!!'
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journal list
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
                    # run the publish action:
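                    # NOTE: if the POST below raises as expected, the
                    # assertions inside this assertRaises block never run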
with self.assertRaises(Exception):
action_response = client.post(
action_url,
data={
'url': journal_index_url,
'action': 'publish',
                                'rowid': None,  # missing rowid should raise an exception
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
journal.reload()
                        self.assertFalse(journal.is_public)
def test_admin_journal_action_unpublish_default_with_exception_raised_must_be_consistent(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Journal no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/journal/
- realizamos a ação de despublicacar (unpublish_default), mas é levantada uma exceção no processo
Verificamos:
- o periódico deve ficar como público (is_public=True)
- o usuario é notificado que houve um erro na operação
"""
# with
journal = makeOneJournal({'is_public': True})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
journal_index_url = url_for('journal.index_view')
action_url = '%saction/' % journal_index_url
expected_msg = u'Ocorreu um erro tentando despublicar o(s) periódico(s)!!'
with current_app.app_context():
collection = makeOneCollection()
g.collection = collection
with current_app.test_request_context():
# when
with self.client as client:
                    # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
                    # access the journal list
journal_list_response = client.get(journal_index_url)
self.assertStatus(journal_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
                    # run the unpublish action:
with self.assertRaises(Exception):
action_response = client.post(
action_url,
data={
'url': journal_index_url,
'action': 'unpublish_default',
                                'rowid': None,  # missing rowid should raise an exception
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                        self.assertIn(expected_msg, action_response.data.decode('utf-8'))
journal.reload()
self.assertTrue(journal.is_public)
class IssueAdminViewTests(BaseTestCase):
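    """Tests for the Issue Flask-Admin view (/admin/issue/)."""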
def test_admin_issue_list_records(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Issue no banco
Quando:
- fazemos login e
- acessamos a pagina /admin/issue/
Verificamos:
- o Issue criado deve esta listado nessa página
"""
# with
issue = makeOneIssue()
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
            # access the issues tab
issue_list_response = client.get(url_for('issue.index_view'))
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
            # check the response
            # it contains the id used to access the issue
self.assertIn(issue.id, issue_list_response.data.decode('utf-8'))
def test_admin_issue_details(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Issue no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do issue: /admin/issue/details/
Verificamos:
- a pagina mostra o issue certo
"""
# with
issue = makeOneIssue()
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_detail_url = url_for('issue.details_view', id=issue.id)
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
            # access the issue detail page
issue_detail_response = client.get(issue_detail_url)
self.assertStatus(issue_detail_response, 200)
self.assertTemplateUsed('admin/model/details.html')
# then
            # check the response
            # it contains the id used to access the issue
self.assertIn(issue.id, issue_detail_response.data.decode('utf-8'))
def test_admin_issue_search_by_id(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Issue no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do issue: /admin/issue/details/
- realizamos uma busca pelo id do issue
Verificamos:
- a página mostra o issue certo
"""
# with
issue = makeOneIssue()
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # search by the issue id; query_string puts it in the GET URL
            issue_search_response = client.get(
                issue_index_url, query_string={'search': issue.id})
            self.assertStatus(issue_search_response, 200)
            # the search result must contain the issue id
            self.assertIn(issue.id, issue_search_response.data.decode('utf-8'))
def test_admin_issue_check_column_filters(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de issue: /admin/issue/
Verificamos:
- que contém todos os column_filters esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
expected_col_filters = [
'journal',
'volume',
'number',
'type',
'start_month',
'end_month',
'year',
'is_public',
'unpublish_reason',
]
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's column_filters
column_filters = self.get_context_variable('admin_view').column_filters
self.assertEqual(len(expected_col_filters), len(column_filters))
for expected_col_filter in expected_col_filters:
self.assertIn(expected_col_filter, column_filters)
def test_admin_issue_check_searchable_columns(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de issues: /admin/issue/
Verificamos:
- que contém todos os campos de busca esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
expected_column_searchable_list = [
'iid', 'journal', 'volume', 'number',
'label'
]
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's column_searchable_list
column_searchable_list = self.get_context_variable('admin_view').column_searchable_list
for expected_searchable_field in expected_column_searchable_list:
self.assertIn(expected_searchable_field, column_searchable_list)
def test_admin_issue_check_column_exclude_list(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de issues: /admin/issue/
Verificamos:
- que contém todos os campos excluidos da listagem são os esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
        expected_column_exclude_list = [
            '_id', 'sections', 'cover_url', 'suppl_text',
            'spe_text', 'start_month', 'end_month', 'order', 'label',
            'unpublish_reason'
        ]
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's column_exclude_list
column_exclude_list = self.get_context_variable('admin_view').column_exclude_list
for expected_excluded_field in expected_column_exclude_list:
self.assertIn(expected_excluded_field, column_exclude_list)
def test_admin_issue_check_column_formatters(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de issues: /admin/issue/
Verificamos:
- que contém todos os formatadores de campos como são os esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
expected_column_formatters = [
'created',
'updated',
]
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's column_formatters
column_formatters = self.get_context_variable('admin_view').column_formatters
for expected_column_formatter in expected_column_formatters:
self.assertIn(expected_column_formatter, column_formatters.keys())
def test_admin_issue_check_column_labels_defined(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de issues: /admin/issue/
Verificamos:
- que contém todas as etiquetas de campos esperadas
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
expected_column_labels = [
'iid',
'journal',
'sections',
'cover_url',
'volume',
'number',
'created',
'updated',
'type',
'suppl_text',
'spe_text',
'start_month',
'end_month',
'year',
'label',
'order',
'is_public',
'unpublish_reason',
]
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's column_labels
column_labels = self.get_context_variable('admin_view').column_labels
for expected_column_label in expected_column_labels:
self.assertIn(expected_column_label, column_labels.keys())
def test_admin_issue_check_can_create_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/issue/
Verificamos:
- que não permite criar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's can_create flag
can_create = self.get_context_variable('admin_view').can_create
self.assertFalse(can_create)
def test_admin_issue_check_can_edit_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Issues: /admin/issue/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's can_edit flag
can_edit = self.get_context_variable('admin_view').can_edit
self.assertFalse(can_edit)
def test_admin_issue_check_can_delete_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de issues: /admin/issue/
Verificamos:
- que não permite apagar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's can_delete flag
can_delete = self.get_context_variable('admin_view').can_delete
self.assertFalse(can_delete)
def test_admin_issue_check_create_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/issue/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's create_modal flag
create_modal = self.get_context_variable('admin_view').create_modal
self.assertTrue(create_modal)
def test_admin_issue_check_edit_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/issue/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's edit_modal flag
edit_modal = self.get_context_variable('admin_view').edit_modal
self.assertTrue(edit_modal)
def test_admin_issue_check_can_view_details_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de issues: /admin/issue/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's can_view_details flag
can_view_details = self.get_context_variable('admin_view').can_view_details
self.assertTrue(can_view_details)
def test_admin_issue_check_actions_defined(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de issues: /admin/issue/
Verificamos:
- que contém todas as etiquetas de campos esperadas
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
expected_actions = [
'publish',
'unpublish_default',
]
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issues tab
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the actions defined on the view
actions = [a[0] for a in self.get_context_variable('actions')]
self.assertEqual(len(expected_actions), len(actions))
for expected_action in expected_actions:
self.assertIn(expected_action, actions)
def test_admin_issue_action_publishing_an_unpublished_issue(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Issue no banco (is_public=False)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Issue: /admin/issue/
- realizamos a ação de pubilcar
Verificamos:
- o Issue deve ficar como público
- o usuario é notificado do resultado da operação
"""
# with
issue = makeOneIssue({'is_public': False})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
publish_action_url = '%saction/' % issue_index_url
expected_msg = u'Número(s) publicado(s) com sucesso!!'
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issue list
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # run the publish action:
action_response = client.post(
publish_action_url,
data={
'url': issue_index_url,
'action': 'publish',
'rowid': issue.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
issue.reload()
self.assertTrue(issue.is_public)
def test_admin_issue_action_publishing_a_public_issue(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Issue no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de issues: /admin/issue/
- realizamos a ação de pubilcar
Verificamos:
- o issue deve ficar como público
- o usuario é notificado do resultado da operação
"""
# with
issue = makeOneIssue({'is_public': True})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
action_url = '%saction/' % issue_index_url
expected_msg = u'Número(s) publicado(s) com sucesso!!'
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issue list
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
            # run the publish action:
action_response = client.post(
action_url,
data={
'url': issue_index_url,
'action': 'publish',
'rowid': issue.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
issue.reload()
self.assertTrue(issue.is_public)
def test_admin_issue_action_unpublish_default_a_public_issue(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Issue no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de issues: /admin/issue/
- realizamos a ação de despublicar (unpublish_default)
Verificamos:
- o issue deve ficar despublicado
- o motivo de despublicação deve ser por: 'Conteúdo temporariamente indisponível'
- o usuario é notificado do resultado da operação
"""
# with
issue = makeOneIssue({'is_public': True})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
action_url = '%saction/' % issue_index_url
expected_msg = 'Número(s) despublicado(s) com sucesso!!'
expected_reason = 'Conteúdo temporariamente indisponível'
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issue list
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
            # run the unpublish action:
action_response = client.post(
action_url,
data={
'url': issue_index_url,
'action': 'unpublish_default',
'rowid': issue.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
issue.reload()
self.assertFalse(issue.is_public)
self.assertEqual(expected_reason, issue.unpublish_reason)
def test_admin_issue_action_publish_with_exception_raised_must_be_consistent(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Issue no banco (is_public=False)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Issues: /admin/issue/
- realizamos a ação de publicacar, mas é levantada uma exceção no processo
Verificamos:
- o Issue deve ficar como não público (is_public=False)
- o usuario é notificado que houve um erro na operação
"""
# with
issue = makeOneIssue({'is_public': False})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
action_url = '%saction/' % issue_index_url
        expected_msg = u'Ocorreu um erro tentando publicar o(s) número(s)!!.'
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issue list
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
            # run the publish action:
with self.assertRaises(Exception):
action_response = client.post(
action_url,
data={
'url': issue_index_url,
'action': 'publish',
                        'rowid': None,  # missing rowid should raise an exception
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
issue.reload()
                self.assertFalse(issue.is_public)
def test_admin_issue_action_unpublish_default_with_exception_raised_must_be_consistent(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Issue no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Issues: /admin/issue/
- realizamos a ação de despublicacar (unpublish_default), mas é levantada uma exceção no processo
Verificamos:
- o issue deve ficar como público (is_public=True)
- o usuario é notificado que houve um erro na operação
"""
# with
issue = makeOneIssue({'is_public': True})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
issue_index_url = url_for('issue.index_view')
action_url = '%saction/' % issue_index_url
expected_msg = u'Ocorreu um erro tentando despublicar o(s) número(s)!!.'
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the issue list
issue_list_response = client.get(issue_index_url)
self.assertStatus(issue_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
            # run the unpublish action:
with self.assertRaises(Exception):
action_response = client.post(
action_url,
data={
'url': issue_index_url,
'action': 'unpublish_default',
                        'rowid': None,  # missing rowid should raise an exception
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                self.assertIn(expected_msg, action_response.data.decode('utf-8'))
issue.reload()
self.assertTrue(issue.is_public)
class ArticleAdminViewTests(BaseTestCase):
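    """Tests for the Article Flask-Admin view (/admin/article/)."""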
def test_admin_article_list_records(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco
Quando:
- fazemos login e
- acessamos a pagina /admin/article/
Verificamos:
- o Article criado deve esta listado nessa página
"""
# with
article = makeOneArticle({'title': u'foo bar baz'})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
            # access the articles tab
article_list_response = client.get(url_for('article.index_view'))
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
            # check the response
            # it contains the id used to access the article
self.assertIn(article.id, article_list_response.data.decode('utf-8'))
def test_admin_article_details(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do article: /admin/article/details/
Verificamos:
- a pagina mostra o article certo
"""
# with
article = makeOneArticle()
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_detail_url = url_for('article.details_view', id=article.id)
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
            # access the article detail page
article_detail_response = client.get(article_detail_url)
self.assertStatus(article_detail_response, 200)
self.assertTemplateUsed('admin/model/details.html')
# then
            # check the response
            # it contains the id used to access the article
self.assertIn(article.id, article_detail_response.data.decode('utf-8'))
def test_admin_article_search_by_id(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do article: /admin/article/details/
- realizamos uma busca pelo id do article
Verificamos:
- a página mostra o article certo
"""
# with
article = makeOneArticle()
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # search by the article id; query_string puts it in the GET URL
            article_search_response = client.get(
                article_index_url, query_string={'search': article.id})
            self.assertStatus(article_search_response, 200)
            # the search result must contain the article id
            self.assertIn(article.id, article_search_response.data.decode('utf-8'))
def test_admin_article_check_column_filters(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/article/
Verificamos:
- que contém todos os column_filters esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
expected_col_filters = [
'issue', 'journal', 'is_aop', 'is_public', 'unpublish_reason', 'display_full_text'
]
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's column_filters
column_filters = self.get_context_variable('admin_view').column_filters
self.assertEqual(len(expected_col_filters), len(column_filters))
for expected_col_filter in expected_col_filters:
self.assertIn(expected_col_filter, column_filters)
def test_admin_article_check_searchable_columns(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
Verificamos:
- que contém todos os campos de busca esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
expected_column_searchable_list = [
'aid', 'issue', 'journal', 'title', 'domain_key'
]
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's column_searchable_list
column_searchable_list = self.get_context_variable('admin_view').column_searchable_list
self.assertEqual(len(expected_column_searchable_list), len(column_searchable_list))
for expected_searchable_field in expected_column_searchable_list:
self.assertIn(expected_searchable_field, column_searchable_list)
def test_admin_article_check_column_exclude_list(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
Verificamos:
- que contém todos os campos excluidos da listagem são os esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
expected_column_exclude_list = [
'_id', 'aid', 'section', 'is_aop', 'htmls', 'domain_key', 'xml',
'unpublish_reason', 'translated_titles', 'sections', 'pdfs', 'languages',
'original_language', 'created', 'abstract', 'authors', 'order',
'abstract_languages', 'elocation', 'fpage', 'lpage', 'url_segment'
]
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's column_exclude_list
column_exclude_list = self.get_context_variable('admin_view').column_exclude_list
self.assertEqual(len(expected_column_exclude_list), len(column_exclude_list))
for expected_excluded_field in expected_column_exclude_list:
self.assertIn(expected_excluded_field, column_exclude_list)
def test_admin_article_check_column_formatters(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
Verificamos:
- que contém todos os formatadores de campos como são os esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
expected_column_formatters = [
'created',
'updated',
]
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's column_formatters
column_formatters = self.get_context_variable('admin_view').column_formatters
self.assertEqual(len(expected_column_formatters), len(column_formatters))
for expected_column_formatter in expected_column_formatters:
self.assertIn(expected_column_formatter, column_formatters.keys())
def test_admin_article_check_column_labels_defined(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/article/
Verificamos:
- que contém todas as etiquetas de campos esperadas
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
expected_column_labels = [
'aid',
'issue',
'journal',
'title',
'section',
'is_aop',
'created',
'updated',
'htmls',
'domain_key',
'is_public',
'unpublish_reason',
'url_segment',
'pid',
'original_language',
'translated_titles',
'sections',
'authors',
'abstract',
'order',
'doi',
'languages',
'abstract_languages',
'display_full_text'
]
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's column_labels
column_labels = self.get_context_variable('admin_view').column_labels
self.assertEqual(len(expected_column_labels), len(column_labels))
for expected_column_label in expected_column_labels:
self.assertIn(expected_column_label, column_labels.keys())
def test_admin_article_check_can_create_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/article/
Verificamos:
- que não permite criar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's can_create flag
can_create = self.get_context_variable('admin_view').can_create
self.assertFalse(can_create)
def test_admin_article_check_can_edit_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/article/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's can_edit flag
can_edit = self.get_context_variable('admin_view').can_edit
self.assertFalse(can_edit)
def test_admin_article_check_can_delete_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
Verificamos:
- que não permite apagar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's can_delete flag
can_delete = self.get_context_variable('admin_view').can_delete
self.assertFalse(can_delete)
def test_admin_article_check_create_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/article/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's create_modal flag
create_modal = self.get_context_variable('admin_view').create_modal
self.assertTrue(create_modal)
def test_admin_article_check_edit_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/article/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
            # admin user login
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles tab
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's edit_modal flag
edit_modal = self.get_context_variable('admin_view').edit_modal
self.assertTrue(edit_modal)
def test_admin_article_check_can_view_details_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles list
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
can_view_details = self.get_context_variable('admin_view').can_view_details
self.assertTrue(can_view_details)
def test_admin_article_check_actions_defined(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
Verificamos:
- que contém todas as etiquetas de campos esperadas
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
expected_actions = [
'publish',
'unpublish_default',
'set_full_text_unavailable',
'set_full_text_available',
]
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles list
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the actions defined on the view
actions = [a[0] for a in self.get_context_variable('actions')]
self.assertEqual(len(expected_actions), len(actions))
for expected_action in expected_actions:
self.assertIn(expected_action, actions)
def test_admin_article_action_publishing_an_unpublished_article(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco (is_public=False)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Articles: /admin/article/
- realizamos a ação de pubilcar
Verificamos:
- o artigo deve ficar como público
- o usuario é notificado do resultado da operação
"""
# with
article = makeOneArticle({'is_public': False})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
publish_action_url = '%saction/' % article_index_url
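        # Flask-Admin exposes the list view's batch actions at "<index_url>action/"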
expected_msg = u'Artigo(s) publicado com sucesso!!'
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles list
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # run the publish action:
action_response = client.post(
publish_action_url,
data={
'url': article_index_url,
'action': 'publish',
'rowid': article.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
article.reload()
self.assertTrue(article.is_public)
def test_admin_article_action_publishing_a_public_article(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
- realizamos a ação de pubilcar
Verificamos:
- o article deve ficar como público
- o usuario é notificado do resultado da operação
"""
# with
article = makeOneArticle({'is_public': True})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
action_url = '%saction/' % article_index_url
expected_msg = u'Artigo(s) publicado com sucesso!!'
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles list
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
            # run the publish action:
action_response = client.post(
action_url,
data={
'url': article_index_url,
'action': 'publish',
'rowid': article.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
article.reload()
self.assertTrue(article.is_public)
def test_admin_article_action_unpublish_default_a_public_article(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de articles: /admin/article/
- realizamos a ação de despublicar (unpublis_default)
Verificamos:
- o article deve ficar despublicado
- o motivo de despublicação deve ser por: 'Conteúdo temporariamente indisponível'
- o usuario é notificado do resultado da operação
"""
# with
article = makeOneArticle({'is_public': True})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
action_url = '%saction/' % article_index_url
expected_msg = 'Artigo(s) despublicado com sucesso!!'
expected_reason = 'Conteúdo temporariamente indisponível'
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles list
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
            # run the unpublish action:
action_response = client.post(
action_url,
data={
'url': article_index_url,
'action': 'unpublish_default',
'rowid': article.id,
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
article.reload()
self.assertFalse(article.is_public)
self.assertEqual(expected_reason, article.unpublish_reason)
def test_admin_article_action_publish_with_exception_raised_must_be_consistent(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco (is_public=False)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Articles: /admin/article/
- realizamos a ação de publicacar, mas é levantada uma exceção no processo
Verificamos:
- o Article deve ficar como não público (is_public=False)
- o usuario é notificado que houve um erro na operação
"""
# with
article = makeOneArticle({'is_public': False})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
action_url = '%saction/' % article_index_url
expected_msg = u'Ocorreu um erro tentando despublicar o(s) número(s)!!.'
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles list
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
            # run the publish action:
with self.assertRaises(Exception):
action_response = client.post(
action_url,
data={
'url': article_index_url,
'action': 'publish',
                        'rowid': None,  # a missing rowid should raise an exception
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
self.assertIn(expected_msg, action_response.data.decode('utf-8'))
article.reload()
                self.assertFalse(article.is_public)
def test_admin_article_action_unpublish_default_with_exception_raised_must_be_consistent(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Article no banco (is_public=True)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Articles: /admin/article/
- realizamos a ação de despublicacar (unpublish_default), mas é levantada uma exceção no processo
Verificamos:
- o article deve ficar como público (is_public=True)
- o usuario é notificado que houve um erro na operação
"""
# with
article = makeOneArticle({'is_public': True})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
article_index_url = url_for('article.index_view')
action_url = '%saction/' % article_index_url
expected_msg = u'Ocorreu um erro tentando despublicar o(s) número(s)!!.'
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the articles list
article_list_response = client.get(article_index_url)
self.assertStatus(article_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
            # run the unpublish action:
with self.assertRaises(Exception):
action_response = client.post(
action_url,
data={
'url': article_index_url,
'action': 'unpublish_default',
                        'rowid': None,  # a missing rowid should raise an exception
},
follow_redirects=True
)
self.assertStatus(action_response, 200)
self.assertTemplateUsed('admin/model/list.html')
                self.assertIn(expected_msg, action_response.data.decode('utf-8'))
article.reload()
self.assertTrue(article.is_public)
class CollectionAdminViewTests(BaseTestCase):
def test_admin_collection_list_records(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Collection no banco
Quando:
- fazemos login e
- acessamos a pagina /admin/collection/
Verificamos:
- o Collection criado deve estar listado nessa página
- e o template utilizado é o esperado
"""
# with
collection = makeOneCollection()
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
            # access the collections list
collection_list_response = client.get(url_for('collection.index_view'))
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
            # check the response:
            # it contains the id used to access the collection
self.assertIn(collection.id, collection_list_response.data.decode('utf-8'))
def test_admin_collection_details(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Collection no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do Collection: /admin/collection/details/
Verificamos:
- a pagina mostra o Collection certo
"""
# with
collection = makeOneCollection()
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_detail_url = url_for('collection.details_view', id=collection.id)
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
            # access the collections list
collection_detail_response = client.get(collection_detail_url)
self.assertStatus(collection_detail_response, 200)
self.assertTemplateUsed('admin/model/details.html')
# then
            # check the response:
            # it contains the id used to access the Collection
self.assertIn(collection.id, collection_detail_response.data.decode('utf-8'))
def test_admin_collection_check_column_exclude_list(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de collections: /admin/collection/
Verificamos:
- que contém todos os campos excluidos da listagem são os esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
expected_column_exclude_list = [
'_id', 'about', 'home_logo_pt', 'home_logo_es', 'home_logo_en',
'header_logo_pt', 'header_logo_es', 'header_logo_en',
'menu_logo_pt', 'menu_logo_es', 'menu_logo_en',
'logo_footer', 'logo_drop_menu'
]
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the collections list
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's excluded columns
column_exclude_list = self.get_context_variable('admin_view').column_exclude_list
self.assertEqual(len(expected_column_exclude_list), len(column_exclude_list))
for expected_excluded_field in expected_column_exclude_list:
self.assertIn(expected_excluded_field, column_exclude_list)
def test_admin_collection_check_form_excluded_columns(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de collections: /admin/collection/
Verificamos:
- que contém todos os campos excluidos do formulario são os esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
expected_form_excluded_columns = ('acronym', 'metrics')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the collections list
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's form-excluded columns
form_excluded_columns = self.get_context_variable('admin_view').form_excluded_columns
self.assertEqual(len(expected_form_excluded_columns), len(form_excluded_columns))
for expected_form_excluded_column in expected_form_excluded_columns:
self.assertIn(expected_form_excluded_column, form_excluded_columns)
def test_admin_collection_check_can_create_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/collection/
Verificamos:
- que não permite criar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the collections list
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
can_create = self.get_context_variable('admin_view').can_create
self.assertFalse(can_create)
def test_admin_collection_check_can_edit_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/collection/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the collections list
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
can_edit = self.get_context_variable('admin_view').can_edit
self.assertTrue(can_edit)
def test_admin_collection_check_can_delete_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de collections: /admin/collection/
Verificamos:
- que não permite apagar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the collections list
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
can_delete = self.get_context_variable('admin_view').can_delete
self.assertFalse(can_delete)
def test_admin_collection_check_create_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/collection/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the collections list
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
create_modal = self.get_context_variable('admin_view').create_modal
self.assertTrue(create_modal)
def test_admin_collection_check_edit_modal_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de periódicos: /admin/collection/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the collections list
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
edit_modal = self.get_context_variable('admin_view').edit_modal
self.assertTrue(edit_modal)
def test_admin_collection_check_can_view_details_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de collections: /admin/collection/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
collection_index_url = url_for('collection.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the collections list
collection_list_response = client.get(collection_index_url)
self.assertStatus(collection_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
can_view_details = self.get_context_variable('admin_view').can_view_details
self.assertTrue(can_view_details)
class SponsorAdminViewTests(BaseTestCase):
def test_admin_sponsor_list_records(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Collection no banco
Quando:
- fazemos login e
- acessamos a pagina /admin/sponsor/
Verificamos:
- o Collection criado deve estar listado nessa página
- e o template utilizado é o esperado
"""
# with
sponsor = makeOneSponsor()
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
            # access the sponsors list
sponsor_list_response = client.get(url_for('sponsor.index_view'))
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
# then
            # check the response:
            # it contains the id used to access the sponsor
self.assertIn(sponsor.id, sponsor_list_response.data.decode('utf-8'))
def test_admin_sponsor_details(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Sponsor no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do Sponsor: /admin/sponsor/details/
Verificamos:
- a pagina mostra o Sponsor certo
"""
# with
sponsor = makeOneSponsor()
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_detail_url = url_for('sponsor.details_view', id=sponsor.id)
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
            # access the sponsors list
sponsor_detail_response = client.get(sponsor_detail_url)
self.assertStatus(sponsor_detail_response, 200)
self.assertTemplateUsed('admin/model/details.html')
# then
            # check the response:
            # it contains the id used to access the Sponsor
self.assertIn(sponsor.id, sponsor_detail_response.data.decode('utf-8'))
def test_admin_sponsor_check_column_exclude_list(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsors: /admin/sponsor/
Verificamos:
- que contém todos os campos excluidos da listagem são os esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
expected_column_exclude_list = ('_id', )
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the sponsors list
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's excluded columns
column_exclude_list = self.get_context_variable('admin_view').column_exclude_list
self.assertEqual(len(expected_column_exclude_list), len(column_exclude_list))
for expected_excluded_field in expected_column_exclude_list:
self.assertIn(expected_excluded_field, column_exclude_list)
def test_admin_sponsor_check_form_excluded_columns(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsors: /admin/sponsor/
Verificamos:
- que contém todos os campos excluidos do formulario são os esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the sponsors list
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's form-excluded columns
form_excluded_columns = self.get_context_variable('admin_view').form_excluded_columns
self.assertEqual(None, form_excluded_columns)
def test_admin_sponsor_check_can_create_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Sponsor: /admin/sponsor/
Verificamos:
- que não permite criar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the sponsors list
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
can_create = self.get_context_variable('admin_view').can_create
self.assertTrue(can_create)
def test_admin_sponsor_check_can_edit_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de Article: /admin/sponsor/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the sponsors list
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
can_edit = self.get_context_variable('admin_view').can_edit
self.assertTrue(can_edit)
def test_admin_sponsor_check_can_delete_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsors: /admin/sponsor/
Verificamos:
- que não permite apagar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the sponsors list
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
can_delete = self.get_context_variable('admin_view').can_delete
self.assertTrue(can_delete)
def test_admin_sponsor_check_create_modal_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsor: /admin/sponsor/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the sponsors list
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
create_modal = self.get_context_variable('admin_view').create_modal
self.assertFalse(create_modal)
def test_admin_sponsor_check_edit_modal_is_false(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsor: /admin/sponsor/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the sponsors list
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
edit_modal = self.get_context_variable('admin_view').edit_modal
self.assertFalse(edit_modal)
def test_admin_sponsor_check_can_view_details_is_true(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsors: /admin/sponsor/
Verificamos:
- que não permite editar registros
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the sponsors list
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's configuration flags
can_view_details = self.get_context_variable('admin_view').can_view_details
self.assertTrue(can_view_details)
def test_admin_sponsor_check_searchable_columns(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
Quando:
- fazemos login e
- acessamos a pagina de listagem de sponsors: /admin/sponsor/
Verificamos:
- que contém todos os campos de busca esperados
"""
# with
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
sponsor_index_url = url_for('sponsor.index_view')
expected_column_searchable_list = ('name',)
# when
with self.client as client:
            # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
            # access the sponsors list
sponsor_list_response = client.get(sponsor_index_url)
self.assertStatus(sponsor_list_response, 200)
self.assertTemplateUsed('admin/model/list.html')
            # check the view's searchable columns
column_searchable_list = self.get_context_variable('admin_view').column_searchable_list
self.assertEqual(len(expected_column_searchable_list), len(column_searchable_list))
for expected_searchable_field in expected_column_searchable_list:
self.assertIn(expected_searchable_field, column_searchable_list)
class PagesAdminViewTests(BaseTestCase):
def test_admin_page_details(self):
"""
Com:
- usuário administrador cadastrado (com email confirmado)
- um novo registro do tipo: Journal no banco
Quando:
- fazemos login e
- acessamos a pagina de detalhe do periódico: /admin/journal/details/
Verificamos:
- a pagina mostra o periódico certo
"""
content = '<a href="http://www.scielo.br/avaliacao/faq_avaliacao_en.htm"><img src="http://www.scielo.br/img/revistas/abcd/glogo.gif">'
page = makeOnePage({'_id': 'xxxxx', 'content': content})
admin_user = {
'email': '[email protected]',
'password': 'foobarbaz',
}
create_user(admin_user['email'], admin_user['password'], True)
login_url = url_for('admin.login_view')
# when
with current_app.app_context():
with self.client as client:
                # log in as the admin user
login_response = client.post(
login_url,
data=admin_user,
follow_redirects=True)
self.assertStatus(login_response, 200)
self.assertTemplateUsed('admin/index.html')
self.assertTrue(current_user.is_authenticated)
# edit pages
sent = {
'content': content * 2,
'slug_name': 'criterios',
'name': 'criterios',
'language': 'pt_BR',
'description': 'DESCRIIIIIIII',
}
response = client.post(
'/admin/pages/edit/?id={}'.format(page.id),
data=sent,
follow_redirects=True
)
self.assertTemplateUsed('admin/model/list.html')
self.assertStatus(response, 200)
self.assertIn(
'DESCRIIIIIIII', response.data.decode('utf-8'))
self.assertIn(
'success', response.data.decode('utf-8'))
url = url_for('pages.details_view', id=page.id)
response = client.get(url)
self.assertStatus(response, 200)
self.assertTemplateUsed('admin/model/details.html')
response_data = response.data.decode('utf-8')
self.assertIn(page.id, response_data)
self.assertIn(
"/avaliacao/faq_avaliacao_en.htm",
response_data)
self.assertIn(
"/img/revistas/abcd/glogo.gif",
response_data)
self.assertEqual(
response_data.count(
"/avaliacao/faq_avaliacao_en.htm"),
2)
self.assertEqual(
response_data.count(
"/img/revistas/abcd/glogo.gif"),
2)
| {
"content_hash": "4276b53e5a0c10e522d2d27dc706e9cf",
"timestamp": "",
"source": "github",
"line_count": 4778,
"max_line_length": 173,
"avg_line_length": 42.16366680619506,
"alnum_prop": 0.5333171182082618,
"repo_name": "scieloorg/opac",
"id": "5f27f0c85f60a7eea4480fb4b989dc2dc19c3219",
"size": "202154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opac/tests/test_admin_views.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "28018"
},
{
"name": "Dockerfile",
"bytes": "1669"
},
{
"name": "HTML",
"bytes": "250667"
},
{
"name": "JavaScript",
"bytes": "846068"
},
{
"name": "Less",
"bytes": "142376"
},
{
"name": "Makefile",
"bytes": "7235"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "888421"
},
{
"name": "Shell",
"bytes": "567"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from .models import Book
class BookAdmin(admin.ModelAdmin):
    fields = ('title', 'price')
# Register your models here.
admin.site.register(Book, BookAdmin)
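# Passing BookAdmin to register() (instead of the bare call) applies the
# "fields" form layout above to the Book pages in the Django admin.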
| {
"content_hash": "d87c2a3662ba060664ee957294b5ec99",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 34,
"avg_line_length": 20.22222222222222,
"alnum_prop": 0.7472527472527473,
"repo_name": "tlycken/whatiscode",
"id": "cb2089c09fc85d345a5cee5b80afaf48fd27f331",
"size": "182",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "research/sample/django-example/bookstore/admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "APL",
"bytes": "17"
},
{
"name": "C",
"bytes": "146"
},
{
"name": "CSS",
"bytes": "66825"
},
{
"name": "Common Lisp",
"bytes": "102"
},
{
"name": "FORTRAN",
"bytes": "245"
},
{
"name": "Forth",
"bytes": "66"
},
{
"name": "HTML",
"bytes": "821834"
},
{
"name": "JavaScript",
"bytes": "406913"
},
{
"name": "PHP",
"bytes": "127"
},
{
"name": "PostScript",
"bytes": "120477"
},
{
"name": "Python",
"bytes": "8678"
},
{
"name": "Ruby",
"bytes": "1579"
}
],
"symlink_target": ""
} |
import json
import os
from operator import itemgetter
import datetime
from flask import current_app as app
from app import get_db, s3_client
from flask_login import current_user
@app.template_filter("to_str")
def to_jobs(lst):
return ", ".join(json.loads(lst))
@app.template_filter("to_name")
def to_name(company_id):
comp = get_db().companies.find_one({"id": company_id}, {"name": 1})
return comp.get("name") if comp else None
@app.template_filter("get_info")
def get_info(students):
if students and all(bool(s) for s in students):
users = get_db().users.find({"id": {"$in": students}})
return list(users) if users else []
else:
return []
@app.template_filter("get_status")
def get_status(user):
types = [
"resume_workshop",
"interview_workshop",
"letter_workshop",
"fra_workshop",
"career_workshop",
]
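    # Tier rule as implemented below: "Expert" needs more than five meetings
    # covering every workshop type; "Avancé" needs every type except the last;
    # "Intérmédiaire" needs the first two types; anything else is "Débutant".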
if not user.get("job_center", {}).get("meetings"):
return "Débutant"
meetings = user.get("job_center", {}).get("meetings")
types_present = list(set(m["type"] for m in meetings))
if len(meetings) > 5 and all(e in types_present for e in types):
return "Expert"
elif all(e in types_present for e in types[:-1]):
return "Avancé"
elif all(e in types_present for e in types[:-3]):
return "Intérmédiaire"
else:
return "Débutant"
@app.template_filter("get_meeting")
def get_meeting(id, advisor=None):
if not advisor:
advisor = current_user.advisor
meeting = get_db().users.find_one(
{"id": current_user.advisor}, {"availabilities": {"$elemMatch": {"id": id}}}
)
meeting = meeting.get("availabilities")[0] if meeting.get("availabilities") else {}
return meeting
@app.template_filter("get_students")
def get_students(advisor_id):
students = list(get_db().users.find({"advisor": advisor_id}))
return students
@app.template_filter("sort_dict_by_key")
def sort_dict_by_key(l, key, reverse=False):
return sorted(l, key=itemgetter(key), reverse=reverse)
@app.template_filter('to_ambassador')
def to_ambassador(user_id):
return get_db().users.find_one({'id': user_id}, {'events.fra.ambassador': 1})['events']['fra'].get('ambassador')
@app.template_filter('to_companies')
def to_companies(day):
    if day == 'mercredi':
        duration = 'wed'
    elif day == 'jeudi':
        duration = 'thu'
cur = get_db().companies.find({'duration': {'$in': [duration, 'both']}}, {
'id': 1, 'name': 1, 'pole': 1, f'ambassadors.{day}': 1, '_id': 0})
cur = list(cur)
cur = [l for l in cur if l['id'] != 'admin']
cur = [l for l in cur if l['id'] != 'test']
res = []
for c in cur:
is_filled = bool(c.get('ambassadors') and c.get('ambassadors').get(day))
d = {'id': c['id'], 'name': c['name'].lower().capitalize(), 'is_filled': is_filled}
if c.get('pole'):
res.append(d)
return res
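# Illustrative Jinja usage of the filter above (hypothetical template snippet):
#   {% for company in 'mercredi'|to_companies %}{{ company.name }}{% endfor %}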
| {
"content_hash": "27520a245b8378227d74379914a1d3b0",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 116,
"avg_line_length": 29.87878787878788,
"alnum_prop": 0.6085192697768763,
"repo_name": "ForumOrganisation/forumorg",
"id": "a418823a158cacebb9577840f9d7d2410b2e8cc0",
"size": "2963",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/users/helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "570943"
},
{
"name": "Dockerfile",
"bytes": "422"
},
{
"name": "HTML",
"bytes": "645487"
},
{
"name": "JavaScript",
"bytes": "310986"
},
{
"name": "Python",
"bytes": "91339"
}
],
"symlink_target": ""
} |
"""Positive definite Operator defined with diagonal covariance."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import six
from tensorflow.contrib.distributions.python.ops import operator_pd
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
@six.add_metaclass(abc.ABCMeta)
class OperatorPDDiagBase(operator_pd.OperatorPDBase):
"""Base class for diagonal operators."""
def __init__(self, diag, verify_pd=True, name='OperatorPDDiagBase'):
self._verify_pd = verify_pd
self._name = name
with ops.name_scope(name):
with ops.op_scope([diag], 'init'):
self._diag = self._check_diag(diag)
def _check_diag(self, diag):
"""Verify that `diag` is positive."""
diag = ops.convert_to_tensor(diag, name='diag')
if not self.verify_pd:
return diag
deps = [check_ops.assert_positive(diag)]
return control_flow_ops.with_dependencies(deps, diag)
@property
def name(self):
"""String name identifying this `Operator`."""
return self._name
@property
def verify_pd(self):
"""Whether to verify that this `Operator` is positive definite."""
return self._verify_pd
@property
def dtype(self):
"""Data type of matrix elements of `A`."""
return self._diag.dtype
@property
def inputs(self):
"""Initialization arguments."""
return [self._diag]
def get_shape(self):
"""`TensorShape` giving static shape."""
# If d_shape = [5, 3], we return [5, 3, 3].
d_shape = self._diag.get_shape()
return d_shape.concatenate(d_shape[-1:])
def _shape(self):
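    # Dynamic counterpart of get_shape: append the last dimension k, so a
    # diag of shape [N1,...,Nn, k] yields the operator shape [N1,...,Nn, k, k].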
d_shape = array_ops.shape(self._diag)
k = array_ops.gather(d_shape, array_ops.size(d_shape) - 1)
return array_ops.concat(0, (d_shape, [k]))
@abc.abstractmethod
def _batch_log_det(self):
pass
@abc.abstractmethod
def _inv_quadratic_form_on_vectors(self, x):
pass
@abc.abstractmethod
def _batch_matmul(self, x, transpose_x=False):
pass
@abc.abstractmethod
def _batch_sqrt_matmul(self, x, transpose_x=False):
pass
@abc.abstractmethod
def _batch_solve(self, rhs):
pass
@abc.abstractmethod
def _batch_sqrt_solve(self, rhs):
pass
@abc.abstractmethod
def _to_dense(self):
pass
@abc.abstractmethod
def _sqrt_to_dense(self):
pass
@abc.abstractmethod
def _add_to_tensor(self, mat):
pass
class OperatorPDDiag(OperatorPDDiagBase):
"""Class representing a (batch) of positive definite matrices `A`.
This class provides access to functions of a batch of symmetric positive
definite (PD) matrices `A` in `R^{k x k}`.
In this case, `A` is diagonal and is defined by a provided tensor `diag`,
`A_{ii} = diag[i]`.
Determinants, solves, and storage are `O(k)`.
In practice, this operator represents a (batch) matrix `A` with shape
`[N1,...,Nn, k, k]` for some `n >= 0`. The first `n` indices designate a
batch member. For every batch member `(i1,...,ib)`, `A[i1,...,ib, : :]` is
a `k x k` matrix.
For example,
```python
distributions = tf.contrib.distributions
diag = [1.0, 2.0]
operator = OperatorPDDiag(diag)
operator.det() # ==> (1 * 2)
# Compute the quadratic form x^T A^{-1} x for vector x.
x = [1.0, 2.0]
operator.inv_quadratic_form_on_vectors(x)
# Matrix multiplication by the square root, S w, with A = S S^T.
# Recall A is diagonal, and so then is S, with S_{ij} = sqrt(A_{ij}).
# If w is iid normal, S w has covariance A.
w = [[1.0],
[2.0]]
operator.sqrt_matmul(w)
```
The above three methods, `log_det`, `inv_quadratic_form_on_vectors`, and
`sqrt_matmul` provide "all" that is necessary to use a covariance matrix
in a multi-variate normal distribution. See the class
`MultivariateNormalDiag`.
"""
def __init__(self, diag, verify_pd=True, name='OperatorPDDiag'):
"""Initialize an OperatorPDDiag.
Args:
diag: Shape `[N1,...,Nn, k]` positive tensor with `n >= 0`, `k >= 1`.
verify_pd: Whether to check `diag` is positive.
name: A name to prepend to all ops created by this class.
"""
super(OperatorPDDiag, self).__init__(
diag, verify_pd=verify_pd, name=name)
def _batch_log_det(self):
return math_ops.reduce_sum(
math_ops.log(self._diag), reduction_indices=[-1])
def _inv_quadratic_form_on_vectors(self, x):
return self._iqfov_via_solve(x)
def _batch_matmul(self, x, transpose_x=False):
if transpose_x:
x = array_ops.batch_matrix_transpose(x)
diag_mat = array_ops.expand_dims(self._diag, -1)
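    # [..., k, 1] * [..., k, m] broadcasts to a diagonal matmul without
    # materializing the dense k x k matrix.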
return diag_mat * x
def _batch_sqrt_matmul(self, x, transpose_x=False):
if transpose_x:
x = array_ops.batch_matrix_transpose(x)
diag_mat = array_ops.expand_dims(self._diag, -1)
return math_ops.sqrt(diag_mat) * x
def _batch_solve(self, rhs):
diag_mat = array_ops.expand_dims(self._diag, -1)
return rhs / diag_mat
def _batch_sqrt_solve(self, rhs):
diag_mat = array_ops.expand_dims(self._diag, -1)
return rhs / math_ops.sqrt(diag_mat)
def _to_dense(self):
return array_ops.batch_matrix_diag(self._diag)
def _sqrt_to_dense(self):
return array_ops.batch_matrix_diag(math_ops.sqrt(self._diag))
def _add_to_tensor(self, mat):
mat_diag = array_ops.batch_matrix_diag_part(mat)
new_diag = self._diag + mat_diag
return array_ops.batch_matrix_set_diag(mat, new_diag)
class OperatorPDSqrtDiag(OperatorPDDiagBase):
"""Class representing a (batch) of positive definite matrices `A`.
This class provides access to functions of a batch of symmetric positive
definite (PD) matrices `A` in `R^{k x k}` defined by their square root,
`S`, such that `A = SS^T`.
In this case, `S` is diagonal and is defined by a provided tensor `diag`,
`S_{ii} = diag[i]`. As a result, `A` is diagonal with `A_{ii} = diag[i]**2`.
Determinants, solves, and storage are `O(k)`.
In practice, this operator represents a (batch) matrix `A` with shape
`[N1,...,Nn, k, k]` for some `n >= 0`. The first `n` indices designate a
batch member. For every batch member `(i1,...,ib)`, `A[i1,...,ib, : :]` is
a `k x k` matrix.
For example,
```python
distributions = tf.contrib.distributions
diag = [1.0, 2.0]
operator = OperatorPDSqrtDiag(diag)
operator.det() # ==> (1 * 2)**2
# Compute the quadratic form x^T A^{-1} x for vector x.
x = [1.0, 2.0]
operator.inv_quadratic_form_on_vectors(x)
# Matrix multiplication by the square root, S w.
# If w is iid normal, S w has covariance A.
w = [[1.0], [2.0]]
operator.sqrt_matmul(w)
```
The above three methods, `log_det`, `inv_quadratic_form_on_vectors`, and
`sqrt_matmul` provide "all" that is necessary to use a covariance matrix
in a multi-variate normal distribution. See the class
`MultivariateNormalDiag`.
"""
def __init__(self, diag, verify_pd=True, name='OperatorPDSqrtDiag'):
"""Initialize an OperatorPDSqrtDiag.
Args:
diag: Shape `[N1,...,Nn, k]` positive tensor with `n >= 0`, `k >= 1`.
verify_pd: Whether to check `diag` is positive.
name: A name to prepend to all ops created by this class.
"""
super(OperatorPDSqrtDiag, self).__init__(
diag, verify_pd=verify_pd, name=name)
def _batch_log_det(self):
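    # A = S S^T with S = diag(d), so log(det(A)) = 2 * sum_i log(d_i).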
return 2 * math_ops.reduce_sum(
math_ops.log(self._diag), reduction_indices=[-1])
def _inv_quadratic_form_on_vectors(self, x):
# This Operator is defined in terms of diagonal entries of the sqrt.
return self._iqfov_via_sqrt_solve(x)
def _batch_matmul(self, x, transpose_x=False):
if transpose_x:
x = array_ops.batch_matrix_transpose(x)
diag_mat = array_ops.expand_dims(self._diag, -1)
return math_ops.square(diag_mat) * x
def _batch_sqrt_matmul(self, x, transpose_x=False):
if transpose_x:
x = array_ops.batch_matrix_transpose(x)
diag_mat = array_ops.expand_dims(self._diag, -1)
return diag_mat * x
def _batch_solve(self, rhs):
diag_mat = array_ops.expand_dims(self._diag, -1)
return rhs / math_ops.square(diag_mat)
def _batch_sqrt_solve(self, rhs):
diag_mat = array_ops.expand_dims(self._diag, -1)
return rhs / diag_mat
def _to_dense(self):
return array_ops.batch_matrix_diag(math_ops.square(self._diag))
def _sqrt_to_dense(self):
return array_ops.batch_matrix_diag(self._diag)
def _add_to_tensor(self, mat):
mat_diag = array_ops.batch_matrix_diag_part(mat)
new_diag = math_ops.square(self._diag) + mat_diag
return array_ops.batch_matrix_set_diag(mat, new_diag)
| {
"content_hash": "cfd8e4316c2c2db49dc7af1882881ad1",
"timestamp": "",
"source": "github",
"line_count": 286,
"max_line_length": 79,
"avg_line_length": 30.527972027972027,
"alnum_prop": 0.6583438323216126,
"repo_name": "Lab603/PicEncyclopedias",
"id": "5e019355f74a3d72ef34876b94cbbc1addbc72a6",
"size": "9420",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "jni-build/jni/include/tensorflow/contrib/distributions/python/ops/operator_pd_diag.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "361482"
},
{
"name": "C++",
"bytes": "22994090"
},
{
"name": "CMake",
"bytes": "72924"
},
{
"name": "CSS",
"bytes": "1548"
},
{
"name": "HTML",
"bytes": "1040352"
},
{
"name": "Java",
"bytes": "252082"
},
{
"name": "JavaScript",
"bytes": "25902"
},
{
"name": "Jupyter Notebook",
"bytes": "3547008"
},
{
"name": "Makefile",
"bytes": "47206"
},
{
"name": "Objective-C",
"bytes": "10664"
},
{
"name": "Objective-C++",
"bytes": "91354"
},
{
"name": "Python",
"bytes": "19063444"
},
{
"name": "Shell",
"bytes": "476334"
},
{
"name": "TypeScript",
"bytes": "1264488"
}
],
"symlink_target": ""
} |
from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Supplier(models.Model):
"""
* A supplier holds all shop supplier related information
* A Supplier is only created by the system administrator
"""
user = models.ForeignKey(User, models.CASCADE)
name = models.CharField(max_length=100)
slug = models.SlugField(_(u"Slug"), unique=True, max_length=80)
active = models.BooleanField(default=True)
class Meta:
app_label = 'supplier'
def __str__(self):
return u"%s" % (self.name)
| {
"content_hash": "94e5886d6d7d32c17f290048000aee37",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 67,
"avg_line_length": 30.8,
"alnum_prop": 0.6866883116883117,
"repo_name": "diefenbach/django-lfs",
"id": "39b7eeae948998d9f70fb63ee40d7e1ff5c664da",
"size": "633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lfs/supplier/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "96584"
},
{
"name": "HTML",
"bytes": "616573"
},
{
"name": "JavaScript",
"bytes": "591609"
},
{
"name": "Python",
"bytes": "1425991"
}
],
"symlink_target": ""
} |
import oslo_messaging
from oslo_messaging import serializer as oslo_serializer
DEFAULT_URL = "__default__"
TRANSPORTS = {}
def setup():
oslo_messaging.set_transport_defaults('ceilometer')
def get_transport(conf, url=None, optional=False, cache=True):
"""Initialise the oslo_messaging layer."""
global TRANSPORTS, DEFAULT_URL
cache_key = url or DEFAULT_URL
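    # a None/empty url is cached under the DEFAULT_URL sentinel key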
transport = TRANSPORTS.get(cache_key)
if not transport or not cache:
try:
transport = oslo_messaging.get_transport(conf, url)
except (oslo_messaging.InvalidTransportURL,
oslo_messaging.DriverLoadFailure):
if not optional or url:
# NOTE(sileht): oslo_messaging is configured but unloadable
# so reraise the exception
raise
return None
else:
if cache:
TRANSPORTS[cache_key] = transport
return transport
def cleanup():
"""Cleanup the oslo_messaging layer."""
global TRANSPORTS, NOTIFIERS
NOTIFIERS = {}
    # iterate over a snapshot of the keys: deleting entries while iterating
    # the dict directly raises RuntimeError on Python 3
    for url in list(TRANSPORTS):
        TRANSPORTS[url].cleanup()
        del TRANSPORTS[url]
_SERIALIZER = oslo_serializer.JsonPayloadSerializer()
def get_batch_notification_listener(transport, targets, endpoints,
allow_requeue=False,
batch_size=1, batch_timeout=None):
"""Return a configured oslo_messaging notification listener."""
return oslo_messaging.get_batch_notification_listener(
transport, targets, endpoints, executor='threading',
allow_requeue=allow_requeue,
batch_size=batch_size, batch_timeout=batch_timeout)
def get_notifier(transport, publisher_id):
"""Return a configured oslo_messaging notifier."""
notifier = oslo_messaging.Notifier(transport, serializer=_SERIALIZER)
return notifier.prepare(publisher_id=publisher_id)
def convert_to_old_notification_format(priority, notification):
# FIXME(sileht): temporary convert notification to old format
# to focus on oslo_messaging migration before refactoring the code to
# use the new oslo_messaging facilities
notification = notification.copy()
notification['priority'] = priority
notification.update(notification["metadata"])
for k in notification['ctxt']:
notification['_context_' + k] = notification['ctxt'][k]
del notification['ctxt']
del notification['metadata']
return notification
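# Illustrative example of the conversion above (hypothetical values):
#   {'ctxt': {'user': 'u1'}, 'metadata': {'message_id': 'm1'}, 'payload': {}}
# becomes, for priority 'info':
#   {'priority': 'info', 'message_id': 'm1', '_context_user': 'u1', 'payload': {}}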
| {
"content_hash": "0e3e502815c35563cd5e1da0f04db3d6",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 75,
"avg_line_length": 34.375,
"alnum_prop": 0.6658585858585858,
"repo_name": "ityaptin/ceilometer",
"id": "8f8211bede290ed64d648585d49f09841e24affd",
"size": "3104",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ceilometer/messaging.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2532735"
},
{
"name": "Shell",
"bytes": "29938"
}
],
"symlink_target": ""
} |
import smtplib
def sendMail(mail_executor,to_address,message):
"""mails the report of sysbench test"""
logging=mail_executor.logging
#configuring sender's login
from_address='[email protected]'
from_password='smtpprotocol'
#sending mail to specified to_addresses
logging.info("Mailing report...")
logging.info("To: %s" %to_address)
logging.info("From: %s" %from_address)
server=smtplib.SMTP('smtp.gmail.com',587)
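    # port 587 requires the explicit EHLO/STARTTLS/EHLO handshake below
    # before login is accepted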
server.ehlo()
server.starttls()
server.ehlo()
server.login(from_address,from_password)
server.sendmail(from_address,to_address,message)
logging.info("Report successfully sent...")
server.close()
| {
"content_hash": "85cdfadd7ff30c301bc1447d634dcd25",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 52,
"avg_line_length": 29.82608695652174,
"alnum_prop": 0.6953352769679301,
"repo_name": "pcrews/kewpie",
"id": "4acd8cfb68ebf70739fc2d221eb8c0465ba8caae",
"size": "1549",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "lib/util/mailing_report.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "908"
},
{
"name": "C++",
"bytes": "22908"
},
{
"name": "Makefile",
"bytes": "843"
},
{
"name": "Perl",
"bytes": "1138410"
},
{
"name": "Perl6",
"bytes": "8691"
},
{
"name": "Python",
"bytes": "569830"
}
],
"symlink_target": ""
} |
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# read data
#
reader = vtk.vtkGenericEnSightReader()
# Make sure all algorithms use the composite data pipeline
cdp = vtk.vtkCompositeDataPipeline()
reader.SetDefaultExecutivePrototype(cdp)
reader.SetCaseFileName("" + str(VTK_DATA_ROOT) + "/Data/EnSight/office_bin.case")
reader.Update()
outline = vtk.vtkStructuredGridOutlineFilter()
# outline SetInputConnection [reader GetOutputPort]
outline.SetInputData(reader.GetOutput().GetBlock(0))
mapOutline = vtk.vtkPolyDataMapper()
mapOutline.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(mapOutline)
outlineActor.GetProperty().SetColor(0,0,0)
# Create source for streamtubes
streamer = vtk.vtkStreamPoints()
# streamer SetInputConnection [reader GetOutputPort]
streamer.SetInputData(reader.GetOutput().GetBlock(0))
streamer.SetStartPosition(0.1,2.1,0.5)
streamer.SetMaximumPropagationTime(500)
streamer.SetTimeIncrement(0.5)
streamer.SetIntegrationDirectionToForward()
cone = vtk.vtkConeSource()
cone.SetResolution(8)
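# vtkGlyph3D copies this cone to every streamline point, oriented along the
# local velocity vector and scaled by its magnitude (ScaleByVector below).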
cones = vtk.vtkGlyph3D()
cones.SetInputConnection(streamer.GetOutputPort())
cones.SetSourceConnection(cone.GetOutputPort())
cones.SetScaleFactor(0.9)
cones.SetScaleModeToScaleByVector()
mapCones = vtk.vtkPolyDataMapper()
mapCones.SetInputConnection(cones.GetOutputPort())
# eval mapCones SetScalarRange [[reader GetOutput] GetScalarRange]
mapCones.SetScalarRange(reader.GetOutput().GetBlock(0).GetScalarRange())
conesActor = vtk.vtkActor()
conesActor.SetMapper(mapCones)
ren1.AddActor(outlineActor)
ren1.AddActor(conesActor)
ren1.SetBackground(0.4,0.4,0.5)
renWin.SetSize(300,300)
iren.Initialize()
# interact with data
reader.SetDefaultExecutivePrototype(None)
# --- end of script --
| {
"content_hash": "0ed836f036d535c0c0ff1e5f284e349f",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 81,
"avg_line_length": 35.872727272727275,
"alnum_prop": 0.811454637607704,
"repo_name": "gram526/VTK",
"id": "22183c080d3db2e86ee82945b23e63b3282d9896",
"size": "1995",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "IO/EnSight/Testing/Python/EnSightOfficeBin.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "37444"
},
{
"name": "Batchfile",
"bytes": "106"
},
{
"name": "C",
"bytes": "46637385"
},
{
"name": "C++",
"bytes": "68786084"
},
{
"name": "CMake",
"bytes": "1585187"
},
{
"name": "CSS",
"bytes": "186729"
},
{
"name": "Cuda",
"bytes": "29062"
},
{
"name": "D",
"bytes": "2081"
},
{
"name": "GAP",
"bytes": "14120"
},
{
"name": "GLSL",
"bytes": "205294"
},
{
"name": "Groff",
"bytes": "65394"
},
{
"name": "HTML",
"bytes": "292104"
},
{
"name": "Java",
"bytes": "147449"
},
{
"name": "JavaScript",
"bytes": "1130278"
},
{
"name": "Lex",
"bytes": "45258"
},
{
"name": "Objective-C",
"bytes": "22264"
},
{
"name": "Objective-C++",
"bytes": "191797"
},
{
"name": "Perl",
"bytes": "173168"
},
{
"name": "Prolog",
"bytes": "4406"
},
{
"name": "Python",
"bytes": "15706829"
},
{
"name": "Shell",
"bytes": "74255"
},
{
"name": "Slash",
"bytes": "1476"
},
{
"name": "Smarty",
"bytes": "1325"
},
{
"name": "Tcl",
"bytes": "1406812"
},
{
"name": "Yacc",
"bytes": "174481"
}
],
"symlink_target": ""
} |
"""El Nino dataset, 1950 - 2010"""
__docformat__ = 'restructuredtext'
COPYRIGHT = """This data is in the public domain."""
TITLE = """El Nino - Sea Surface Temperatures"""
SOURCE = """
National Oceanic and Atmospheric Administration's National Weather Service
ERSST.V3B dataset, Nino 1+2
http://www.cpc.ncep.noaa.gov/data/indices/
"""
DESCRSHORT = """Averaged monthly sea surface temperature - Pacific Ocean."""
DESCRLONG = """This data contains the averaged monthly sea surface
temperature in degrees Celsius of the Pacific Ocean, between 0-10 degrees South
and 90-80 degrees West, from 1950 to 2010. This dataset was obtained from
NOAA.
"""
NOTE = """::
Number of Observations - 61 x 12
Number of Variables - 1
Variable name definitions::
        TEMPERATURE - average sea surface temperature in degrees Celsius
(12 columns, one per month).
"""
from numpy import recfromtxt
from pandas import DataFrame
from statsmodels.datasets.utils import Dataset
from os.path import dirname, abspath
def load():
"""
Load the El Nino data and return a Dataset class.
Returns
-------
Dataset instance:
See DATASET_PROPOSAL.txt for more information.
Notes
-----
The elnino Dataset instance does not contain endog and exog attributes.
"""
data = _get_data()
names = data.dtype.names
dataset = Dataset(data=data, names=names)
return dataset
def load_pandas():
dataset = load()
dataset.data = DataFrame(dataset.data)
return dataset
def _get_data():
filepath = dirname(abspath(__file__))
with open(filepath + '/elnino.csv', 'rb') as f:
data = recfromtxt(f, delimiter=",",
names=True, dtype=float)
return data
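# Example usage (a minimal sketch; assumes statsmodels is installed and that
# elnino.csv ships alongside this module, as _get_data expects):
#
#   from statsmodels.datasets import elnino
#   df = elnino.load_pandas().data
#   print(df.head())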
| {
"content_hash": "6e83baf30308b95dcae1bdda65f5e091",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 79,
"avg_line_length": 24.37837837837838,
"alnum_prop": 0.6624168514412417,
"repo_name": "bert9bert/statsmodels",
"id": "35b0bcc1b7b2acdaa95dcdb07dcf2f2c24e75390",
"size": "1804",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "statsmodels/datasets/elnino/data.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AGS Script",
"bytes": "457842"
},
{
"name": "Assembly",
"bytes": "10035"
},
{
"name": "Batchfile",
"bytes": "351"
},
{
"name": "C",
"bytes": "12088"
},
{
"name": "HTML",
"bytes": "148470"
},
{
"name": "Matlab",
"bytes": "2609"
},
{
"name": "Python",
"bytes": "9844784"
},
{
"name": "R",
"bytes": "55204"
},
{
"name": "Stata",
"bytes": "54989"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import re
import json
from .common import InfoExtractor
from ..utils import (
int_or_none,
ExtractorError,
sanitized_Request,
)
class VeohIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?veoh\.com/(?:watch|iphone/#_Watch)/(?P<id>(?:v|e|yapi-)[\da-zA-Z]+)'
_TESTS = [{
'url': 'http://www.veoh.com/watch/v56314296nk7Zdmz3',
'md5': '620e68e6a3cff80086df3348426c9ca3',
'info_dict': {
'id': '56314296',
'ext': 'mp4',
'title': 'Straight Backs Are Stronger',
'uploader': 'LUMOback',
'description': 'At LUMOback, we believe straight backs are stronger. The LUMOback Posture & Movement Sensor: It gently vibrates when you slouch, inspiring improved posture and mobility. Use the app to track your data and improve your posture over time. ',
},
}, {
'url': 'http://www.veoh.com/watch/v27701988pbTc4wzN?h1=Chile+workers+cover+up+to+avoid+skin+damage',
'md5': '4a6ff84b87d536a6a71e6aa6c0ad07fa',
'info_dict': {
'id': '27701988',
'ext': 'mp4',
'title': 'Chile workers cover up to avoid skin damage',
'description': 'md5:2bd151625a60a32822873efc246ba20d',
'uploader': 'afp-news',
'duration': 123,
},
'skip': 'This video has been deleted.',
}, {
'url': 'http://www.veoh.com/watch/v69525809F6Nc4frX',
'md5': '4fde7b9e33577bab2f2f8f260e30e979',
'note': 'Embedded ooyala video',
'info_dict': {
'id': '69525809',
'ext': 'mp4',
'title': 'Doctors Alter Plan For Preteen\'s Weight Loss Surgery',
'description': 'md5:f5a11c51f8fb51d2315bca0937526891',
'uploader': 'newsy-videos',
},
'skip': 'This video has been deleted.',
}, {
'url': 'http://www.veoh.com/watch/e152215AJxZktGS',
'only_matching': True,
}]
def _extract_formats(self, source):
formats = []
link = source.get('aowPermalink')
if link:
formats.append({
'url': link,
'ext': 'mp4',
'format_id': 'aow',
})
link = source.get('fullPreviewHashLowPath')
if link:
formats.append({
'url': link,
'format_id': 'low',
})
link = source.get('fullPreviewHashHighPath')
if link:
formats.append({
'url': link,
'format_id': 'high',
})
return formats
def _extract_video(self, source):
return {
'id': source.get('videoId'),
'title': source.get('title'),
'description': source.get('description'),
'thumbnail': source.get('highResImage') or source.get('medResImage'),
'uploader': source.get('username'),
'duration': int_or_none(source.get('length')),
'view_count': int_or_none(source.get('views')),
'age_limit': 18 if source.get('isMature') == 'true' or source.get('isSexy') == 'true' else 0,
'formats': self._extract_formats(source),
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
if video_id.startswith('v'):
rsp = self._download_xml(
r'http://www.veoh.com/api/findByPermalink?permalink=%s' % video_id, video_id, 'Downloading video XML')
stat = rsp.get('stat')
if stat == 'ok':
return self._extract_video(rsp.find('./videoList/video'))
elif stat == 'fail':
raise ExtractorError(
'%s said: %s' % (self.IE_NAME, rsp.find('./errorList/error').get('errorMessage')), expected=True)
webpage = self._download_webpage(url, video_id)
age_limit = 0
if 'class="adultwarning-container"' in webpage:
self.report_age_confirmation()
age_limit = 18
request = sanitized_Request(url)
request.add_header('Cookie', 'confirmedAdult=true')
webpage = self._download_webpage(request, video_id)
m_youtube = re.search(r'http://www\.youtube\.com/v/(.*?)(\&|"|\?)', webpage)
if m_youtube is not None:
youtube_id = m_youtube.group(1)
self.to_screen('%s: detected Youtube video.' % video_id)
return self.url_result(youtube_id, 'Youtube')
info = json.loads(
self._search_regex(r'videoDetailsJSON = \'({.*?})\';', webpage, 'info').replace('\\\'', '\''))
video = self._extract_video(info)
video['age_limit'] = age_limit
return video
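# Example usage (a sketch; extractors are normally driven through the
# YoutubeDL front end rather than instantiated directly, and the URL below is
# taken from the test cases above):
#
#   from youtube_dl import YoutubeDL
#   with YoutubeDL() as ydl:
#       info = ydl.extract_info(
#           'http://www.veoh.com/watch/v56314296nk7Zdmz3', download=False)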
| {
"content_hash": "cc9504fdbe68b2083050bc1dd5b0e64e",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 270,
"avg_line_length": 37.78740157480315,
"alnum_prop": 0.5384455094811419,
"repo_name": "C00kiie/Youtube-Mp3-telegram-bot",
"id": "b20dddc5c4eb1307e02da56efc6bff35c603933c",
"size": "4799",
"binary": false,
"copies": "18",
"ref": "refs/heads/master",
"path": "youtube_dl/extractor/veoh.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4516411"
}
],
"symlink_target": ""
} |
import sys
from stop_words import stopword
import os
from nltk import stem
class gram:
def __init__(self,file_name,input_folder,output_folder,complete_file_name,max_gram,pos_filter_file,gram_ext):
self.doc_name=complete_file_name
#print self.doc_name
self.grams={}
f=open(self.doc_name,"r")
self.max_gram=max_gram
self.pos_filter_file=pos_filter_file
self.tokens=f.read()
#print self.tokens
self.tarray=self.tokens.split()
#print self.tarray
self.size=len(self.tarray)
#self.result_file=file_name.split(".")[0]+".grams"
self.result=output_folder+"/"+file_name.split("_")[0]+gram_ext
#print self.result
self.pos_filter={}
#(self.doc_name).split("/")[1]
#self.result="result/"+self.result.split(".")[0]+".grams"
#self.one_g="result/"+self.result.split(".")[0]+".onegrams"
self.final_grams=[]
def extract_gram(self,n):
for i in range(len(self.tarray)-n):
flag=0
d=""
token=[]
tag=[]
for j in range(n):
token.append(self.tarray[i+j].split("/")[0])
tag.append(self.tarray[i+j].split("/")[1])
if (not is_number(token[j]) and token[j]!=" " and token[j]!=""):
d=d+token[j]+" "
else:
flag=1
break
d=d.rstrip(" ")
word=d.split()
l=len(word)
if (flag==0 and d not in self.final_grams):
if (word[0].strip() not in stopword and word[l-1].strip() not in stopword and len(word[0])>2 and len(word[l-1])>2):
if (n in self.pos_filter.keys()):
pos_eligible=self.pos_filtering(n,token,tag)
if pos_eligible:
#print d
(self.final_grams).append(d)
else:
self.final_grams.append(d)
#print "Filter Not Available";
#break
def pos_filtering(self,n,token,tag):
for filt in self.pos_filter[n]:
filt_token=filt.split(",")
satisfied=0
for i in range(n):
if tag[i].startswith(filt_token[i]) or filt_token[i]=='ANY':
satisfied=1
else:
satisfied=0
break
if satisfied==1:
break
return(satisfied)
def get_pos_filter(self):
f_pos=open(self.pos_filter_file,"r")
pos_content=f_pos.readlines()
for pos in pos_content:
pos=pos.rstrip("\n")
pos=pos.rstrip("\r")
filter_size=len(pos.split(","))
#print pos,filter_size
if filter_size not in self.pos_filter.keys():
self.pos_filter[filter_size]=[pos,]
else:
self.pos_filter[filter_size].append(pos)
#print self.pos_filter
def write_to_file(self):
f=open(self.result,"w")
count=0
for k in (self.final_grams):
f.write(k.replace("'","")+"\n")
f.close()
#os.system('chmod 777 '+self.result)
def is_number(s):
try:
float(s)
return True
except ValueError:
return False
def repeated(token):
cnt={}
repeat=0
for word in token:
try:
cnt[word]+=1
repeat=1
#print word+" repeated"
break
except:
cnt[word]=1
return repeat
def stem_token(stemmer_name,stopword):
if stemmer_name=="Porter-Stemmer":
#print ("Performing Porter Stemming")
stemmer = stem.PorterStemmer()
for count in range(len(stopword)):
stopword[count]=stemmer.stem(stopword[count])
elif stemmer_name=="Lancaster-Stemmer":
#print ("Performing Lancaster Stemming")
stemmer = stem.LancasterStemmer()
for count in range(len(stopword)):
stopword[count]=stemmer.stem(stopword[count])
elif stemmer_name=="WordNet-Lemmatizer":
#print ("Performing Wordnet Lemmatization")
        stemmer = stem.WordNetLemmatizer()
for count in range(len(stopword)):
stopword[count]=stemmer.lemmatize(stopword[count])
return(stopword)
try:
file_name=sys.argv[1]
file_name_list=file_name.split(".")
file_name=file_name_list[0]+"_tagged.txt"
#print file_name
#input_folder=sys.argv[2]
input_folder="output"
#print input_folder
complete_file_name=input_folder+"/"+file_name
#print complete_file_name
#output_folder=sys.argv[3]
output_folder="output"
#print output_folder
max_gram=int(sys.argv[2])
#print max_gram
pos_filter_file=sys.argv[3]
#print pos_filter_file
stemmer_name=sys.argv[4]
gram_ext="_grams.txt"
except:
print "No argument"
sys.exit()
stopword=stem_token(stemmer_name,stopword)
g=gram(file_name,input_folder,output_folder,complete_file_name,max_gram,pos_filter_file,gram_ext)
g.get_pos_filter()
#g.extract_gram(2)
for i in range(max_gram,0,-1):
#print i
g.extract_gram(i)
g.write_to_file()
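# Example invocation (a sketch; the argument order follows the sys.argv
# parsing above: document name, max gram size, POS filter file, stemmer name;
# paper.txt and pos_filters.txt are hypothetical file names):
#
#   python grams_pos.py paper.txt 3 pos_filters.txt Porter-Stemmer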
| {
"content_hash": "cb2e963f78f8f8ce31ac7402db56823a",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 119,
"avg_line_length": 26.666666666666668,
"alnum_prop": 0.6583333333333333,
"repo_name": "amudalab/concept-graphs",
"id": "3c615cfc24a30d5b0ecfc31cf31c25736eebbc41",
"size": "4470",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keyphrase/keyphrase/grams_pos.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1357"
},
{
"name": "Processing",
"bytes": "9122"
},
{
"name": "Python",
"bytes": "481015"
},
{
"name": "Shell",
"bytes": "18777"
}
],
"symlink_target": ""
} |
import pytest
import os
from csv import Sniffer
from natural_bm import callbacks
from natural_bm import optimizers
from natural_bm import training
from natural_bm.models import Model
from natural_bm.datasets import random
from natural_bm.utils_testing import nnet_for_testing
#%%
@pytest.mark.parametrize('sep', [',', '\t'], ids=['csv', 'tsv'])
def test_CSVLogger(sep):
"""
This test is a slight modification of test_CSVLogger from
https://github.com/fchollet/keras/blob/master/tests/keras/test_callbacks.py
"""
nnet = nnet_for_testing('rbm')
data = random.Random('probability')
batch_size = 6
n_epoch = 1
if sep == '\t':
filepath = 'log.tsv'
elif sep == ',':
filepath = 'log.csv'
def make_model(dbm, data):
optimizer = optimizers.SGD()
trainer = training.CD(dbm)
model = Model(dbm, optimizer, trainer)
return model
# case 1, create new file with defined separator
model = make_model(nnet, data)
cbks = [callbacks.CSVLogger(filepath, separator=sep)]
history = model.fit(data.train.data,
batch_size=batch_size,
n_epoch=n_epoch,
callbacks=cbks,
validation_data=data.valid.data)
assert os.path.exists(filepath)
with open(filepath) as csvfile:
dialect = Sniffer().sniff(csvfile.read())
assert dialect.delimiter == sep
del model
del cbks
# case 2, append data to existing file, skip header
model = make_model(nnet, data)
cbks = [callbacks.CSVLogger(filepath, separator=sep, append=True)]
history = model.fit(data.train.data,
batch_size=batch_size,
n_epoch=n_epoch,
callbacks=cbks,
validation_data=data.valid.data)
# case 3, reuse of CSVLogger object
history = model.fit(data.train.data,
batch_size=batch_size,
n_epoch=n_epoch,
callbacks=cbks,
validation_data=data.valid.data)
import re
with open(filepath) as csvfile:
output = " ".join(csvfile.readlines())
assert len(re.findall('epoch', output)) == 1
os.remove(filepath)
#%% Main
if __name__ == '__main__':
pytest.main([__file__])
| {
"content_hash": "df1a91763d904a61b9608c9f38f3619e",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 79,
"avg_line_length": 28.38095238095238,
"alnum_prop": 0.5838926174496645,
"repo_name": "alexhunterlang/natural_bm",
"id": "4d9730f5aef0bd8d322583a08360223da84a5f3c",
"size": "2388",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/natural_bm/test_callbacks.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "215690"
}
],
"symlink_target": ""
} |
from bokeh.io import push_notebook, show, output_notebook
import matplotlib.pyplot as plt
import numpy as np
from bokeh.plotting import figure
from pextant.lib.geoshapely import GeoPolygon
class TriExpandViz(object):
def __init__(self, env_model, start_point, end_point, counter_interval=10):
self.mesh = env_model.dataset.mesh
self.env_model = env_model
#self.points = GeoPolygon([start_point, end_point])
self.y, self.x = self.mesh.vertices[:, :2].transpose()
self.zfaces = self.mesh.triangles_center[:, 2]
self.counter = 0
self.counter_interval = counter_interval
def draw(self, x = (), y=()):
#px,py = self.points.to(self.env_model.ROW_COL)
plt.tripcolor(self.x, self.y, self.mesh.faces, facecolors=self.zfaces, edgecolors='k')
if len(x) != 0:
plt.plot(x, y)
plt.axis('equal')
plt.show()
def addcount(self):
self.counter += 1
        if self.counter % self.counter_interval == 0:
            print self.counter
            self.draw()
def add(self, state, cost):
self.zfaces[state] = cost
class ExpandViz(object):
def __init__(self, env_model, counter_interval=1000):
self.env_model = env_model
self.expandedgrid = np.zeros((env_model.y_size, env_model.x_size))
self.counter = 0
self.counter_interval = counter_interval
self.expanded = []
#cmap = 'viridis'
def draw(self):
expanded = np.array(self.expanded).transpose()
gp_expanded = GeoPolygon(self.env_model.ROW_COL,*expanded)
upper_left, lower_right = gp_expanded.geoEnvelope()
upper_row, left_col = upper_left.to(self.env_model.ROW_COL)
lower_row, right_col = lower_right.to(self.env_model.ROW_COL)
plt.matshow(self.expandedgrid[upper_row:lower_row+1,left_col:right_col+1])
print((upper_row, lower_row), (left_col,right_col))
#print(waypoints.to(env_model.COL_ROW))
#plt.scatter(*waypoints.to(env_model.COL_ROW), c='r')
plt.show()
def drawsolution(self, rawpoints):
np_rawpoints = GeoPolygon(self.env_model.ROW_COL, *np.array(rawpoints).transpose())
plt.matshow(self.env_model.dataset)
#plt.scatter(*waypoints.to(env_model.COL_ROW), c='r')
plt.scatter(*np_rawpoints.to(self.env_model.COL_ROW), c='b')
plt.show()
def addcount(self):
self.counter += 1
if self.counter % 1000 == 0:
print self.counter
if self.counter % self.counter_interval == 0:
self.draw()
def add(self, state, cost):
self.expanded.append(np.array(state))
self.expandedgrid[state] = cost
class MeshViz:
def __init__(self, notebook=False):
self.notebook = notebook
if notebook:
output_notebook()
def viz(self, mesh, x=None, y=None, palette="Spectral11", viz=True, type="line"):
dh, dw = mesh.shape
size = max(dh, dw)
self.mesh = mesh
self.dh = dh
self.dw = dw
self.p = figure(webgl=True, title="MD2", x_axis_label='x', y_axis_label='y', x_range=[0, size], y_range=[0, size])
self.p.image(image=[mesh[::-1, :]], x=0, y=0, dw=dw, dh=dh, palette=palette)
        if x is not None:
if type=="line":
self.p.line(x, self.dh - np.array(y), line_color="green", line_width=3)
else:
self.p.circle(x, self.dh - np.array(y), fill_color="yellow", line_color="black", size=10)
if self.notebook and viz:
self.t = show(self.p, notebook_handle = self.notebook)
else:
#self.t = show(self.p)
pass
def show(self):
self.t = show(self.p, notebook_handle = self.notebook)
def vizpoints(self, x, y):
print(x)
self.p.circle(y, self.dh - np.array(x), fill_color="yellow", size=10)
push_notebook(handle=self.t)
class MeshVizM:
def __init__(self):
pass
def viz(self, mesh, x=None, y=None):
plt.matshow(mesh)
plt.show()
if __name__ == '__main__':
    MeshViz().viz(np.zeros([4, 4]))
| {
"content_hash": "066edcee2223d62663a97ac10c6e9c9a",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 122,
"avg_line_length": 34.707317073170735,
"alnum_prop": 0.5921761536659639,
"repo_name": "norheim/pextant",
"id": "799e8c16f9f03ccc91502e546680f4a6f3ed567e",
"size": "4269",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pextant/mesh/MeshVisualizer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "215644"
}
],
"symlink_target": ""
} |
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "smallearth.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| {
"content_hash": "d1e6703e7cf0a18e1667a7c19ca4147d",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 74,
"avg_line_length": 25.666666666666668,
"alnum_prop": 0.7142857142857143,
"repo_name": "robertwhaskell/smallearth",
"id": "290508c26483236de9df425745ed64df9c9d6755",
"size": "253",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "smallearth/manage.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "958"
},
{
"name": "Python",
"bytes": "4726"
}
],
"symlink_target": ""
} |
from flask_assets import Bundle
from flask_assets import Environment
def init_app(app):
assets = Environment(app)
styles = Bundle(
'main.scss',
filters='scss',
output='main.css',
depends='**/*.scss'
)
scripts = Bundle(
'*.js',
filters=('slimit'),
output='main.js'
)
assets.register('styles', styles)
assets.register('scripts', scripts)
# TODO: Move this config to an environment file
assets.load_path = ['service/design/styles', 'service/design/scripts']
assets.config['SASS_STYLE'] = 'compressed'
assets.url_expire = False
assets.auto_build = True
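# Example wiring (a sketch; assumes a standard Flask application factory that
# calls this module during startup):
#
#   from flask import Flask
#   from service import assets
#
#   app = Flask(__name__)
#   assets.init_app(app)  # registers the 'styles' and 'scripts' bundles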
| {
"content_hash": "240286a02523b78a1fd41d67aa368c32",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 74,
"avg_line_length": 23.535714285714285,
"alnum_prop": 0.6084977238239757,
"repo_name": "dallbee/Bytelog",
"id": "18945ad63c4f4286186477bccdf234ec1fc16f60",
"size": "659",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "service/assets.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "2054"
},
{
"name": "Python",
"bytes": "8317"
}
],
"symlink_target": ""
} |
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
from telemetry.page import page_test
class GpuProcessTestsPage(page_module.Page):
def __init__(self, url, name, page_set):
super(GpuProcessTestsPage, self).__init__(url=url, page_set=page_set,
name=name)
self.user_agent_type = 'desktop'
class FunctionalVideoPage(GpuProcessTestsPage):
def __init__(self, page_set):
super(FunctionalVideoPage, self).__init__(
url='file://../../data/gpu/functional_video.html',
name='GpuProcess.video',
page_set=page_set)
def RunNavigateSteps(self, action_runner):
super(FunctionalVideoPage, self).RunNavigateSteps(action_runner)
action_runner.WaitForJavaScriptCondition(
'domAutomationController._finished', timeout_in_seconds=30)
class GpuInfoCompletePage(GpuProcessTestsPage):
def __init__(self, page_set):
super(GpuInfoCompletePage, self).__init__(
url='file://../../data/gpu/functional_3d_css.html',
name='GpuProcess.gpu_info_complete',
page_set=page_set)
def Validate(self, tab, results):
# Regression test for crbug.com/454906
if not tab.browser.supports_system_info:
raise page_test.Failure('Browser must support system info')
system_info = tab.browser.GetSystemInfo()
if not system_info.gpu:
raise page_test.Failure('Target machine must have a GPU')
if not system_info.gpu.aux_attributes:
raise page_test.Failure('Browser must support GPU aux attributes')
if not 'gl_renderer' in system_info.gpu.aux_attributes:
raise page_test.Failure('Browser must have gl_renderer in aux attribs')
if len(system_info.gpu.aux_attributes['gl_renderer']) <= 0:
raise page_test.Failure('Must have a non-empty gl_renderer string')
class GpuProcessTestsPageSet(page_set_module.PageSet):
""" Tests that accelerated content triggers the creation of a GPU process """
def __init__(self):
super(GpuProcessTestsPageSet, self).__init__(
serving_dirs=set(['../../../../content/test/data']),
user_agent_type='desktop')
urls_and_names_list = [
('file://../../data/gpu/functional_canvas_demo.html',
'GpuProcess.canvas2d'),
('file://../../data/gpu/functional_3d_css.html',
'GpuProcess.css3d'),
('file://../../data/gpu/functional_webgl.html',
'GpuProcess.webgl')
]
for url, name in urls_and_names_list:
self.AddUserStory(GpuProcessTestsPage(url, name, self))
self.AddUserStory(FunctionalVideoPage(self))
self.AddUserStory(GpuInfoCompletePage(self))
| {
"content_hash": "38e77e40b1605ab6c0d8b6095c16faf2",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 79,
"avg_line_length": 36.70422535211268,
"alnum_prop": 0.6761320030698388,
"repo_name": "Fireblend/chromium-crosswalk",
"id": "a93f1039accd793483c117701c587e2ba03debc9",
"size": "2768",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "content/test/gpu/page_sets/gpu_process_tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "34367"
},
{
"name": "Batchfile",
"bytes": "8451"
},
{
"name": "C",
"bytes": "9397825"
},
{
"name": "C++",
"bytes": "235052525"
},
{
"name": "CSS",
"bytes": "951745"
},
{
"name": "DM",
"bytes": "60"
},
{
"name": "Emacs Lisp",
"bytes": "988"
},
{
"name": "Groff",
"bytes": "2494"
},
{
"name": "HTML",
"bytes": "29070071"
},
{
"name": "Java",
"bytes": "10089056"
},
{
"name": "JavaScript",
"bytes": "20170506"
},
{
"name": "Makefile",
"bytes": "68234"
},
{
"name": "Objective-C",
"bytes": "1639405"
},
{
"name": "Objective-C++",
"bytes": "9478782"
},
{
"name": "PHP",
"bytes": "97817"
},
{
"name": "PLpgSQL",
"bytes": "178732"
},
{
"name": "Perl",
"bytes": "63937"
},
{
"name": "Protocol Buffer",
"bytes": "465313"
},
{
"name": "Python",
"bytes": "8146950"
},
{
"name": "Shell",
"bytes": "473684"
},
{
"name": "Standard ML",
"bytes": "5034"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "18347"
}
],
"symlink_target": ""
} |
from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
from .._serialization import Serializer
from .._vendor import MixinABC, _convert_request, _format_url_section
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_post_request(reservation_order_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-03-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop("template_url", "/providers/Microsoft.Capacity/reservationOrders/{reservationOrderId}/return")
path_format_arguments = {
"reservationOrderId": _SERIALIZER.url("reservation_order_id", reservation_order_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
class ReturnOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.reservations.AzureReservationAPI`'s
:attr:`return_operations` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@overload
def post(
self,
reservation_order_id: str,
body: _models.RefundRequest,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.RefundResponse:
"""Return a reservation.
Return a reservation.
:param reservation_order_id: Order Id of the reservation. Required.
:type reservation_order_id: str
:param body: Information needed for returning reservation. Required.
:type body: ~azure.mgmt.reservations.models.RefundRequest
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RefundResponse or the result of cls(response)
:rtype: ~azure.mgmt.reservations.models.RefundResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def post(
self, reservation_order_id: str, body: IO, *, content_type: str = "application/json", **kwargs: Any
) -> _models.RefundResponse:
"""Return a reservation.
Return a reservation.
:param reservation_order_id: Order Id of the reservation. Required.
:type reservation_order_id: str
:param body: Information needed for returning reservation. Required.
:type body: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RefundResponse or the result of cls(response)
:rtype: ~azure.mgmt.reservations.models.RefundResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def post(
self, reservation_order_id: str, body: Union[_models.RefundRequest, IO], **kwargs: Any
) -> _models.RefundResponse:
"""Return a reservation.
Return a reservation.
:param reservation_order_id: Order Id of the reservation. Required.
:type reservation_order_id: str
        :param body: Information needed for returning reservation. Is either a model type or an IO type.
Required.
:type body: ~azure.mgmt.reservations.models.RefundRequest or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RefundResponse or the result of cls(response)
:rtype: ~azure.mgmt.reservations.models.RefundResponse
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-03-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.RefundResponse]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(body, (IO, bytes)):
_content = body
else:
_json = self._serialize.body(body, "RefundRequest")
request = build_post_request(
reservation_order_id=reservation_order_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.post.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
deserialized = self._deserialize("RefundResponse", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
post.metadata = {"url": "/providers/Microsoft.Capacity/reservationOrders/{reservationOrderId}/return"} # type: ignore
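# Example call (a sketch, not part of the generated client; assumes an
# authenticated AzureReservationAPI instance and a hypothetical order id):
#
#   from azure.identity import DefaultAzureCredential
#   from azure.mgmt.reservations import AzureReservationAPI
#
#   client = AzureReservationAPI(DefaultAzureCredential())
#   response = client.return_operations.post(
#       reservation_order_id="<reservation-order-id>",
#       body=refund_request,  # a models.RefundRequest built elsewhere
#   )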
| {
"content_hash": "b7bd042bf16aa587303c69cae2db99a5",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 122,
"avg_line_length": 41.215,
"alnum_prop": 0.6617736261069999,
"repo_name": "Azure/azure-sdk-for-python",
"id": "ac22a2f6b15884eccf44dc1552cc422f29e5587f",
"size": "8743",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/reservations/azure-mgmt-reservations/azure/mgmt/reservations/operations/_return_operations_operations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
from itertools import product
from lasagne.layers import get_output
import matplotlib.pyplot as plt
import numpy as np
import theano
import theano.tensor as T
def plot_loss(net):
train_loss = [row['train_loss'] for row in net.train_history_]
valid_loss = [row['valid_loss'] for row in net.train_history_]
plt.plot(train_loss, label='train loss')
plt.plot(valid_loss, label='valid loss')
plt.xlabel('epoch')
plt.ylabel('loss')
plt.legend(loc='best')
def plot_conv_weights(layer, figsize=(6, 6)):
"""Plot the weights of a specific layer.
Only really makes sense with convolutional layers.
Parameters
----------
layer : lasagne.layers.Layer
"""
W = layer.W.get_value()
shape = W.shape
nrows = np.ceil(np.sqrt(shape[0])).astype(int)
ncols = nrows
for feature_map in range(shape[1]):
figs, axes = plt.subplots(nrows, ncols, figsize=figsize)
for ax in axes.flatten():
ax.set_xticks([])
ax.set_yticks([])
ax.axis('off')
for i, (r, c) in enumerate(product(range(nrows), range(ncols))):
if i >= shape[0]:
break
axes[r, c].imshow(W[i, feature_map], cmap='gray',
interpolation='nearest')
def plot_conv_activity(layer, x, figsize=(6, 8)):
"""Plot the acitivities of a specific layer.
    Only really makes sense with layers that work on 2D data (2D
convolutional layers, 2D pooling layers ...).
Parameters
----------
layer : lasagne.layers.Layer
x : numpy.ndarray
Only takes one sample at a time, i.e. x.shape[0] == 1.
"""
if x.shape[0] != 1:
raise ValueError("Only one sample can be plotted at a time.")
# compile theano function
xs = T.tensor4('xs').astype(theano.config.floatX)
get_activity = theano.function([xs], get_output(layer, xs))
activity = get_activity(x)
shape = activity.shape
nrows = np.ceil(np.sqrt(shape[1])).astype(int)
ncols = nrows
figs, axes = plt.subplots(nrows + 1, ncols, figsize=figsize)
axes[0, ncols // 2].imshow(1 - x[0][0], cmap='gray',
interpolation='nearest')
axes[0, ncols // 2].set_title('original')
for ax in axes.flatten():
ax.set_xticks([])
ax.set_yticks([])
ax.axis('off')
for i, (r, c) in enumerate(product(range(nrows), range(ncols))):
if i >= shape[1]:
break
ndim = activity[0][i].ndim
if ndim != 2:
raise ValueError("Wrong number of dimensions, image data should "
"have 2, instead got {}".format(ndim))
axes[r + 1, c].imshow(-activity[0][i], cmap='gray',
interpolation='nearest')
def occlusion_heatmap(net, x, target, square_length=7):
"""An occlusion test that checks an image for its critical parts.
In this function, a square part of the image is occluded (i.e. set
to 0) and then the net is tested for its propensity to predict the
    correct label. One should expect that this propensity shrinks if
critical parts of the image are occluded. If not, this indicates
overfitting.
Depending on the depth of the net and the size of the image, this
    function may take a while to finish, since one prediction for each
pixel of the image is made.
Currently, all color channels are occluded at the same time. Also,
this does not really work if images are randomly distorted by the
batch iterator.
See paper: Zeiler, Fergus 2013
Parameters
----------
net : NeuralNet instance
The neural net to test.
x : np.array
The input data, should be of shape (1, c, x, y). Only makes
sense with image data.
target : int
The true value of the image. If the net makes several
predictions, say 10 classes, this indicates which one to look
at.
square_length : int (default=7)
The length of the side of the square that occludes the image.
Must be an odd number.
Results
-------
heat_array : np.array (with same size as image)
An 2D np.array that at each point (i, j) contains the predicted
probability of the correct class if the image is occluded by a
square with center (i, j).
"""
if (x.ndim != 4) or x.shape[0] != 1:
raise ValueError("This function requires the input data to be of "
"shape (1, c, x, y), instead got {}".format(x.shape))
if square_length % 2 == 0:
raise ValueError("Square length has to be an odd number, instead "
"got {}.".format(square_length))
num_classes = net.layers_[-1].num_units
img = x[0].copy()
bs, col, s0, s1 = x.shape
heat_array = np.zeros((s0, s1))
pad = square_length // 2 + 1
x_occluded = np.zeros((s1, col, s0, s1), dtype=img.dtype)
probs = np.zeros((s0, s1, num_classes))
# generate occluded images
for i in range(s0):
# batch s1 occluded images for faster prediction
for j in range(s1):
x_pad = np.pad(img, ((0, 0), (pad, pad), (pad, pad)), 'constant')
x_pad[:, i:i + square_length, j:j + square_length] = 0.
x_occluded[j] = x_pad[:, pad:-pad, pad:-pad]
y_proba = net.predict_proba(x_occluded)
probs[i] = y_proba.reshape(s1, num_classes)
# from predicted probabilities, pick only those of target class
for i in range(s0):
for j in range(s1):
heat_array[i, j] = probs[i, j, target]
return heat_array
def plot_occlusion(net, X, target, square_length=7, figsize=(9, None)):
"""Plot which parts of an image are particularly import for the
net to classify the image correctly.
See paper: Zeiler, Fergus 2013
Parameters
----------
net : NeuralNet instance
The neural net to test.
X : numpy.array
The input data, should be of shape (b, c, 0, 1). Only makes
sense with image data.
target : list or numpy.array of ints
The true values of the image. If the net makes several
predictions, say 10 classes, this indicates which one to look
at. If more than one sample is passed to X, each of them needs
its own target.
square_length : int (default=7)
The length of the side of the square that occludes the image.
Must be an odd number.
figsize : tuple (int, int)
Size of the figure.
Plots
-----
Figure with 3 subplots: the original image, the occlusion heatmap,
and both images super-imposed.
"""
if (X.ndim != 4):
raise ValueError("This function requires the input data to be of "
"shape (b, c, x, y), instead got {}".format(X.shape))
num_images = X.shape[0]
if figsize[1] is None:
figsize = (figsize[0], num_images * figsize[0] / 3)
figs, axes = plt.subplots(num_images, 3, figsize=figsize)
for ax in axes.flatten():
ax.set_xticks([])
ax.set_yticks([])
ax.axis('off')
for n in range(num_images):
heat_img = occlusion_heatmap(
net, X[n:n + 1, :, :, :], target[n], square_length
)
ax = axes if num_images == 1 else axes[n]
img = X[n, :, :, :].mean(0)
ax[0].imshow(-img, interpolation='nearest', cmap='gray')
ax[0].set_title('image')
ax[1].imshow(-heat_img, interpolation='nearest', cmap='Reds')
ax[1].set_title('critical parts')
ax[2].imshow(-img, interpolation='nearest', cmap='gray')
ax[2].imshow(-heat_img, interpolation='nearest', cmap='Reds',
alpha=0.6)
ax[2].set_title('super-imposed')
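# Example usage (a sketch; assumes a fitted NeuralNet `net` and image data of
# shape (b, c, x, y), e.g. MNIST as (n, 1, 28, 28)):
#
#   plot_occlusion(net, X[:3], y[:3], square_length=7)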
| {
"content_hash": "9d8680b78be82fda2a2c64db8d670d49",
"timestamp": "",
"source": "github",
"line_count": 237,
"max_line_length": 78,
"avg_line_length": 32.64556962025316,
"alnum_prop": 0.5971306708026367,
"repo_name": "williford/nolearn",
"id": "c73ee97d6eb94887b6c3c801ee55a558c8a1cd59",
"size": "7737",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nolearn/lasagne/visualize.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "116134"
}
],
"symlink_target": ""
} |
class Solution:
    # @param {int[]} A an integer array
    # @return {int[]} A list of integers includes the index of the
    # first number and the index of the last number
    def continuousSubarraySum(self, A):
        # Kadane's algorithm, tracking the best subarray's start/end indices.
        best, cur, start, ans = A[0], 0, 0, [0, 0]
        for i, x in enumerate(A):
            if cur < 0:
                cur, start = 0, i
            cur += x
            if cur > best:
                best, ans = cur, [start, i]
        return ans
| {
"content_hash": "4b5a784cb7d290e0144ee12bd3494df5",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 68,
"avg_line_length": 44,
"alnum_prop": 0.6212121212121212,
"repo_name": "quake0day/oj",
"id": "795e2da195f065df490bdcee56e2fc64fe1e02ab",
"size": "264",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Continuous Subarray Sum.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "5897"
},
{
"name": "Java",
"bytes": "691"
},
{
"name": "JavaScript",
"bytes": "1131"
},
{
"name": "Python",
"bytes": "294929"
}
],
"symlink_target": ""
} |
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
# models
from users.models import User
class CustomBackend(ModelBackend):
"""
Authenticates against users.models.User
"""
supports_inactive_user = True
def authenticate(self, email, password, token=None):
""" login using the username validating with the password or the
token. If the token is used, then it's deleted
"""
UserModel = get_user_model()
if email:
            # strip and lower the email, since it should be case insensitive
# and emails don't have spaces
email = email.strip().lower()
try:
user = UserModel._default_manager.get_by_natural_key(email)
except UserModel.DoesNotExist:
return None
if password is not None:
if user.check_password(password):
return user
if token:
if user.token == token and len(token) == 30:
user.token = ""
user.is_active = True
user.save()
return user
return None
def get_user(self, user_id):
""" returns the user using the id """
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
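# Example usage (a sketch; Django normally dispatches through
# django.contrib.auth.authenticate once this backend is listed in
# AUTHENTICATION_BACKENDS, but it can also be exercised directly with
# hypothetical credentials):
#
#   user = CustomBackend().authenticate('[email protected]', 'secret')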
| {
"content_hash": "aaab8fd037abd460a6f55eb0f78cad40",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 76,
"avg_line_length": 29.717391304347824,
"alnum_prop": 0.5808339429407462,
"repo_name": "magnet-cl/django-project-template-py3",
"id": "eb4ab3518d3d310054ec28ec0dc809f96653ad19",
"size": "1376",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "users/backends.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7184"
},
{
"name": "HTML",
"bytes": "30358"
},
{
"name": "JavaScript",
"bytes": "11986"
},
{
"name": "Python",
"bytes": "143879"
},
{
"name": "Shell",
"bytes": "5712"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os
import shutil
import tempfile
import time
from unittest import TestCase
from po_localization.file_watcher import FileWatcher
class FileWatcherTestCase(TestCase):
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.temp_dir)
def test_empty(self):
operator = TestOperator()
file_watcher = FileWatcher(operator)
file_watcher.check()
self.assertEqual(1, operator.list_files_calls)
self.assertEqual(1, operator.execute_calls)
file_watcher.check()
self.assertEqual(2, operator.list_files_calls)
self.assertEqual(1, operator.execute_calls)
file_watcher.set_dirty()
self.assertEqual(2, operator.list_files_calls)
self.assertEqual(1, operator.execute_calls)
file_watcher.check()
self.assertEqual(3, operator.list_files_calls)
self.assertEqual(2, operator.execute_calls)
file_watcher.check()
self.assertEqual(4, operator.list_files_calls)
self.assertEqual(2, operator.execute_calls)
def test_add_file_to_list(self):
file_path = os.path.join(self.temp_dir, 'file.ext')
with open(file_path, 'w'):
pass
operator = TestOperator()
file_watcher = FileWatcher(operator)
file_watcher.check()
self.assertEqual(1, operator.list_files_calls)
self.assertEqual(1, operator.execute_calls)
operator.files_list = (__file__,)
file_watcher.check()
self.assertEqual(2, operator.list_files_calls)
self.assertEqual(2, operator.execute_calls)
def test_touch_file(self):
file_path = os.path.join(self.temp_dir, 'file.ext')
with open(file_path, 'w'):
pass
start_time = time.time()
operator = TestOperator((file_path,))
os.utime(file_path, (0, start_time))
file_watcher = FileWatcher(operator)
file_watcher.check()
self.assertEqual(1, operator.list_files_calls)
self.assertEqual(1, operator.execute_calls)
file_watcher.check()
self.assertEqual(2, operator.list_files_calls)
self.assertEqual(1, operator.execute_calls)
os.utime(file_path, (0, start_time + 1))
file_watcher.check()
self.assertEqual(3, operator.list_files_calls)
self.assertEqual(2, operator.execute_calls)
def test_remove_file(self):
file_path = os.path.join(self.temp_dir, 'file.ext')
with open(file_path, 'w'):
pass
operator = TestOperator((file_path,))
file_watcher = FileWatcher(operator)
file_watcher.check()
self.assertEqual(1, operator.list_files_calls)
self.assertEqual(1, operator.execute_calls)
os.unlink(file_path)
file_watcher.check()
self.assertEqual(2, operator.list_files_calls)
self.assertEqual(2, operator.execute_calls)
def test_remove_file_from_list(self):
file_path = os.path.join(self.temp_dir, 'file.ext')
with open(file_path, 'w'):
pass
operator = TestOperator((file_path,))
file_watcher = FileWatcher(operator)
file_watcher.check()
self.assertEqual(1, operator.list_files_calls)
self.assertEqual(1, operator.execute_calls)
operator.files_list = ()
file_watcher.check()
self.assertEqual(2, operator.list_files_calls)
self.assertEqual(2, operator.execute_calls)
class TestOperator(object):
def __init__(self, files_list=()):
self.files_list = files_list
self.execute_calls = 0
self.list_files_calls = 0
def execute(self):
self.execute_calls += 1
def list_files(self):
self.list_files_calls += 1
return self.files_list
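# Example usage (a sketch mirroring the pattern exercised above, with a
# hypothetical watched-file list):
#
#   watcher = FileWatcher(TestOperator(('locale/django.po',)))
#   watcher.check()  # re-runs operator.execute() whenever the files change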
| {
"content_hash": "2c3a760776e3733e776ea7e97e2b8576",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 59,
"avg_line_length": 35.080357142857146,
"alnum_prop": 0.635530669381522,
"repo_name": "kmichel/po-localization",
"id": "d6b3e597b899648a809ddf2653daea15374f3ef2",
"size": "3945",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "po_localization/tests/test_file_watcher.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "91595"
}
],
"symlink_target": ""
} |
import contextlib
import sys
import logging
import time
import itertools
from teuthology.config import config
from teuthology.exceptions import MaxWhileTries
log = logging.getLogger(__name__)
@contextlib.contextmanager
def nested(*managers):
"""
Like contextlib.nested but takes callables returning context
managers, to avoid the major reason why contextlib.nested was
deprecated.
This version also logs any exceptions early, much like run_tasks,
to ease debugging. TODO combine nested and run_tasks.
"""
exits = []
vars = []
exc = (None, None, None)
try:
for mgr_fn in managers:
mgr = mgr_fn()
exit = mgr.__exit__
enter = mgr.__enter__
vars.append(enter())
exits.append(exit)
yield vars
except Exception:
log.exception('Saw exception from nested tasks')
exc = sys.exc_info()
# FIXME this needs to be more generic
if config.ctx and config.ctx.config.get('interactive-on-error'):
config.ctx.config['interactive-on-error'] = False
from teuthology.task import interactive
log.warning('Saw failure, going into interactive mode...')
interactive.task(ctx=config.ctx, config=None)
finally:
while exits:
exit = exits.pop()
try:
if exit(*exc):
exc = (None, None, None)
except Exception:
exc = sys.exc_info()
if exc != (None, None, None):
# Don't rely on sys.exc_info() still containing
# the right information. Another exception may
# have been raised and caught by an exit method
raise exc[1]
class safe_while(object):
"""
A context manager to remove boiler plate code that deals with `while` loops
that need a given number of tries and some seconds to sleep between each
one of those tries.
The most simple example possible will try 10 times sleeping for 6 seconds:
    >>> from teuthology.contextutil import safe_while
>>> with safe_while() as proceed:
... while proceed():
... # repetitive code here
... print("hello world")
...
Traceback (most recent call last):
...
    MaxWhileTries: reached maximum tries (10) after waiting for 60 seconds
Yes, this adds yet another level of indentation but it allows you to
implement while loops exactly the same as before with just 1 more
indentation level and one extra call. Everything else stays the same,
code-wise. So adding this helper to existing code is simpler.
:param sleep: The amount of time to sleep between tries. Default 6
:param increment: The amount to add to the sleep value on each try.
Default 0.
:param tries: The amount of tries before giving up. Default 10.
:param action: The name of the action being attempted. Default none.
:param _raise: Whether to raise an exception (or log a warning).
Default True.
:param _sleeper: The function to use to sleep. Only used for testing.
Default time.sleep
"""
def __init__(self, sleep=6, increment=0, tries=10, action=None,
_raise=True, _sleeper=None):
self.sleep = sleep
self.increment = increment
self.tries = tries
self.counter = 0
self.sleep_current = sleep
self.action = action
self._raise = _raise
self.sleeper = _sleeper or time.sleep
def _make_error_msg(self):
"""
Sum the total number of seconds we waited while providing the number
of tries we attempted
"""
total_seconds_waiting = sum(
itertools.islice(
itertools.count(self.sleep, self.increment),
self.tries
)
)
msg = 'reached maximum tries ({tries})' + \
' after waiting for {total} seconds'
if self.action:
msg = "'{action}' " + msg
msg = msg.format(
action=self.action,
tries=self.tries,
total=total_seconds_waiting,
)
return msg
def __call__(self):
self.counter += 1
if self.counter == 1:
return True
if self.counter > self.tries:
error_msg = self._make_error_msg()
if self._raise:
raise MaxWhileTries(error_msg)
else:
log.warning(error_msg)
return False
self.sleeper(self.sleep_current)
self.sleep_current += self.increment
return True
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
return False
| {
"content_hash": "c65063b88ce8c45c9e1a6562eb42659f",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 79,
"avg_line_length": 33.51034482758621,
"alnum_prop": 0.584070796460177,
"repo_name": "SUSE/teuthology",
"id": "57a0f08c86b6dbc969350aca850f188d2ef0f8a2",
"size": "4859",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "teuthology/contextutil.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1096"
},
{
"name": "Makefile",
"bytes": "4194"
},
{
"name": "Python",
"bytes": "1439804"
},
{
"name": "Shell",
"bytes": "61271"
}
],
"symlink_target": ""
} |
from conjureup import events, utils
from conjureup.app_config import app
from . import common
class RegionsController(common.BaseRegionsController):
def render(self):
if app.provider.region or not self.regions:
self.finish(app.provider.region)
elif self.default_region:
self.finish(self.default_region)
else:
utils.warning("You attempted to do an install against a cloud "
"that requires a region without specifying one, "
"and no default could be determined. Please "
"include a region with the cloud in the form: "
"<cloud>/<region>")
events.Shutdown.set(1)
_controller_class = RegionsController
| {
"content_hash": "138803addcf3da39f266a4ece0259571",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 75,
"avg_line_length": 35.63636363636363,
"alnum_prop": 0.6033163265306123,
"repo_name": "ubuntu/conjure-up",
"id": "0f70a90653c05959572d8eda22eab8cf4434973a",
"size": "784",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "conjureup/controllers/juju/regions/tui.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2552"
},
{
"name": "Python",
"bytes": "470520"
},
{
"name": "Shell",
"bytes": "4588"
}
],
"symlink_target": ""
} |
import subprocess
import setup_util
from os.path import expanduser
from os import kill
python = expanduser('~/FrameworkBenchmarks/installs/py2/bin/python')
cwd = expanduser('~/FrameworkBenchmarks/tornado')
def start(args):
setup_util.replace_text(
cwd + "/server.py", "localhost", args.database_host)
subprocess.Popen(
python + " server.py --port=8080 --logging=error",
shell=True, cwd=cwd)
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'FrameworkBenchmarks/installs/py2/bin/python server.py --port=8080 --logging=error' in line:
pid = int(line.split(None,2)[1])
kill(pid, 9)
return 0
| {
"content_hash": "d104864c77899d8541b061209c34ccec",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 101,
"avg_line_length": 30.32,
"alnum_prop": 0.6662269129287599,
"repo_name": "julienschmidt/FrameworkBenchmarks",
"id": "913031dbbf76ee93b53812d35b54e101df1f9d1f",
"size": "758",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tornado/setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "838"
},
{
"name": "C",
"bytes": "35766"
},
{
"name": "C#",
"bytes": "122182"
},
{
"name": "C++",
"bytes": "385162"
},
{
"name": "CSS",
"bytes": "496796"
},
{
"name": "Clojure",
"bytes": "17662"
},
{
"name": "D",
"bytes": "180569"
},
{
"name": "Dart",
"bytes": "28773"
},
{
"name": "Erlang",
"bytes": "5847"
},
{
"name": "Go",
"bytes": "1313011"
},
{
"name": "Gosu",
"bytes": "3318"
},
{
"name": "Groovy",
"bytes": "10619"
},
{
"name": "Haskell",
"bytes": "8695"
},
{
"name": "Java",
"bytes": "137359"
},
{
"name": "JavaScript",
"bytes": "586633"
},
{
"name": "Lua",
"bytes": "6816"
},
{
"name": "MoonScript",
"bytes": "1726"
},
{
"name": "Nim",
"bytes": "263"
},
{
"name": "PHP",
"bytes": "17079960"
},
{
"name": "Perl",
"bytes": "13231"
},
{
"name": "PowerShell",
"bytes": "34850"
},
{
"name": "Prolog",
"bytes": "1736"
},
{
"name": "Python",
"bytes": "254288"
},
{
"name": "Racket",
"bytes": "1375"
},
{
"name": "Ruby",
"bytes": "100410"
},
{
"name": "Scala",
"bytes": "56242"
},
{
"name": "Shell",
"bytes": "57427"
},
{
"name": "Volt",
"bytes": "560"
}
],
"symlink_target": ""
} |
from .client import TensorboardServiceClient
from .async_client import TensorboardServiceAsyncClient
__all__ = (
"TensorboardServiceClient",
"TensorboardServiceAsyncClient",
)
| {
"content_hash": "25d4b50fc22165d4a9ce17bb2f362320",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 55,
"avg_line_length": 26.428571428571427,
"alnum_prop": 0.7891891891891892,
"repo_name": "googleapis/python-aiplatform",
"id": "91397832d25fe4032de7e4a630b39174c4660a1d",
"size": "785",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "google/cloud/aiplatform_v1beta1/services/tensorboard_service/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "23977004"
},
{
"name": "Shell",
"bytes": "30668"
}
],
"symlink_target": ""
} |
"""
Module to control a virtual create
"""
from ..vrep import vrep as vrep
from enum import Enum
class VirtualCreate:
"""
Class to control a virtual create in V-REP.
"""
def __init__(self, client_id):
"""Constructor.
Args:
client_id (integer): V-REP client id.
"""
self._clientID = client_id
# query objects
rc, self._obj = vrep.simxGetObjectHandle(self._clientID, "create_estimate", vrep.simx_opmode_oneshot_wait)
# Use custom GUI
_, self._uiHandle = vrep.simxGetUIHandle(self._clientID, "UI", vrep.simx_opmode_oneshot_wait)
vrep.simxGetUIEventButton(self._clientID, self._uiHandle, vrep.simx_opmode_streaming)
def set_pose(self, position, yaw):
vrep.simxSetObjectPosition(self._clientID, self._obj, -1, position,
vrep.simx_opmode_oneshot_wait)
vrep.simxSetObjectOrientation(self._clientID, self._obj, -1, (0, 0, yaw),
vrep.simx_opmode_oneshot_wait)
def set_point_cloud(self, data):
signal = vrep.simxPackFloats(data)
vrep.simxWriteStringStream(self._clientID, "pointCloud", signal, vrep.simx_opmode_oneshot)
class Button(Enum):
MoveForward = 3
TurnLeft = 4
TurnRight = 5
Sense = 6
def get_last_button(self):
self.enable_buttons()
err, button_id, aux = vrep.simxGetUIEventButton(self._clientID, self._uiHandle, vrep.simx_opmode_buffer)
if err == vrep.simx_return_ok and button_id != -1:
self.disable_buttons()
vrep.simxGetUIEventButton(self._clientID, self._uiHandle, vrep.simx_opmode_streaming)
return self.Button(button_id)
return None
def disable_buttons(self):
for i in range(3, 7):
_, prop = vrep.simxGetUIButtonProperty(self._clientID, self._uiHandle, i, vrep.simx_opmode_oneshot)
prop &= ~vrep.sim_buttonproperty_enabled
vrep.simxSetUIButtonProperty(self._clientID, self._uiHandle, i, prop, vrep.simx_opmode_oneshot)
def enable_buttons(self):
for i in range(3, 7):
_, prop = vrep.simxGetUIButtonProperty(self._clientID, self._uiHandle, i, vrep.simx_opmode_oneshot)
# print(prop)
prop |= vrep.sim_buttonproperty_enabled
vrep.simxSetUIButtonProperty(self._clientID, self._uiHandle, i, prop, vrep.simx_opmode_oneshot)
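# Minimal usage sketch, kept in comments since this module uses a
# relative import and is not meant to run standalone. It assumes a
# V-REP remote API server is listening on 127.0.0.1:19997 and that
# simxStart succeeds; the pose and point-cloud values are illustrative:
#
#   client_id = vrep.simxStart('127.0.0.1', 19997, True, True, 5000, 5)
#   create = VirtualCreate(client_id)
#   create.set_pose((0.0, 0.0, 0.1), 1.57)
#   if create.get_last_button() == VirtualCreate.Button.Sense:
#       create.set_point_cloud([0.1, 0.2, 0.3])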
| {
"content_hash": "cc3cba27595785f2291c0f2d0878fdfc",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 114,
"avg_line_length": 38.5,
"alnum_prop": 0.622564935064935,
"repo_name": "USC-ACTLab/pyCreate2",
"id": "6a7bdbabade270ee906e0e3f3c87aab6d91b3436",
"size": "2464",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyCreate2/visualization/virtual_create.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "154182"
}
],
"symlink_target": ""
} |
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.utils.decorators import apply_defaults
class S3PrefixSensor(BaseSensorOperator):
"""
Waits for a prefix to exist. A prefix is the first part of a key,
thus enabling checking of constructs similar to glob airfl* or
SQL LIKE 'airfl%'. There is the possibility to precise a delimiter to
indicate the hierarchy or keys, meaning that the match will stop at that
delimiter. Current code accepts sane delimiters, i.e. characters that
are NOT special characters in the Python regex engine.
:param bucket_name: Name of the S3 bucket
:type bucket_name: str
:param prefix: The prefix being waited on. Relative path from bucket root level.
:type prefix: str
:param delimiter: The delimiter intended to show hierarchy.
Defaults to '/'.
:type delimiter: str
"""
template_fields = ('prefix', 'bucket_name')
@apply_defaults
def __init__(self,
bucket_name,
prefix,
delimiter='/',
aws_conn_id='aws_default',
*args,
**kwargs):
super(S3PrefixSensor, self).__init__(*args, **kwargs)
# Parse
self.bucket_name = bucket_name
self.prefix = prefix
self.delimiter = delimiter
self.full_url = "s3://" + bucket_name + '/' + prefix
self.aws_conn_id = aws_conn_id
def poke(self, context):
self.log.info('Poking for prefix : {self.prefix}\n'
'in bucket s3://{self.bucket_name}'.format(**locals()))
from airflow.hooks.S3_hook import S3Hook
hook = S3Hook(aws_conn_id=self.aws_conn_id)
return hook.check_for_prefix(
prefix=self.prefix,
delimiter=self.delimiter,
bucket_name=self.bucket_name)
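# Illustrative wiring inside a DAG, assuming an 'aws_default' connection
# is configured; the task_id, bucket and prefix below are placeholders:
#
#   wait_for_export = S3PrefixSensor(
#       task_id='wait_for_daily_export',
#       bucket_name='example-bucket',
#       prefix='exports/2018-01-01/',
#       poke_interval=60,
#       dag=dag)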
| {
"content_hash": "7f86535a2b9dc7caa4ba7974d2b08876",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 84,
"avg_line_length": 38.204081632653065,
"alnum_prop": 0.6223290598290598,
"repo_name": "CloverHealth/airflow",
"id": "917dd46e26c288a0eb9f01dab3bea139dabf194c",
"size": "2684",
"binary": false,
"copies": "6",
"ref": "refs/heads/clover",
"path": "airflow/sensors/s3_prefix_sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "109698"
},
{
"name": "Dockerfile",
"bytes": "2001"
},
{
"name": "HTML",
"bytes": "275682"
},
{
"name": "JavaScript",
"bytes": "1988427"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "4085946"
},
{
"name": "Shell",
"bytes": "47009"
},
{
"name": "TSQL",
"bytes": "929"
}
],
"symlink_target": ""
} |
import time
from shotgun import settings
class Config(object):
def __init__(self, data=None):
self.data = data
self.time = time.localtime()
def _timestamp(self, name):
return "{0}-{1}".format(
name,
time.strftime('%Y-%m-%d_%H-%M-%S', self.time)
)
@property
def target(self):
target = self.data.get("target", settings.TARGET)
if self.data.get("timestamp", settings.TIMESTAMP):
target = self._timestamp(target)
return target
@property
def lastdump(self):
return self.data.get("lastdump", settings.LASTDUMP)
@property
def objects(self):
for role, properties in self.data["dump"].iteritems():
for host in properties.get("hosts", []):
for object_ in properties.get("objects", []):
object_["host"] = host
yield object_
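# Sketch of the config shape this class expects; the role, host and
# object entries are placeholders, and settings supplies defaults for
# any omitted keys:
#
#   conf = Config({
#       "target": "/tmp/snapshot",
#       "timestamp": True,
#       "dump": {
#           "controller": {
#               "hosts": ["10.20.0.2"],
#               "objects": [{"type": "file", "path": "/var/log/syslog"}],
#           },
#       },
#   })
#   print conf.target           # e.g. /tmp/snapshot-2014-01-01_12-00-00
#   for obj in conf.objects:    # each object gains a "host" key
#       print obj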
| {
"content_hash": "d3108e257814d3fd6aa99d47bd83750b",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 62,
"avg_line_length": 27.294117647058822,
"alnum_prop": 0.5506465517241379,
"repo_name": "andrei4ka/fuel-web-redhat",
"id": "86d4dce74e6c2c66162a581e555b5fcb4e6bbb79",
"size": "1538",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "shotgun/shotgun/config.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "100524"
},
{
"name": "JavaScript",
"bytes": "639783"
},
{
"name": "Makefile",
"bytes": "5891"
},
{
"name": "Puppet",
"bytes": "282"
},
{
"name": "Python",
"bytes": "3206343"
},
{
"name": "Ruby",
"bytes": "33423"
},
{
"name": "Shell",
"bytes": "31460"
}
],
"symlink_target": ""
} |
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class AlertHandler(BaseHTTPRequestHandler):
def do_POST(self):
# print out new alert changes
print(self.rfile.read(int(self.headers['Content-Length'])))
self.send_response(200)
self.end_headers()
def run():
httpd = HTTPServer(('0.0.0.0', 9099), AlertHandler)
print('Starting httpd...')
httpd.serve_forever()
if __name__ == "__main__":
run()
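# Once the server is running, any JSON body posted to it is echoed to
# stdout; a quick check from another shell (hypothetical payload):
#
#   curl -X POST -d '{"alert": "cpu_high"}' http://localhost:9099/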
| {
"content_hash": "1e457eb02734c82199d106f905de7b0c",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 67,
"avg_line_length": 23.1,
"alnum_prop": 0.645021645021645,
"repo_name": "yaacov/mohawk",
"id": "667ae4ba83fca63cbb6ca67eb78f888e814765a8",
"size": "484",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/alerts/examples/alert-buffer.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "99992"
},
{
"name": "Makefile",
"bytes": "1080"
},
{
"name": "Python",
"bytes": "484"
},
{
"name": "Shell",
"bytes": "5445"
}
],
"symlink_target": ""
} |
answer1 = widget_inputs["radio1"]
answer2 = widget_inputs["radio2"]
answer3 = widget_inputs["radio3"]
answer4 = widget_inputs["radio4"]
answer5 = widget_inputs["radio5"]
answer6 = widget_inputs["radio6"]
answer7 = widget_inputs["radio7"]
answer8 = widget_inputs["radio8"]
answer9 = widget_inputs["radio9"]
answer10 = widget_inputs["radio10"]
is_correct = False
comments = []
def commentizer(new):
if new not in comments:
comments.append(new)
if answer2 == True:
is_correct = True
else:
is_correct = is_correct and False
commentizer("Check the Chrome logo.")
if answer3 == True:
is_correct = is_correct and True
else:
is_correct = is_correct and False
commentizer("Check the kitten photo.")
if answer6 == True:
is_correct = is_correct and True
else:
is_correct = is_correct and False
commentizer("Check the Mexican flag. It's vector, believe it or not.")
if answer8 == True:
is_correct = is_correct and True
else:
is_correct = is_correct and False
commentizer("Check the repeat background.")
if answer9 == True:
is_correct = is_correct and True
else:
is_correct = is_correct and False
commentizer("Check the gradient background.")
if is_correct:
commentizer("Great job! You're starting to learn how to decide between raster and vector options.")
grade_result["comment"] = "\n\n".join(comments)
grade_result["correct"] = is_correct | {
"content_hash": "2ccd185fb403ef5950fb82516294f14e",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 103,
"avg_line_length": 26.67924528301887,
"alnum_prop": 0.698019801980198,
"repo_name": "udacity/responsive-images",
"id": "746bfcfc47937f4cd108e5619081c7af8daa7e76",
"size": "1414",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "grading_scripts/2_14_q.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17651"
},
{
"name": "HTML",
"bytes": "46303"
},
{
"name": "JavaScript",
"bytes": "8515"
},
{
"name": "Python",
"bytes": "10560"
},
{
"name": "Shell",
"bytes": "1395"
}
],
"symlink_target": ""
} |
from m5.SimObject import *
from m5.params import *
# QoS scheduler policy used to serve incoming transaction
class QoSPolicy(SimObject):
type = 'QoSPolicy'
abstract = True
cxx_header = "mem/qos/policy.hh"
cxx_class = 'gem5::memory::qos::Policy'
class QoSFixedPriorityPolicy(QoSPolicy):
type = 'QoSFixedPriorityPolicy'
cxx_header = "mem/qos/policy_fixed_prio.hh"
cxx_class = 'gem5::memory::qos::FixedPriorityPolicy'
cxx_exports = [
PyBindMethod('initRequestorName'),
PyBindMethod('initRequestorObj'),
]
_requestor_priorities = None
def setRequestorPriority(self, request_port, priority):
if not self._requestor_priorities:
self._requestor_priorities = []
self._requestor_priorities.append([request_port, priority])
def setMasterPriority(self, request_port, priority):
warn('QosFixedPriority.setMasterPriority is deprecated in favor of '
'setRequestorPriority. See src/mem/qos/QoSPolicy.py for more '
'information')
self.setRequestorPriority(request_port, priority)
def init(self):
if not self._requestor_priorities:
print("Error,"
"use setRequestorPriority to init requestors/priorities\n");
exit(1)
else:
for prio in self._requestor_priorities:
request_port = prio[0]
priority = prio[1]
if isinstance(request_port, str):
self.getCCObject().initRequestorName(
request_port, int(priority))
else:
self.getCCObject().initRequestorObj(
request_port.getCCObject(), priority)
# default fixed priority value for non-listed Requestors
qos_fixed_prio_default_prio = Param.UInt8(0,
"Default priority for non-listed Requestors")
class QoSPropFairPolicy(QoSPolicy):
type = 'QoSPropFairPolicy'
cxx_header = "mem/qos/policy_pf.hh"
cxx_class = 'gem5::memory::qos::PropFairPolicy'
cxx_exports = [
PyBindMethod('initRequestorName'),
PyBindMethod('initRequestorObj'),
]
_requestor_scores = None
def setInitialScore(self, request_port, score):
if not self._requestor_scores:
self._requestor_scores = []
self._requestor_scores.append([request_port, score])
def init(self):
if not self._requestor_scores:
print("Error, use setInitialScore to init requestors/scores\n");
exit(1)
else:
for prio in self._requestor_scores:
request_port = prio[0]
score = prio[1]
if isinstance(request_port, str):
self.getCCObject().initRequestorName(
request_port, float(score))
else:
self.getCCObject().initRequestorObj(
request_port.getCCObject(), float(score))
weight = Param.Float(0.5, "Pf score weight")
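# Illustrative use from a gem5 configuration script; the memory
# controller and requestor port names below are placeholders:
#
#   policy = QoSFixedPriorityPolicy(qos_fixed_prio_default_prio=0)
#   policy.setRequestorPriority("system.cpu0.dcache.mem_side", 2)
#   policy.setRequestorPriority("system.cpu1.dcache.mem_side", 1)
#   mem_ctrl.qos_policy = policy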
| {
"content_hash": "94e3a36e13c5f692c29edcebb8aefbc5",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 77,
"avg_line_length": 34.54545454545455,
"alnum_prop": 0.6042763157894737,
"repo_name": "gem5/gem5",
"id": "99a3f2faae4e2f007e8a86d33791b7c9cbfa091c",
"size": "5110",
"binary": false,
"copies": "1",
"ref": "refs/heads/stable",
"path": "src/mem/qos/QoSPolicy.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "145626"
},
{
"name": "Awk",
"bytes": "3386"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "C",
"bytes": "3927153"
},
{
"name": "C++",
"bytes": "42960484"
},
{
"name": "CMake",
"bytes": "133888"
},
{
"name": "Dockerfile",
"bytes": "34102"
},
{
"name": "Emacs Lisp",
"bytes": "1914"
},
{
"name": "Forth",
"bytes": "354"
},
{
"name": "Fortran",
"bytes": "15436"
},
{
"name": "HTML",
"bytes": "146414"
},
{
"name": "Hack",
"bytes": "139769"
},
{
"name": "Java",
"bytes": "6966"
},
{
"name": "M4",
"bytes": "42624"
},
{
"name": "Makefile",
"bytes": "39573"
},
{
"name": "Perl",
"bytes": "23784"
},
{
"name": "Python",
"bytes": "8079781"
},
{
"name": "Roff",
"bytes": "8754"
},
{
"name": "SCSS",
"bytes": "2971"
},
{
"name": "SWIG",
"bytes": "173"
},
{
"name": "Scala",
"bytes": "5328"
},
{
"name": "Shell",
"bytes": "95638"
},
{
"name": "Starlark",
"bytes": "25668"
},
{
"name": "SuperCollider",
"bytes": "8869"
},
{
"name": "Vim Script",
"bytes": "4343"
},
{
"name": "sed",
"bytes": "3897"
}
],
"symlink_target": ""
} |
"""
Presence analyzer unit tests.
"""
import os.path
import json
import datetime
import unittest
from presence_analyzer import main, utils
from presence_analyzer import views # pylint: disable=unused-import
TEST_DATA_CSV = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data', 'test_data.csv'
)
TEST_DATA_MANGLED_W_HEADER_CSV = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data',
'test_data_mangled_w_header.csv'
)
TEST_USERS_XML = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data', 'test_users.xml'
)
# pylint: disable=maybe-no-member, too-many-public-methods
class PresenceAnalyzerViewsTestCase(unittest.TestCase):
"""
Views tests.
"""
def setUp(self):
"""
        Before each test, set up an environment.
"""
main.app.config.update({'DATA_CSV': TEST_DATA_CSV})
main.app.config.update({'USERS_XML': TEST_USERS_XML})
utils.get_data.cache_duration = -1
self.client = main.app.test_client()
def tearDown(self):
"""
Get rid of unused objects after each test.
"""
pass
def test_mainpage(self):
"""
Test main page redirect.
"""
resp = self.client.get('/')
self.assertEqual(resp.status_code, 302)
assert resp.headers['Location'].endswith('/presence_weekday')
def test_api_users(self):
"""
Test users listing.
"""
resp = self.client.get('/api/v1/users')
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content_type, 'application/json')
data = json.loads(resp.data)
self.assertEqual(len(data), 2)
self.assertDictEqual(
data[0],
{u'user_id': 10, u'name': u'Kowalski A.',
u'avatar': u'http://example.com:80/api/images/users/10'})
def test_mean_time_weekday(self):
"""
Test mean time view.
"""
resp = self.client.get('api/v1/mean_time_weekday/10')
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content_type, 'application/json')
data = json.loads(resp.data)
self.assertEqual(len(data), 7)
self.assertListEqual(
[[day, interval] for day, interval in data if interval > 0],
[[u'Tue', 30047], [u'Wed', 24465], [u'Thu', 23705]]
)
resp = self.client.get('api/v1/mean_time_weekday/9000')
self.assertEqual(resp.status_code, 404)
def test_presence_weekday(self):
"""
Test presence weekday view.
"""
resp = self.client.get('api/v1/presence_weekday/10')
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content_type, 'application/json')
data = json.loads(resp.data)
self.assertEqual(len(data), 7+1)
self.assertListEqual(
[[day, interval] for day, interval in data if interval > 0],
[[u'Weekday', u'Presence (s)'], [u'Tue', 30047], [u'Wed', 24465],
[u'Thu', 23705]]
)
resp = self.client.get('api/v1/presence_weekday/9000')
self.assertEqual(resp.status_code, 404)
def test_api_presence_start_end(self):
"""
Test mean start-end listing.
"""
resp = self.client.get('/api/v1/presence_start_end/10')
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content_type, 'application/json')
data = json.loads(resp.data)
self.assertLessEqual(len(data), 7)
for row in data:
self.assertEqual(len(row), 3)
_, start, end = row
self.assertLessEqual(start, end)
self.assertListEqual(data, [[u'Tue', 34745, 64792],
[u'Wed', 33592, 58057],
[u'Thu', 38926, 62631]])
resp = self.client.get('/api/v1/presence_start_end/9000')
self.assertEqual(resp.status_code, 404)
def test_templates(self):
"""
        Test template renderers.
"""
for url in ('/presence_weekday', '/presence_start_end',
'/mean_time_weekday'):
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp.content_type.split(';')[0], 'text/html')
class PresenceAnalyzerUtilsTestCase(unittest.TestCase):
"""
Utility functions tests.
"""
def setUp(self):
"""
        Before each test, set up an environment.
"""
main.app.config.update({'DATA_CSV': TEST_DATA_CSV})
main.app.config.update({'USERS_XML': TEST_USERS_XML})
utils.get_data.cache_duration = -1
def tearDown(self):
"""
Get rid of unused objects after each test.
"""
pass
def test_get_data(self):
"""
Test parsing of CSV file.
"""
data = utils.get_data()
self.assertIsInstance(data, dict)
self.assertItemsEqual(data.keys(), [10, 11])
sample_date = datetime.date(2013, 9, 10)
self.assertIn(sample_date, data[10])
self.assertItemsEqual(data[10][sample_date].keys(), ['start', 'end'])
self.assertEqual(
data[10][sample_date]['start'],
datetime.time(9, 39, 5)
)
def test_get_mangled_data(self):
"""
Test parsing of mangled CSV file.
"""
main.app.config.update({'DATA_CSV': TEST_DATA_MANGLED_W_HEADER_CSV})
data = utils.get_data()
self.assertIsInstance(data, dict)
self.assertItemsEqual(data.keys(), [11, ])
sample_date = datetime.date(2013, 9, 10)
self.assertIn(sample_date, data[11])
self.assertItemsEqual(data[11][sample_date].keys(), ['start', 'end'])
self.assertEqual(
data[11][sample_date]['start'],
datetime.time(9, 19, 50)
)
def test_get_user_data(self):
"""
Test parsing of user XML file.
"""
data = utils.get_user_data()
self.assertIsInstance(data, dict)
self.assertItemsEqual(data.keys(), [10, 11, ])
self.assertIn('name', data[11])
self.assertEqual(data[11]['name'], u'Nowak B.')
def test_cache(self):
"""
Test caching.
"""
# pylint: disable=invalid-name
# pylint: disable=missing-docstring
@utils.cache()
def func(b, c=10):
func.a += 1
return func.a+b+c
func.a = 0
self.assertEqual(func(-1), 10)
self.assertEqual(func(-1), func(-1))
self.assertEqual(func(-1), func(-1))
func.cache_duration = -1
self.assertEqual(func(-1), 11)
self.assertEqual(func(0, 0), 3)
@utils.cache(copy=True)
def f():
return []
f().append('test')
self.assertListEqual(f(), [])
def suite():
"""
Default test suite.
"""
base_suite = unittest.TestSuite()
base_suite.addTest(unittest.makeSuite(PresenceAnalyzerViewsTestCase))
base_suite.addTest(unittest.makeSuite(PresenceAnalyzerUtilsTestCase))
return base_suite
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "63bd498ee08ac2cb7f6486e62717b515",
"timestamp": "",
"source": "github",
"line_count": 233,
"max_line_length": 78,
"avg_line_length": 31.047210300429185,
"alnum_prop": 0.5628974288084048,
"repo_name": "stxnext-kindergarten/presence-analyzer-murbanski",
"id": "2e7881ea2e2f9fecba8a8ab0235a5e001f78d571",
"size": "7258",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/presence_analyzer/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "920"
},
{
"name": "HTML",
"bytes": "7787"
},
{
"name": "JavaScript",
"bytes": "815"
},
{
"name": "Python",
"bytes": "26903"
}
],
"symlink_target": ""
} |
import rospy
class RegisterMonitor(type):
def __init__(cls, name, bases, nmspc):
super(RegisterMonitor, cls).__init__(name, bases, nmspc)
if not hasattr(cls, 'registry'):
cls.registry = {}
cls.registry[cls.name] = cls
# Remove base classes
for b in bases:
if hasattr(b,"name"):
if cls.registry.has_key(b.name):
cls.registry.pop(b.name)
# Metamethods, called on class objects:
def __iter__(cls):
return iter(cls.registry)
def __str__(cls):
if cls in cls.registry:
return cls.__name__
return cls.__name__ + ": " + ", ".join([sc for sc in cls])
class MonitorType(object):
__metaclass__ = RegisterMonitor
name = "None"
description = ""
config_keys = [] # A list of (field_name, field_description) tuples.
def __init__(self, monitor_config, invalid_cb):
""" monitor_config: the config dictionary for this monitor """
# Check the keys are present in the config
for (key, description) in self.config_keys:
if not monitor_config.has_key(key):
raise Exception("'{}' monitor missing field '{}' in yaml".format(self.name,
key))
self.__dict__.update(monitor_config)
self._invalid_cb = invalid_cb
@classmethod
def get_monitor(cls, name):
if not cls.registry.has_key(name):
raise Exception("Unknown monitor type '%s'" % name)
return cls.registry[name]
@classmethod
def create(cls, monitor_config, invalid_cb):
try:
monitor_type = monitor_config['monitor_type']
except KeyError, e:
raise Exception("Monitor config missing field 'monitor_type'")
if not cls.registry.has_key(monitor_config['monitor_type']):
raise Exception("Unknown monitor type '{}'".format(monitor_type))
rospy.loginfo("Creating '{}' monitor".format(monitor_type))
return cls.registry[monitor_type](monitor_config, invalid_cb)
def set_invalid(self):
""" Set that this monitor wants to trigger. """
self._invalid_cb()
def start(self):
"""
Start this monitor, creating subscriptions and threads as needed.
Will be called when starting up, and when restarting
after the watchdog has fired.
"""
pass
def stop(self):
"""
Stops this monitor, cleaning up subscriptions and threads. Called when
shutting down the watchdog when it has fired.
"""
pass
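# Sketch of a concrete monitor registering itself through the metaclass
# above (the monitor type, config key and callback are hypothetical):
#
# class HeartbeatMonitor(MonitorType):
#     name = "heartbeat"
#     description = "Triggers when no heartbeat arrives in time."
#     config_keys = [("topic", "Topic to listen on")]
#
#     def start(self):
#         pass  # subscribe to self.topic; call self.set_invalid() on timeout
#
# monitor = MonitorType.create(
#     {"monitor_type": "heartbeat", "topic": "/heartbeat"}, invalid_cb)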
| {
"content_hash": "92851f49980b60f769c176831fc1a592",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 91,
"avg_line_length": 34.97435897435897,
"alnum_prop": 0.5586510263929618,
"repo_name": "bfalacerda/strands_apps",
"id": "3607dfc6d60f1430b2dfde40b76c51ff480bc965",
"size": "2728",
"binary": false,
"copies": "2",
"ref": "refs/heads/indigo-devel",
"path": "watchdog_node/src/watchdog_node/monitors/base_monitor.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "32677"
},
{
"name": "CMake",
"bytes": "9910"
},
{
"name": "Python",
"bytes": "122590"
},
{
"name": "Shell",
"bytes": "649"
}
],
"symlink_target": ""
} |
"""
Copyright 2014 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import requests
from requests import auth
from tempest.common.utils import misc as misc_utils
from tempest import config
from tempest.openstack.common import log as logging
LOG = logging.getLogger(__name__)
CONF = config.CONF
# Use local tempest conf if one is available.
# This usually means we're running tests outside of devstack.
if os.path.exists('./etc/dev_tempest.conf'):
CONF.set_config_path('./etc/dev_tempest.conf')
class BarbicanClientAuth(auth.AuthBase):
"""Implementation of Requests Auth for Barbican http calls."""
def __init__(self, auth_provider):
credentials = auth_provider.fill_credentials()
self.username = credentials.username
self.password = credentials.password
if 'v3' in CONF.identity.auth_version:
self.project_name = credentials.project_name
self.project_id = credentials.project_id
else:
self.tenant_name = credentials.tenant_name
self.project_id = credentials.tenant_id
try:
self.token = auth_provider.get_token()
except ValueError:
# hockeynut - some auth providers will allow the v3 expiration
# date format which includes milliseconds. This change will retry
# the call to get the auth token with the milliseconds included in
# the date format string.
auth_provider.EXPIRY_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
self.token = auth_provider.get_token()
def __call__(self, r):
r.headers['X-Project-Id'] = self.project_id
r.headers['X-Auth-Token'] = self.token
return r
class BarbicanClient(object):
def __init__(self, auth_provider, api_version='v1'):
self._auth = BarbicanClientAuth(auth_provider)
self._auth_provider = auth_provider
self.timeout = 10
self.api_version = api_version
self.default_headers = {
'Content-Type': 'application/json'
}
self.region = CONF.identity.region
def _attempt_to_stringify_content(self, content, content_tag):
if content is None:
return content
try:
# NOTE(jaosorior): The content is decoded as ascii since the
# logging module has problems with utf-8 strings and will end up
# trying to decode this as ascii.
return content.decode('ascii')
except UnicodeDecodeError:
# NOTE(jaosorior): Since we are using base64 as default and this is
# only for logging (in order to debug); Lets not put too much
# effort in this and just use encoded string.
return content.encode('base64')
def stringify_request(self, request_kwargs, response):
format_kwargs = {
'code': response.status_code,
'method': request_kwargs.get('method'),
'url': request_kwargs.get('url'),
'headers': response.request.headers,
}
format_kwargs['body'] = self._attempt_to_stringify_content(
request_kwargs.get('data'), 'body')
format_kwargs['response_body'] = self._attempt_to_stringify_content(
response.content, 'response_body')
return ('{code} {method} {url}\n'
'Request Headers: {headers}\n'
'Request Body: {body}\n'
'Response: {response_body}').format(**format_kwargs)
def log_request(self, request_kwargs, response):
test_name = misc_utils.find_test_caller()
str_request = self.stringify_request(request_kwargs, response)
LOG.info('Request (%s)\n %s', test_name, str_request)
def _status_is_2xx_success(self, status_code):
return status_code >= 200 and status_code < 300
def attempt_to_deserialize(self, response, model_type):
if (self._status_is_2xx_success(response.status_code) and
model_type and hasattr(model_type, 'json_to_obj')):
return model_type.json_to_obj(response.content)
return None
def attempt_to_serialize(self, model):
if model and hasattr(model, 'obj_to_json'):
return model.obj_to_json()
def get_base_url(self, include_version=True):
if CONF.keymanager.override_url:
return CONF.keymanager.override_url
filters = {
'service': 'key-manager',
'region': self.region,
'api_version': self.api_version if include_version else ''
}
return self._auth_provider.base_url(filters)
def get_list_of_models(self, item_list, model_type):
"""Takes a list of barbican objects and creates a list of models
:param item_list: the json returned from a barbican GET request for
a list of objects
:param model_type: The model used in the creation of the list of models
:return A list of models and the refs for next and previous lists.
"""
models, next_ref, prev_ref = [], None, None
for item in item_list:
if 'next' == item:
next_ref = item_list.get('next')
elif 'previous' == item:
prev_ref = item_list.get('previous')
elif item in ('secrets', 'orders', 'containers', 'consumers'):
for entity in item_list.get(item):
models.append(model_type(**entity))
return models, next_ref, prev_ref
def request(self, method, url, data=None, extra_headers=None,
use_auth=True, response_model_type=None, request_model=None,
params=None):
"""Prepares and sends http request through Requests."""
if 'http' not in url:
url = os.path.join(self.get_base_url(), url)
# Duplicate Base headers and add extras (if needed)
headers = {}
headers.update(self.default_headers)
if extra_headers:
headers.update(extra_headers)
# Attempt to serialize model if required
if request_model:
data = self.attempt_to_serialize(request_model)
# Prepare call arguments
call_kwargs = {
'method': method,
'url': url,
'headers': headers,
'data': data,
'timeout': self.timeout,
'params': params
}
if use_auth:
call_kwargs['auth'] = self._auth
response = requests.request(**call_kwargs)
# Attempt to deserialize the response
response.model = self.attempt_to_deserialize(response,
response_model_type)
self.log_request(call_kwargs, response)
return response
def get(self, *args, **kwargs):
"""Proxies the request method specifically for http GET methods."""
return self.request('GET', *args, **kwargs)
def post(self, *args, **kwargs):
"""Proxies the request method specifically for http POST methods."""
return self.request('POST', *args, **kwargs)
def put(self, *args, **kwargs):
"""Proxies the request method specifically for http PUT methods."""
return self.request('PUT', *args, **kwargs)
def delete(self, *args, **kwargs):
"""Proxies the request method specifically for http DELETE methods."""
return self.request('DELETE', *args, **kwargs)
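# Minimal usage sketch, assuming a tempest auth provider has been built
# elsewhere; the 'secrets' path and params are illustrative:
#
#   client = BarbicanClient(auth_provider)
#   resp = client.get('secrets', params={'limit': 10})
#   print resp.status_code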
| {
"content_hash": "638d61034acd2e56672c487ef9868b1a",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 79,
"avg_line_length": 37.27230046948357,
"alnum_prop": 0.6148129487340975,
"repo_name": "jmvrbanac/barbican",
"id": "5def5904b8085e1646452916f9e510db03f8798f",
"size": "7939",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "functionaltests/common/client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1217522"
},
{
"name": "Shell",
"bytes": "19818"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('app', '0009_class'),
]
operations = [
migrations.AddField(
model_name='ability',
name='cls',
field=models.ForeignKey(default=None, to='app.Class'),
),
]
| {
"content_hash": "f401c8d3fabf946c60c5066f680a899d",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 66,
"avg_line_length": 20.72222222222222,
"alnum_prop": 0.5764075067024129,
"repo_name": "XcomConvent/xcom40k-shades",
"id": "be3e0a0292d75fa533d37e261dd35eaaaece4299",
"size": "397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xcom40k/app/migrations/0010_ability_cls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "20534"
},
{
"name": "HTML",
"bytes": "99034"
},
{
"name": "Python",
"bytes": "64904"
},
{
"name": "Shell",
"bytes": "43"
}
],
"symlink_target": ""
} |
import apache_beam as beam
from log_elements import LogElements
class BreakIntoWordsDoFn(beam.DoFn):
def process(self, element):
return element.split()
with beam.Pipeline() as p:
(p | beam.Create(['Hello Beam', 'It is awesome'])
| beam.ParDo(BreakIntoWordsDoFn())
| LogElements())
| {
"content_hash": "1cfb9d5ae4c62143b4ccfe8009e4d9d6",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 51,
"avg_line_length": 18.41176470588235,
"alnum_prop": 0.6773162939297125,
"repo_name": "robertwb/incubator-beam",
"id": "8d63b568d439d5042f7af35fda15afde76c73692",
"size": "1375",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "learning/katas/python/Core Transforms/Map/ParDo OneToMany/task.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1598"
},
{
"name": "C",
"bytes": "3869"
},
{
"name": "CSS",
"bytes": "4957"
},
{
"name": "Cython",
"bytes": "59582"
},
{
"name": "Dart",
"bytes": "541526"
},
{
"name": "Dockerfile",
"bytes": "48191"
},
{
"name": "FreeMarker",
"bytes": "7933"
},
{
"name": "Go",
"bytes": "4688736"
},
{
"name": "Groovy",
"bytes": "888171"
},
{
"name": "HCL",
"bytes": "101646"
},
{
"name": "HTML",
"bytes": "164685"
},
{
"name": "Java",
"bytes": "38649211"
},
{
"name": "JavaScript",
"bytes": "105966"
},
{
"name": "Jupyter Notebook",
"bytes": "55818"
},
{
"name": "Kotlin",
"bytes": "209531"
},
{
"name": "Lua",
"bytes": "3620"
},
{
"name": "Python",
"bytes": "9785295"
},
{
"name": "SCSS",
"bytes": "312814"
},
{
"name": "Sass",
"bytes": "19336"
},
{
"name": "Scala",
"bytes": "1429"
},
{
"name": "Shell",
"bytes": "336583"
},
{
"name": "Smarty",
"bytes": "2618"
},
{
"name": "Thrift",
"bytes": "3260"
},
{
"name": "TypeScript",
"bytes": "181369"
}
],
"symlink_target": ""
} |
'''@file tfreader.py
contains the TfReader class'''
from abc import ABCMeta, abstractmethod, abstractproperty
import tensorflow as tf
class TfReader(object):
'''class for reading tfrecord files and processing them'''
__metaclass__ = ABCMeta
def __init__(self, datadirs):
'''TfReader constructor
Args:
datadirs: the directories where the metadata was stored as a list
of strings
'''
#read the metadata
self.metadata = self._read_metadata(datadirs)
#create the features object
self.features = self._create_features()
#create a reader
self.reader = tf.TFRecordReader()
def __call__(self, queue, name=None):
'''read all data from the queue
Args:
queue: a queue containing filenames of tf record files
name: the name of the operation
Returns:
a pair of tensor and sequence length
'''
with tf.name_scope(name or type(self).__name__):
#read all the elements in the queue
_, serialized = self.reader.read(queue)
#parse the serialized strings into features
features = tf.parse_single_example(serialized, self.features)
#process the parsed features
processed = self._process_features(features)
return processed
@abstractmethod
def _read_metadata(self, datadirs):
'''read the metadata for the reader (writen by the processor)
Args:
datadirs: the directories where the metadata was stored as a
list of strings
Returns:
the metadata as a dictionary
'''
@abstractmethod
def _create_features(self):
'''
creates the information about the features
Returns:
A dict mapping feature keys to FixedLenFeature, VarLenFeature,
and SparseFeature values
'''
@abstractmethod
def _process_features(self, features):
        '''process the read features
        Args:
            features: A dict mapping feature keys to Tensor and
                SparseTensor values
Returns:
a pair of tensor and sequence length
'''
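# Sketch of a concrete reader showing what the abstract hooks are
# expected to return; the feature names and metadata dict below are
# hypothetical:
#
# class ScalarReader(TfReader):
#     def _read_metadata(self, datadirs):
#         return {'dim': 1}
#
#     def _create_features(self):
#         return {'data': tf.FixedLenFeature([], tf.string),
#                 'length': tf.FixedLenFeature([], tf.int64)}
#
#     def _process_features(self, features):
#         data = tf.decode_raw(features['data'], tf.float32)
#         return data, features['length']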
| {
"content_hash": "846a0ebe12d2a824253787dc975c6026",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 78,
"avg_line_length": 27.11904761904762,
"alnum_prop": 0.5921861281826163,
"repo_name": "vrenkens/nabu",
"id": "6d3703256883a8e15ff1d65cbff1915e6e679b6a",
"size": "2278",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nabu/processing/tfreaders/tfreader.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "322709"
},
{
"name": "Shell",
"bytes": "5290"
}
],
"symlink_target": ""
} |
__author__ = 'tbeltramelli'
import cv2
from pylab import *
class UMedia:
@staticmethod
def get_image(path):
return cv2.imread(path)
@staticmethod
def load_video(path, callback):
cap = cv2.VideoCapture(path)
is_reading = True
while is_reading:
is_reading, img = cap.read()
if is_reading == True:
callback(img)
ch = cv2.waitKey(33)
if ch == 32:
cv2.destroyAllWindows()
break
@staticmethod
def load_media(path, callback):
if ".jpg" in path or ".png" in path or ".bmp" in path:
callback(UMedia.get_image(path))
else:
UMedia.load_video(path, callback)
@staticmethod
def show(*images):
for i, img in enumerate(images):
cv2.namedWindow(("image %d" % i), cv2.WINDOW_AUTOSIZE)
cv2.imshow(("image %d" % i), img)
#cv2.waitKey(0)
#cv2.destroyWindow(("image %d" % i))
@staticmethod
def show_all_gray(*images):
for i, img in enumerate(images):
gray()
subplot(1, len(images), i+1)
title(("image %d" % i))
imshow(img)
show()
@staticmethod
def show_all_rgb(*images):
for i, img in enumerate(images):
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
subplot(1, len(images), i+1)
title(("image %d" % i))
imshow(img)
show() | {
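# Typical usage (file paths are placeholders):
#
#   img = UMedia.get_image("frame.png")
#   UMedia.show(img)
#   UMedia.load_media("clip.avi", lambda frame: UMedia.show(frame))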
"content_hash": "15a901bff0d12e1f54381d6b98e2fe1a",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 66,
"avg_line_length": 26.25862068965517,
"alnum_prop": 0.5088640840446487,
"repo_name": "tonybeltramelli/Graphics-And-Vision",
"id": "32793e480af27e622934e781903b7963232f5b27",
"size": "1523",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Projective-Geometry/tony/com.tonybeltramelli.homography/UMedia.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "343757"
}
],
"symlink_target": ""
} |
import requests
import json
base_url = "https://api.sbgenomics.com/v2/"
def run_task(token, app_id):
url = base_url + "apps/" + app_id
headers = {'X-SBG-Auth-Token': token,
'Content-Type': 'application/json'}
resp = requests.get(url, headers=headers)
#print(resp.json())
print(json.dumps(resp.json(),indent=4))
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Fun with 7Bridges")
parser.add_argument('--token', help='your 7Bridges api access token')
parser.add_argument('--app_id', help='your 7Bridges App ID - "owner/project/appname/revision"')
args = parser.parse_args()
run_task(args.token, args.app_id)
| {
"content_hash": "57dc9896edea33265a5311be9f6f795f",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 99,
"avg_line_length": 30.956521739130434,
"alnum_prop": 0.648876404494382,
"repo_name": "hms-dbmi/fourfront",
"id": "bcb00688beb5e415759964810d9ff0ccc6796779",
"size": "712",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/encoded/workflow_examples/get_cwl_from_SBG.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "741"
},
{
"name": "CSS",
"bytes": "198339"
},
{
"name": "Cucumber",
"bytes": "16918"
},
{
"name": "HTML",
"bytes": "371973"
},
{
"name": "JavaScript",
"bytes": "1403972"
},
{
"name": "Makefile",
"bytes": "110"
},
{
"name": "PLpgSQL",
"bytes": "12067"
},
{
"name": "Python",
"bytes": "751772"
},
{
"name": "Ruby",
"bytes": "1066"
},
{
"name": "Shell",
"bytes": "2248"
}
],
"symlink_target": ""
} |
import glob
import os
import sys
from lib.util import execute
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def main():
os.chdir(SOURCE_ROOT)
eslint = os.path.join(SOURCE_ROOT, 'node_modules', '.bin', 'eslint')
if sys.platform in ['win32', 'cygwin']:
eslint += '.cmd'
settings = ['--quiet', '--config']
sourceConfig = os.path.join('script', 'eslintrc-base.json')
sourceFiles = ['atom']
execute([eslint] + settings + [sourceConfig] + sourceFiles)
specConfig = os.path.join('script', 'eslintrc-spec.json')
specFiles = glob.glob('spec/*.js')
execute([eslint] + settings + [specConfig] + specFiles)
if __name__ == '__main__':
sys.exit(main())
| {
"content_hash": "b9a167f05f86a647bbe6af5bfde3c580",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 73,
"avg_line_length": 24.24137931034483,
"alnum_prop": 0.6500711237553343,
"repo_name": "Evercoder/electron",
"id": "8fa2c3a7e3a7f4fcbed4f69d0d6ddebb0d79ee6d",
"size": "726",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "script/eslint.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "4384"
},
{
"name": "C++",
"bytes": "1718549"
},
{
"name": "HTML",
"bytes": "5014"
},
{
"name": "JavaScript",
"bytes": "365145"
},
{
"name": "Objective-C",
"bytes": "15129"
},
{
"name": "Objective-C++",
"bytes": "141411"
},
{
"name": "Python",
"bytes": "80823"
},
{
"name": "Shell",
"bytes": "2593"
}
],
"symlink_target": ""
} |
import socket
from qpid.util import connect
TRANSPORTS = {}
class SocketTransport:
def __init__(self, conn, host, port):
self.socket = connect(host, port)
if conn.tcp_nodelay:
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
def fileno(self):
return self.socket.fileno()
class tcp(SocketTransport):
def reading(self, reading):
return reading
def writing(self, writing):
return writing
def send(self, bytes):
return self.socket.send(bytes)
def recv(self, n):
return self.socket.recv(n)
def close(self):
self.socket.close()
TRANSPORTS["tcp"] = tcp
try:
from ssl import wrap_socket, SSLError, SSL_ERROR_WANT_READ, \
SSL_ERROR_WANT_WRITE, CERT_REQUIRED, CERT_NONE
except ImportError:
## try the older python SSL api:
from socket import ssl
class old_ssl(SocketTransport):
def __init__(self, conn, host, port):
SocketTransport.__init__(self, conn, host, port)
# Bug (QPID-4337): this is the "old" version of python SSL.
# The private key is required. If a certificate is given, but no
# keyfile, assume the key is contained in the certificate
ssl_keyfile = conn.ssl_keyfile
ssl_certfile = conn.ssl_certfile
if ssl_certfile and not ssl_keyfile:
ssl_keyfile = ssl_certfile
# this version of SSL does NOT perform certificate validation. If the
# connection has been configured with CA certs (via ssl_trustfile), then
# the application expects the certificate to be validated against the
# supplied CA certs. Since this version cannot validate, the peer cannot
# be trusted.
if conn.ssl_trustfile:
raise socket.error("This version of Python does not support verification of the peer's certificate.")
self.ssl = ssl(self.socket, keyfile=ssl_keyfile, certfile=ssl_certfile)
self.socket.setblocking(1)
def reading(self, reading):
return reading
def writing(self, writing):
return writing
def recv(self, n):
return self.ssl.read(n)
def send(self, s):
return self.ssl.write(s)
def close(self):
self.socket.close()
TRANSPORTS["ssl"] = old_ssl
TRANSPORTS["tcp+tls"] = old_ssl
else:
class tls(SocketTransport):
def __init__(self, conn, host, port):
SocketTransport.__init__(self, conn, host, port)
if conn.ssl_trustfile:
validate = CERT_REQUIRED
else:
validate = CERT_NONE
# if user manually set flag to false then require cert
actual = getattr(conn, "_ssl_skip_hostname_check_actual", None)
if actual is not None and conn.ssl_skip_hostname_check is False:
validate = CERT_REQUIRED
self.tls = wrap_socket(self.socket, keyfile=conn.ssl_keyfile,
certfile=conn.ssl_certfile,
ca_certs=conn.ssl_trustfile,
cert_reqs=validate)
if validate == CERT_REQUIRED and not conn.ssl_skip_hostname_check:
verify_hostname(self.tls.getpeercert(), host)
self.socket.setblocking(0)
self.state = None
# See qpid-4872: need to store the parameters last passed to tls.write()
# in case the calls fail with an SSL_ERROR_WANT_* error and we have to
# retry the call with the same parameters.
self.write_retry = None # buffer passed to last call of tls.write()
def reading(self, reading):
if self.state is None:
return reading
else:
return self.state == SSL_ERROR_WANT_READ
def writing(self, writing):
if self.state is None:
return writing
else:
return self.state == SSL_ERROR_WANT_WRITE
def send(self, bytes):
if self.write_retry is None:
self.write_retry = bytes
self._clear_state()
try:
n = self.tls.write( self.write_retry )
self.write_retry = None
return n
except SSLError, e:
if self._update_state(e.args[0]):
# will retry on next invokation
return 0
self.write_retry = None
raise
except:
self.write_retry = None
raise
def recv(self, n):
self._clear_state()
try:
return self.tls.read(n)
except SSLError, e:
if self._update_state(e.args[0]):
# will retry later:
return None
else:
raise
def _clear_state(self):
self.state = None
def _update_state(self, code):
if code in (SSL_ERROR_WANT_READ, SSL_ERROR_WANT_WRITE):
self.state = code
return True
else:
return False
def close(self):
self.socket.setblocking(1)
# this closes the underlying socket
self.tls.close()
def verify_hostname(peer_certificate, hostname):
match_found = False
peer_names = []
if peer_certificate:
if 'subjectAltName' in peer_certificate:
for san in peer_certificate['subjectAltName']:
if san[0] == 'DNS':
peer_names.append(san[1].lower())
if 'subject' in peer_certificate:
for sub in peer_certificate['subject']:
while isinstance(sub, tuple) and isinstance(sub[0], tuple):
sub = sub[0] # why the extra level of indirection???
if sub[0] == 'commonName':
peer_names.append(sub[1].lower())
for pattern in peer_names:
if _match_dns_pattern(hostname.lower(), pattern):
match_found = True
break
if not match_found:
raise SSLError("Connection hostname '%s' does not match names from peer certificate: %s" % (hostname, peer_names))
def _match_dns_pattern( hostname, pattern ):
""" For checking the hostnames provided by the peer's certificate
"""
if pattern.find("*") == -1:
return hostname == pattern
# DNS wildcarded pattern - see RFC2818
h_labels = hostname.split(".")
p_labels = pattern.split(".")
while h_labels and p_labels:
if p_labels[0].find("*") == -1:
if p_labels[0] != h_labels[0]:
return False
else:
p = p_labels[0].split("*")
if not h_labels[0].startswith(p[0]):
return False
if not h_labels[0].endswith(p[1]):
return False
h_labels.pop(0)
p_labels.pop(0)
return not h_labels and not p_labels
TRANSPORTS["ssl"] = tls
TRANSPORTS["tcp+tls"] = tls
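# For reference, the RFC2818 wildcard rules implemented above behave
# like this (hostnames are illustrative):
#
#   _match_dns_pattern("api.example.com", "*.example.com")   -> True
#   _match_dns_pattern("example.com", "*.example.com")       -> False
#   _match_dns_pattern("a.b.example.com", "*.example.com")   -> False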
| {
"content_hash": "0e2c3fd26786cbce182724779edd1a83",
"timestamp": "",
"source": "github",
"line_count": 217,
"max_line_length": 120,
"avg_line_length": 29.52995391705069,
"alnum_prop": 0.615792759051186,
"repo_name": "irinabov/debian-qpid-python",
"id": "c4e7c6834ee64bf00638ec3a94d102252c95f277",
"size": "7198",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "qpid/messaging/transports.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "43"
},
{
"name": "Python",
"bytes": "999393"
}
],
"symlink_target": ""
} |
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = "Dance around like a madman."
args = ''
requires_system_checks = '__all__'
def add_arguments(self, parser):
parser.add_argument("integer", nargs='?', type=int, default=0)
parser.add_argument("-s", "--style", default="Rock'n'Roll")
parser.add_argument("-x", "--example")
parser.add_argument("--opt-3", action='store_true', dest='option3')
def handle(self, *args, **options):
example = options["example"]
if example == "raise":
raise CommandError(returncode=3)
if options['verbosity'] > 0:
self.stdout.write("I don't feel like dancing %s." % options["style"])
self.stdout.write(','.join(options))
if options['integer'] > 0:
self.stdout.write("You passed %d as a positional argument." % options['integer'])
| {
"content_hash": "6c78eae42ee8efb969a9f4ec9595c441",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 93,
"avg_line_length": 40.91304347826087,
"alnum_prop": 0.6057385759829969,
"repo_name": "koordinates/django",
"id": "efa1bc0d8a4f21ed621a55673e63f7bf56dd4671",
"size": "941",
"binary": false,
"copies": "19",
"ref": "refs/heads/stable/3.2.x-kx",
"path": "tests/user_commands/management/commands/dance.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "84917"
},
{
"name": "HTML",
"bytes": "223820"
},
{
"name": "JavaScript",
"bytes": "139791"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Python",
"bytes": "14472067"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "142"
}
],
"symlink_target": ""
} |
'''
Created on 2014-11-12
'''
class ModuleFactory():
_builders = {}
def registerModuleBuilder(self, module_type, module_builder):
        self._builders[module_type] = module_builder
def buildModule(self, module_type, module_config, context=None):
builder = self._builders[module_type]
return builder.build(module_config, context)
class Module(object):
'''
    Base class for modules produced by ModuleFactory builders.
'''
_dependents = None
_context = None
input_name = None
def __init__(self, module_setting, context=None):
self._id = module_setting['id']
self._dependents = []
self._context = context
class ModuleBuilder(object):
def build(self, module_config, context=None):
pass
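# Sketch of wiring a builder into the factory; the module type, builder
# class and config below are hypothetical:
#
# class RssModuleBuilder(ModuleBuilder):
#     def build(self, module_config, context=None):
#         return Module(module_config, context)
#
# factory = ModuleFactory()
# factory.registerModuleBuilder('rss', RssModuleBuilder())
# module = factory.buildModule('rss', {'id': 'feed-1'})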
| {
"content_hash": "959685b5d71136cb50101ab4099a6307",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 68,
"avg_line_length": 22.794117647058822,
"alnum_prop": 0.6025806451612903,
"repo_name": "kevenli/FeedIn",
"id": "3a4187ed314224ca2b53cb4d2919c7ec6e8e46b0",
"size": "817",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "feedin/modules/module.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "106298"
},
{
"name": "Python",
"bytes": "52804"
}
],
"symlink_target": ""
} |
'''
Copyright (c) 2017 Brandon Butch
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import simplejson
import logging
import threading
import os
import time
from queue import Empty
from threading import Thread, Lock, RLock
from enum import Enum
from datetime import datetime
from decimal import Decimal, ROUND_DOWN, ROUND_UP
from queue import Queue
from .ws_client import WebSocketClient
from .auth_client import AuthenticatedClient
#When placing a new order on GDAX, we create an Order instance for our local record keeping
class Order():
def __init__(self, side, type, product_id):
self.server_msgs = list()
self.trade_ids = list()
self.fills = dict()
self.order_id = None
self.side = side
self.product_id = product_id
self.type = type
self.price = Decimal(-1)
self.filled_size = Decimal(0)
self.post_only = False
self.fill_fees = Decimal(0)
self.executed_value = Decimal(0)
self.final_cost = Decimal(0)
self.hold_amount = Decimal(0)
self.status = OrderStatus.OPEN
self.settled = False
self.fee_percentage = BotBase.FEE_STRUCTURE[self.product_id]
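# Worked example of the taker-fee arithmetic applied to fills (numbers
# are illustrative): a 0.5 BTC taker fill at 2500.00 USD on BTC-USD
# accrues Decimal("0.5") * Decimal("2500.00") * Decimal("0.0025")
# = 3.125 USD, which _process_order_msgs adds to the order's fill_fees.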
class OrderStatus(Enum):
OPEN = 1
FILLED = 2
CX_NO_FILL = 3
CX_PART_FILL = 4
class BotBase:
#CONSTANTS
FEE_STRUCTURE = {"BTC-USD": Decimal("0.0025"),
"ETH-USD": Decimal("0.003"),
"LTC-USD": Decimal("0.003"),
"ETH-BTC": Decimal("0.003"),
"LTC-BTC": Decimal("0.003")}
MIN_DENOM = {"BTC": Decimal("0.00000001"),
"LTC": Decimal("0.00000001"),
"ETH": Decimal("0.000000000000000001")}
BTC = "BTC"
LTC = "LTC"
ETH = "ETH"
USD = "USD"
BUY = "buy"
SELL = "sell"
LIMIT = "limit"
MARKET = "market"
STOP = "stop"
PRODUCT_ID = "product_id"
ORDER_ID = "order_id"
MESSAGE = "message"
TYPE = "type"
DONE = "done"
DONE_REASON = "done_reason"
REASON = "reason"
NOT_FOUND = "NotFound"
ORDER_ALREADY_DONE = "Order already done"
MATCH = "match"
FILLED = "filled"
CANCELED = "canceled"
TRADE_ID = "trade_id"
STATUS = "status"
REJECTED = "rejected"
REJECT_REASON = "reject_reason"
POST_ONLY = "post only"
ASK = "ask"
BID = "bid"
BEST_ASK = "best_ask"
BEST_BID = "best_bid"
SIZE = "size"
ID = "id"
PRICE = "price"
EXECUTED_VALUE = "executed_value"
FILL_FEES = "fill_fees"
FINAL_COST = "final_cost"
LOG_FILE = "log.log"
TAKER_ORDER_ID = "taker_order_id"
MAKER_ORDER_ID = "maker_order_id"
FEE = "fee"
INTERNAL_SERVER_ERROR = "Internal server error"
def __init__(self, bankroll, key, b64secret, passphrase, *product_ids):
self.round_ongoing = False
self.market_price = dict()
self.current_round = 1
self.available = dict(bankroll)
self.on_hold = dict()
self.total_cost = dict()
self.avg_cost = dict()
self.orders = dict()
self.product_ids = product_ids
self.bankroll = bankroll
self.bot_id = datetime.now().strftime("%Y%m%d%H%M%S%f")
self.bot_dir = "./" + self.bot_id + "/"
directory = os.path.dirname(self.bot_dir)
try:
os.makedirs(directory)
except OSError as e:
raise
self._auth_client = AuthenticatedClient(key, b64secret, passphrase)
self.__logger = logging.getLogger("root")
self.__trading = True
self.__order_lock = Lock()
self.__wsclient = WebSocketClient(self.product_ids, key, b64secret, passphrase)
self.__trading_thread = Thread(name = "TradingThread", target = self._trade)
self.__ticker_processing_thread = Thread(name = "TickerProcessingThread", target = self._process_ticker_msgs)
self.__order_processing_thread = Thread(name = "OrderProcessingThread", target = self._process_order_msgs)
self.__logger.setLevel(logging.DEBUG)
self.__logger.propagate = 0
        log_formatter = logging.Formatter("[%(threadName)s][%(filename)s:%(funcName)s()][%(levelname)s][%(asctime)s]: %(message)s\n")
log_fh = logging.FileHandler(self.bot_dir + self.LOG_FILE)
log_fh.setLevel(logging.DEBUG)
log_fh.setFormatter(log_formatter)
self.__logger.addHandler(log_fh)
self.__logger.info("Authenticated client configured for LIVE server")
for product_id in self.product_ids:
self.total_cost[product_id[:3]] = {product_id[-3:]: Decimal(0)}
self.avg_cost[product_id[:3]] = {product_id[-3:]: Decimal(0)}
self.on_hold[product_id[:3]] = Decimal(0)
self.on_hold[product_id[-3:]] = Decimal(0)
if not product_id[:3] in self.available:
self.available[product_id[:3]] = Decimal(0)
if not product_id[-3:] in self.available:
                self.available[product_id[-3:]] = Decimal(0)
self.__logger.info("Retrieving initial market prices...")
for product_id in self.product_ids:
ticker = self._request(self._auth_client.get_product_ticker, product_id = product_id)
self.market_price[product_id] = (Decimal(ticker[self.ASK]) + Decimal(ticker[self.BID]))/2
self.__logger.info("Initial market prices retrieved")
self.__ticker_processing_thread.daemon = True
self.__order_processing_thread.daemon = True
self.__trading_thread.daemon = True
self.__logger.debug("Init for BotBase completed")
#Processes ticker messages that are queued by WebSocketClient; loops indefinitely in its own
#thread until it gets() None from the queue.
def _process_ticker_msgs(self):
processing = True
while processing:
msg = self.__wsclient.ticker_msgs.get()
if msg == None:
processing = False
else:
product_id = msg[self.PRODUCT_ID]
self.market_price[product_id] = (Decimal(msg[self.BEST_ASK]) + Decimal(msg[self.BEST_BID]))/2 #mid-market price
self.__wsclient.ticker_msgs.task_done()
#Processes order messages that are queued by WebSocketClient; loops indefinitely in its own
#thread until it gets() None from the queue. WebSocketClient only queues messages that are
#relevant to orders that we have placed, so every messages we get() here is important. This
#function updates the individual order as necessary (eg if there is a "match" message, we
#updated the filled_size here). If we receive a "done" message (indicating that an order is
#completed finished because it was filled or canceled), we call _settle() to update our local
#books.
def _process_order_msgs(self):
processing = True
while processing:
if self.__wsclient.unrectified_dropped_msgs:
with self.__wsclient.dropped_msgs_lock:
with self.__order_lock:
self._rectify_orders()
self.__wsclient.unrectified_dropped_msgs = False
try:
msg = self.__wsclient.order_msgs.get(block = False)
except Empty:
continue
if msg == None:
processing = False
else:
with self.__order_lock:
self.__logger.debug("Received message: " + str(msg))
if self.ORDER_ID in msg:
order_id = msg[self.ORDER_ID]
elif self.MAKER_ORDER_ID in msg and msg[self.MAKER_ORDER_ID] in self.orders:
order_id = msg[self.MAKER_ORDER_ID]
taker = False
elif self.TAKER_ORDER_ID in msg and msg[self.TAKER_ORDER_ID] in self.orders:
order_id = msg[self.TAKER_ORDER_ID]
taker = True
self.orders[order_id].server_msgs.append(msg)
#If this is a "match" message, and we have NOT seen it before, we update the order's status locally
#Because of dropped messages combined with network lag, the second condition is necessary to prevent
#double counting of fills that can occur when _rectify_orders is called
if msg[self.TYPE] == self.MATCH and not msg[self.TRADE_ID] in self.orders[order_id].trade_ids:
self.orders[order_id].filled_size += Decimal(msg[self.SIZE])
self.orders[order_id].trade_ids.append(msg[self.TRADE_ID])
if not msg[self.PRICE] in self.orders[order_id].fills:
self.orders[order_id].fills[msg[self.PRICE]] = Decimal(0)
self.orders[order_id].fills[msg[self.PRICE]] += Decimal(msg[self.SIZE])
if taker:
fee = Decimal(msg[self.SIZE]) * Decimal(msg[self.PRICE]) * self.orders[order_id].fee_percentage
self.orders[order_id].fill_fees += fee
elif msg[self.TYPE] == self.DONE:
if msg[self.REASON] == self.FILLED:
self.orders[order_id].status = OrderStatus.FILLED
else:
if self.orders[order_id].filled_size > 0:
self.orders[order_id].status = OrderStatus.CX_PART_FILL
else:
self.orders[order_id].status = OrderStatus.CX_NO_FILL
if not self.orders[order_id].settled:
self._settle(order_id)
self.__wsclient.order_msgs.task_done()
#Rectifies our local orders against the server's. This is called when we detect that messages were dropped from the
#websocket feed. It's not possible to outright determine if any of the dropped messages applied to us, so we have to
#rectify our order manually with requests to the server.
def _rectify_orders(self):
self.__logger.warning("WebSocket client indicated that messages were dropped. Rectifying local order records...")
for order_id in self.orders:
if self.orders[order_id].status == OrderStatus.OPEN:
server_order = self._request(self._auth_client.get_order, order_id)
#If an order was canceled with no fills in its lifetime, GDAX will (probably) purge it from its records
if (self.MESSAGE, self.NOT_FOUND) in server_order.items():
self.orders[order_id].status = OrderStatus.CX_NO_FILL
else:
#We get all fills for this order and compare them against the ones we saw on the websocket feed, updating accordingly
fills = self._request(self._auth_client.get_fills, order_id = order_id)
for page in fills:
for fill in page:
if not fill[self.TRADE_ID] in self.orders[order_id].trade_ids:
self.orders[order_id].filled_size += Decimal(fill[self.SIZE])
self.orders[order_id].trade_ids.append(fill[self.TRADE_ID])
self.orders[order_id].fill_fees += Decimal(fill[self.FEE])
if not fill[self.PRICE] in self.orders[order_id].fills:
self.orders[order_id].fills[fill[self.PRICE]] = Decimal(0)
self.orders[order_id].fills[fill[self.PRICE]] += Decimal(fill[self.SIZE])
if server_order[self.STATUS] == self.DONE:
if server_order[self.DONE_REASON] == self.FILLED:
self.orders[order_id].status = OrderStatus.FILLED
elif server_order[self.DONE_REASON] == self.CANCELED:
if self.orders[order_id].filled_size > 0:
self.orders[order_id].status = OrderStatus.CX_PART_FILL
else:
self.orders[order_id].status = OrderStatus.CX_NO_FILL
if not self.orders[order_id].settled:
self._settle(order_id)
#Wrapper to _request() to place a buy order. After sending the request to GDAX servers, we update our local
#books with appropriate holds, balances, etc. Currently, for market/stop orders, you must provide "funds" as
#an argument; this function does not allow you to specify "size" yet. We lock this function with __order_lock
#to prevent our order processing thread from accidentally ignoring messages it may receive about this order
#before we've had a chance to update our local books.
def _buy(self, type, product_id, funds, price = None, post_only = True, time_in_force = "GTC"):
with self.__order_lock:
if type == self.MARKET:
response = self._request(self._auth_client.buy, type = self.MARKET, product_id = product_id, funds = str(funds))
elif type == self.LIMIT:
product = product_id[:3]
min_denom = self.MIN_DENOM[product_id[:3]]
if not post_only:
funds = funds * (Decimal(1) - self.FEE_STRUCTURE[product_id])
funds = funds.quantize(Decimal("0.01"), rounding = ROUND_DOWN)
size = funds / price
size = size.quantize(min_denom, rounding = ROUND_DOWN)
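                #Worked example (hypothetical numbers): with funds = 100.00 USD,
                #price = 50.00 and min_denom = 0.00000001, size quantizes to
                #2.00000000 before the limit order is submitted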
response = self._request(self._auth_client.buy, type = self.LIMIT, product_id = product_id,
price = str(price), size = str(size), post_only = post_only, time_in_force = time_in_force)
elif type == self.STOP:
response = self._request(self._auth_client.buy, type = self.STOP, product_id = product_id,
funds = str(funds), price = str(price))
if response[self.STATUS] == self.REJECTED:
if response[self.REJECT_REASON] == self.POST_ONLY:
self.__logger.info("Buy order was rejected (post only): " + str(response))
return None
else:
self.__logger.critical("Buy order was rejected for unknown reason: " + str(response))
raise RuntimeException("Buy order was rejected for unknown reason: " + str(response))
else:
order = Order(self.BUY, type, product_id)
order.order_id = response[self.ID]
order.hold_amount = funds
if type == self.LIMIT:
order.price = price
order.post_only = post_only
currency = product_id[-3:]
self.available[currency] -= order.hold_amount
self.on_hold[currency] += order.hold_amount
self.orders[order.order_id] = order
self.__logger.info("Buy order placed -- local record: " + str(order))
self._log_botstate()
return order.order_id
#Wrapper to _request() to place a sell order. After sending the request to GDAX servers, we update our local
#books with appropriate holds, balances, etc. We lock this function with __order_lock to prevent our order
#processing thread from accidentally ignoring messages it may receive about this order before we've had a
#chance to update our local books.
def _sell(self, type, product_id, size, price = None, post_only = True, time_in_force = "GTC"):
with self.__order_lock:
if type == self.MARKET:
response = self._request(self._auth_client.sell, type = self.MARKET, product_id = product_id, size = str(size))
elif type == self.LIMIT:
response = self._request(self._auth_client.sell, type = self.LIMIT, product_id = product_id,
size = str(size), price = str(price), post_only = post_only, time_in_force = time_in_force)
elif type == self.STOP:
response = self._request(self._auth_client.sell, type = self.STOP, product_id = product_id,
size = str(size), price = str(price))
if response[self.STATUS] == self.REJECTED:
if response[self.REJECT_REASON] == self.POST_ONLY:
self.__logger.info("Sell order was rejected (post only): " + str(response))
return None
else:
self.__logger.critical("Sell order was rejected for unknown reason: " + str(response))
raise RuntimeException("Sell order was rejected for unknown reason: " + str(response))
else:
order = Order(self.SELL, type, product_id)
order.order_id = response[self.ID]
order.hold_amount = size
if type == self.LIMIT:
order.price = price
order.post_only = post_only
product = product_id[:3]
self.available[product] -= order.hold_amount
self.on_hold[product] += order.hold_amount
self.orders[order.order_id] = order
self.__logger.info("Sell order placed -- local record: " + str(order))
self._log_botstate()
return order.order_id
#Wrapper to _request() to cancel an order. As expected, returns True if the order was successfully canceled;
#False otherwise. Note that orders may be canceled when they have been partially filled, and so this function
#will cancel the order and return True. If the order cannot be canceled, it is likely because it has already
#been completely filled (or previously canceled).
def _cancel_order(self, order_id):
with self.__order_lock:
response = self._request(self._auth_client.cancel_order, order_id = order_id)
if response == [None]:
return True
else:
self.__logger.warning("Unable to cancel order: " + order_id + ". " + str(response))
return False
#Wrapper to the "raw" request functions provided by AuthenticatedClient. The argument "request" should be
#a function pointer to the _auth_client function we wish to invoke; *args and **kwargs should be the
#arguments you want to pass the that function. This wrapper retries the request several times if it fails
#(likely due to network issues).
def _request(self, request, *args, **kwargs):
num_attempts = 0
successful = False
while not successful:
try:
num_attempts += 1
response = request(*args, **kwargs)
except Exception as e:
self.__logger.warning("Unable to request \"" + str(request) + "\" with arguments: " +
str(kwargs) + " because of exception raised: " + str(type(e)) + ", " + str(e) +
", " + str(e.args) + ". " + str(num_attempts) + " attempt(s) have been made.")
if num_attempts > 10:
self.__logger.critical("Unable to request \"" + str(request) + "\" after " + str(num_attempts) + " attempts")
raise RuntimeError("Unable to request \"" + str(request) + "\" after " + str(num_attempts) + " attempts")
else:
if self.MESSAGE in response and response[self.MESSAGE] == self.INTERNAL_SERVER_ERROR:
successful = False
self.__logger.debug("GDAX responded with an interal server error while attempting to request: \"" + str(request) + "\" with arguments " +
str(args) + " " + str(kwargs) + ". " + str(num_attempts) + " attempt(s) have been made.")
else:
successful = True
self.__logger.debug("Successfully requested \"" + str(request) + "\" with arguments " + str(args) + " " + str(kwargs) + " after " +
str(num_attempts) + " attempt(s). " + "Server response: " + str(response))
return response
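    #Illustrative (hypothetical) call, mirroring how _buy() uses this wrapper:
    #    response = self._request(self._auth_client.buy, type = self.LIMIT,
    #                             product_id = "BTC-USD", price = "100.00", size = "0.01")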
#Settle a filled or canceled order locally. This function is called from our order processing thread
    #when it receives a message from the feed indicating that an order is complete. This function updates
#all local books appropriately, including for cases when a partially filled order was canceled.
def _settle(self, order_id):
self.__logger.info("Settling order " + order_id)
order = self.orders[order_id]
product_id = order.product_id
side = order.side
type = order.type
hold_amount = order.hold_amount
post_only = order.post_only
filled_size = order.filled_size
fill_fees = order.fill_fees
if order.type in [self.STOP, self.MARKET]:
executed_value = Decimal(0)
for fill_price in self.orders[order_id].fills:
executed_value += Decimal(fill_price) * self.orders[order_id].fills[fill_price]
else:
executed_value = order.price * filled_size
self.orders[order_id].executed_value = executed_value
self.orders[order_id].fill_fees = fill_fees
if side == self.BUY:
self.available[product_id[:3]] += filled_size
self.on_hold[product_id[-3:]] -= hold_amount
self.available[product_id[-3:]] += hold_amount - (executed_value + fill_fees)
self.orders[order_id].final_cost = executed_value + fill_fees #final_cost > 0 means that we spent currency
elif side == self.SELL:
self.available[product_id[-3:]] += executed_value - fill_fees
self.on_hold[product_id[:3]] -= hold_amount
self.available[product_id[:3]] += hold_amount - filled_size
self.orders[order_id].final_cost = fill_fees - executed_value #final_cost < 0 means that we gained currency
self.total_cost[product_id[:3]][product_id[-3:]] += self.orders[order_id].final_cost
self._update_avg_costs()
self.__logger.info("Settled order: " + order_id)
self._log_botstate()
self.orders[order_id].settled = True
    #Simple helper function to update the average costs of our products; called at the end of _settle()
def _update_avg_costs(self):
for product_id in self.product_ids:
product = product_id[:3]
currency = product_id[-3:]
product_held = self.available[product] + self.on_hold[product]
if product_held > 0:
self.avg_cost[product][currency] = self.total_cost[product][currency] / product_held
else:
self.avg_cost[product][currency] = 0
self.total_cost[product][currency] = 0
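    #Worked example (hypothetical numbers): holding 2.0 total units of a product
    #acquired at a total_cost of 8000.00 gives avg_cost = 8000.00 / 2.0 = 4000.00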
#Returns relevant bookkeeping information for ease of logging
def get_current_botstate(self):
botstate = {"Initial bankroll": self.bankroll,
"Current round": self.current_round,
"Available": self.available,
"On hold": self.on_hold,
"Total cost": self.total_cost,
"Average cost": self.avg_cost}
return botstate
#Log the botstate
def _log_botstate(self):
botstate = self.get_current_botstate()
self.__logger.info(simplejson.dumps(botstate, sort_keys = True, indent = 4))
#TODO remove this once we are confident
accounts = self._request(self._auth_client.get_accounts)
self.__logger.debug(str(accounts))
#Wait for threads to join, stop the websocket client
def _clean_up(self):
self.__logger.debug("Blocking until all threads are joined")
self.__wsclient.ticker_msgs.put(None)
self.__wsclient.order_msgs.put(None)
self.__order_processing_thread.join()
self.__ticker_processing_thread.join()
self.__logger.debug("All threads joined")
self.__wsclient.stop()
self.__logger.debug("Websocket client stopped")
self.__logger.debug(str(threading.active_count()) + " active threads: " + str(threading.enumerate()))
self.__logger.info("Shutdown complete")
#The main trading loop; loops indefinitely in its own thread until stop() is called. Each iteration,
#we check with the websocket client to determine if any messages were dropped and if so, we call
#_rectify_orders() here to update our local books as necessary.
def _trade(self):
self.__logger.info("Beginning trading...")
while self.__trading:
self._init_strategy()
while self.round_ongoing:
self._iterate_strategy()
self.__logger.info("Trading round " + str(self.current_round) + " completed.")
self._log_botstate()
self.orders = dict()
self.current_round += 1
self.__logger.info("Beginning trading round " + str(self.current_round))
self.__logger.info("Trading complete after " + str(self.current_round + 1) + " round(s)")
#Start trading
def start(self):
self.__logger.debug("BotBase starting...")
self.__wsclient.start()
while not self.__wsclient.ready:
pass
self.__order_processing_thread.start()
self.__ticker_processing_thread.start()
self.__trading_thread.start()
#Stop trading
def stop(self):
self.__trading = False
self.__logger.info("Trading will halt after the current round is completed...")
self.__trading_thread.join()
self._clean_up()
#This function must be implemented by the child class. The function is called once at the beginning of every trading round.
#If the strategy employed by the child class needs to perform any initialization for the beginning of the trading round
    #(such as placing an initial order to acquire a commodity), it should be done in this function. The implementation must
#then set self.round_ongoing = True to signal that the round should begin. If initialization fails for some reason,
    #the child should set self.round_ongoing = False to signal that the round should not begin.
def _init_strategy(self):
raise NotImplementedError("This function must be implemented by any class inheriting from BotBase")
#This function must be implemented by the child class. This function is called once per iteration in the main trading
#loop. The logic of this function is determined by the strategy that the child class employs, but typically this
#function will evaluate our current orders/holdings/balances and the current market conditions, then place orders
#determined by the rules of the strategy being employed. Once the round is complete (that is, the goals for the
#trading round have been met), this function must set self.round_ongoing = False to signal that the round should end.
def _iterate_strategy(self):
raise NotImplementedError("This function must be implemented by any class inheriting from BotBase")
| {
"content_hash": "ca58d99a5a5bc04698275cae3cb76652",
"timestamp": "",
"source": "github",
"line_count": 567,
"max_line_length": 157,
"avg_line_length": 51.232804232804234,
"alnum_prop": 0.5810527040517746,
"repo_name": "bbutch/gtrade",
"id": "6452b2ac350dd7eef9fdcb9ca159a2e2a117afe0",
"size": "29095",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gtrade/botbase.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "58581"
}
],
"symlink_target": ""
} |
import numpy as np
from sklearn.cluster import KMeans
import csv
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction.text import CountVectorizer
from sklearn import tree
from sklearn.cross_validation import train_test_split
from sklearn.cross_validation import ShuffleSplit
from sklearn.utils import shuffle
import numpy
import random
import pandas as pd
import postprocessing
import sys
import warnings
warnings.filterwarnings("ignore")
def QuickReduct(C,D):
    """Rough-set style QuickReduct: greedily build a subset R of the
    classifiers C whose accumulated dependency degree (the sum of correct
    decisions recorded in table D) reaches that of the full classifier set."""
gammaCD = 0
for t in D:
for num in t:
gammaCD = gammaCD+num
gammaRD = 0
T = []
R = []
while gammaRD < gammaCD:
T = R
X = list(set(C) - set(R))
for index in range(len(X)):
gammaRXD = gammaRD
for num in range(len(D)):
gammaRXD = gammaRXD+D[num][index]
if(gammaRXD > gammaRD):
R.append(X[index])
T = R
gammaRD = gammaRXD
R = T
return R
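#Illustrative call (hypothetical data): given classifiers ["c0", "c1", "c2"] and
#a decision table D where D[doc][clf] is 1 when classifier clf labelled document
#doc correctly, QuickReduct(["c0", "c1", "c2"], D) returns the selected subset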
def trainTrees(numTrees, Xt, yt):
X_train, X_test, y_train, y_test = train_test_split(Xt, yt, test_size=0.500000, random_state=42)
half = len(y_train)
sampleSize = (int)(half*0.8)
#can be changed
DT = []
#create pool of decision trees
for count in range(numTrees):
tr,tt,te,TT = train_test_split(X_train, y_train, test_size=0.125, random_state=42)
dt = tree.DecisionTreeClassifier()
dt.fit(tr,te)
DT.append(dt)
DecisionTable = [[0 for x in range(numTrees)]for x in range(half)]
for index in range(numTrees):
for doc in range(half):
dt = DT[index]
b = dt.predict(X_train.iloc[doc])
DecisionTable[doc][index] = 1 if y_train.iloc[doc] == b else 0
slt = QuickReduct(DT,DecisionTable)
return slt
def randomForest(file):
prepData = pd.read_csv(file)#,sep = ' ')
print 'finished read data'
headers = prepData.columns.values
#print("heaaders....")
#print(headers)
features = np.delete(headers,-1)
#print type(features)
#features = np.delete(headers,0)
#print("headers -1 ...")
#print(len(headers))
targetH = headers[-1]
#print(targetH)
data = prepData[features]
target = prepData[targetH]
#print(target)
countCat = max(target)+1
#print data
#print target
print 'start training'
X_train, X_test, y_train, y_test = train_test_split(data, target, test_size=0.200000, random_state=42)
pool = trainTrees(3,X_train,y_train)
#print len(pool)
print 'finished training'
correctly = 0.0
"""create confusion table of this training"""
confTable = [[[0 for x in range(2)]for x in range(2)]for x in range(countCat)]
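    #confTable[cat] is laid out as [[TP, FP], [FN, TN]] for each category cat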
for index in range(len(y_test)):
countResult = [0 for x in range(countCat)]
for t in pool:
result = t.predict(X_test.iloc[index])
countResult[result] = countResult[result]+1
maxIndex = countResult.index(max(countResult))
#print type(y_test)
yti = y_test.iloc[index]
#print yti, maxIndex
if(yti == maxIndex):
correctly = correctly+1
confTable[yti][0][0] = confTable[yti][0][0]+1 #TP
for i in range(countCat):
if i != yti:
confTable[i][1][1] = confTable[i][1][1]+1 #TN of else
else:
confTable[yti][0][1] = confTable[yti][0][1]+1 #FP of index
confTable[maxIndex][1][0] = confTable[maxIndex][1][0]+1 #FN of maxIndex
"""start computing accuracy, precision, recall"""
TP,TN,FN,FP = 0,0,0,0
print 'finished'
for index in range(countCat):
#print index
TP = TP+confTable[index][0][0]
TN = TN+confTable[index][1][1]
FN = FN+confTable[index][1][0]
FP = FP+confTable[index][0][1]
#print TP, FP, FN, TN
TP = (float)(TP)/(float)(len(data))
FP = (float)(FP)/(float)(len(data))
FN = (float)(FN)/(float)(len(data))
TN = (float)(TN)/(float)(len(data))
precision = TP/(TP+FP)
recall = TP/(TP+FN)
print ("precision %f" %(precision))
print ("recall %f" %(recall))
print ("correctly classified %f" %(correctly/len(y_test)))
#this is the accuracy by definition, include the TN into computation
print ("accuracy %f" %((TP+TN)/(TP+FP+FN+TN)))
return pool,confTable,precision, recall, (correctly/len(y_test))
"""print confusion table of this training"""
def printConfusionTable(confTable):
for i in range(len(confTable)):
print i
try:
precision = (float)(confTable[i][0][0])/(float)(confTable[i][0][0]+confTable[i][0][1])
except ZeroDivisionError:
            precision = 'not measurable'
try:
recall = (float)(confTable[i][0][0])/(float)(confTable[i][0][0]+confTable[i][1][0])
except ZeroDivisionError:
recall = 'not measurable'
print '['+str(confTable[i][0][0])+'|'+str(confTable[i][0][1])+']'
print '['+str(confTable[i][1][0])+'|'+str(confTable[i][1][1])+']'
print "precision ", precision
print "recall ", recall
"""predicted another instance"""
def randomForestPredicted(X,numCat,pool):
y = []
for index in range(len(X)):
countResult = [0 for x in range(numCat)]
for t in pool:
try:
result = t.predict(X.iloc[index])
countResult[result] = countResult[result]+1
except ValueError:
pass
maxIndex = countResult.index(max(countResult))
y.append(maxIndex)
return y
if __name__ == '__main__':
    p, ct, precision, recall, accuracy = randomForest("preprocessed_data.csv")
printConfusionTable(ct)
testData = pd.read_csv('preprocessed_testing.csv')#_cut.csv')
#print testData
#data = pd.np.array(testData)
#print data
#data = np.delete(data,-1,axis=0)
y = randomForestPredicted(testData, 39, p)
#print y
from postprocessing import writeOutputToCSV
writeOutputToCSV('run_10trees.csv',y,'dictionary.txt')
    #printConfusionTable(ct)
#print randomForestPredicted([X_test.iloc[0],X_test.iloc[2]])
#print y_test.iloc[0], y_test.iloc[2]
| {
"content_hash": "b2063158637fd82eff843122348bd606",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 106,
"avg_line_length": 30.160194174757283,
"alnum_prop": 0.5974569451150813,
"repo_name": "HugoLG/SFCrimeClassification",
"id": "e16992da471ea14d5af06611499974ef38cb7dd0",
"size": "6213",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "randomforest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "24509"
},
{
"name": "Python",
"bytes": "39589"
}
],
"symlink_target": ""
} |
#!/usr/bin/python3.4
# -*-coding:Utf-8 -*
'''A script to be executed by Blender Python API to return Blender version'''
import bpy
print('<?xml version="1.0" encoding="UTF-8"?>')
print('<root>')
version = bpy.app.version
print('<version version="'+str(version[0])+'.'+str(version[1])+'-'\
+bpy.app.version_char+'" />')
print('</root>')
| {
"content_hash": "44395e56da4a5817b0a4d2dabcf50283",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 77,
"avg_line_length": 31.181818181818183,
"alnum_prop": 0.6326530612244898,
"repo_name": "CaptainDesAstres/Blender-Render-Manager",
"id": "685e54775f4685cf891d06f406ca5e6313dd6f24",
"size": "343",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Preferences/getter/getBlenderVersion.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "184095"
}
],
"symlink_target": ""
} |
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('image46.xlsx')
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file with image(s)."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.insert_image('E9', self.image_dir + 'red.png', {'object_position': 4, 'y_offset': 4})
worksheet.set_row(8, 30, None, {'hidden': True})
workbook.close()
self.assertExcelEqual()
| {
"content_hash": "9d69ab8afd5d64bc4725c7e8a278c7c9",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 103,
"avg_line_length": 25.535714285714285,
"alnum_prop": 0.6517482517482518,
"repo_name": "jmcnamara/XlsxWriter",
"id": "36a541d2a6830142b31704fd739cb1e2351370d1",
"size": "928",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "xlsxwriter/test/comparison/test_image46.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5113"
},
{
"name": "CSS",
"bytes": "16544"
},
{
"name": "HTML",
"bytes": "13100"
},
{
"name": "Makefile",
"bytes": "7748"
},
{
"name": "Perl",
"bytes": "3503"
},
{
"name": "Python",
"bytes": "2807230"
},
{
"name": "Shell",
"bytes": "7964"
}
],
"symlink_target": ""
} |
from setuptools import setup
#with open("README.rst", 'r') as readme_file:
# readme = readme_file.read()
readme = """Noto font tools are a set of scripts useful for release
engineering of Noto and similar fonts"""
setup(name='nototools',
version='0.0.1',
description='Noto font tools',
license="Apache",
long_description=readme,
author='Noto Authors',
author_email='[email protected]',
url='https://code.google.com/p/noto/',
# more examples here http://docs.python.org/distutils/examples.html#pure-python-distribution-by-package
packages=['nototools'],
install_requires=[
'fontTools',
# On Mac OS X these need to be installed with homebrew
# 'cairo',
# 'pango',
# 'pygtk'
],
dependency_links=['https://github.com/behdad/fontTools/tarball/master#egg=fontTools-2.5'],
package_data={
'nototools': [
'nototools/*.sh',
]
},
# $ grep "def main(" nototools/* | cut -d: -f1
scripts=['nototools/autofix_for_release.py',
'nototools/coverage.py',
'nototools/create_image.py',
'nototools/decompose_ttc.py',
'nototools/drop_hints.py',
'nototools/dump_otl.py',
'nototools/fix_khmer_and_lao_coverage.py',
'nototools/fix_noto_cjk_thin.py',
'nototools/generate_sample_text.py',
'nototools/generate_website_data.py',
'nototools/merge_noto.py',
'nototools/noto_lint.py',
'nototools/scale.py',
'nototools/subset.py',
'nototools/subset_symbols.py',
'nototools/test_vertical_extents.py'])
| {
"content_hash": "37ceb2dee4f09d3daaa69fe5f0d146f3",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 109,
"avg_line_length": 38,
"alnum_prop": 0.5666293393057111,
"repo_name": "davelab6/nototools",
"id": "9348f2433f7ca4607c4575da60b0286933ba473d",
"size": "1834",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "620"
},
{
"name": "Makefile",
"bytes": "3424"
},
{
"name": "Python",
"bytes": "492596"
},
{
"name": "Shell",
"bytes": "3802"
}
],
"symlink_target": ""
} |
import os
import multiprocessing
import serial
def start_dosenet():
os.system('sudo bash /home/pi/dosenet-raspberrypi/pocket.sh start')
def start_D3S():
os.system('sudo bash /home/pi/dosenet-raspberrypi/D3S.sh start')
if __name__ == '__main__':
print('Waiting for NTP to be synced...')
os.system('sudo service ntp stop')
os.system('sudo timeout 60s ntpd -gq')
os.system('sudo service ntp start')
try:
ser = serial.Serial('/dev/ttyACM0')
ser.flushInput()
ser.close()
    except:
        # serial device may be absent or busy; flushing the input is best-effort
        pass
p = multiprocessing.Process(target=start_D3S, args=())
t = multiprocessing.Process(target=start_dosenet, args=())
try:
print('Starting D3S script process')
p.start()
print('Starting Pocket Geiger script process')
t.start()
print('started')
p.join()
t.join()
print('we can reboot here')
except:
pass
| {
"content_hash": "fa99e3c52b67675e8fc9edfba053204e",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 71,
"avg_line_length": 25.62162162162162,
"alnum_prop": 0.5991561181434599,
"repo_name": "tybtab/dosenet-raspberrypi",
"id": "0c36df2924a961f72d314365378fd9f33e936bd6",
"size": "970",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "master_manager.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "123501"
},
{
"name": "Shell",
"bytes": "12371"
}
],
"symlink_target": ""
} |
"""
Person Registry, Controllers
@see: U{http://eden.sahanafoundation.org/wiki/BluePrintVITA}
"""
module = request.controller
resourcename = request.function
# -----------------------------------------------------------------------------
# Options Menu (available in all Functions' Views)
def s3_menu_postp():
# @todo: rewrite this for new framework
menu_selected = []
group_id = s3mgr.get_session("pr", "group")
if group_id:
group = s3db.pr_group
query = (group.id == group_id)
record = db(query).select(group.id, group.name, limitby=(0, 1)).first()
if record:
name = record.name
menu_selected.append(["%s: %s" % (T("Group"), name), False,
URL(f="group",
args=[record.id])])
person_id = s3mgr.get_session("pr", "person")
if person_id:
person = s3db.pr_person
query = (person.id == person_id)
record = db(query).select(person.id, limitby=(0, 1)).first()
if record:
person_represent = s3db.pr_person_represent
name = person_represent(record.id)
menu_selected.append(["%s: %s" % (T("Person"), name), False,
URL(f="person",
args=[record.id])])
if menu_selected:
menu_selected = [T("Open recent"), True, None, menu_selected]
response.menu_options.append(menu_selected)
# -----------------------------------------------------------------------------
def index():
""" Module's Home Page """
try:
module_name = settings.modules[module].name_nice
except:
module_name = T("Person Registry")
# Load Model
s3db.table("pr_address")
def prep(r):
if r.representation == "html":
if not r.id and not r.method:
r.method = "search"
else:
redirect(URL(f="person", args=request.args))
return True
s3.prep = prep
def postp(r, output):
if isinstance(output, dict):
# Add information for Dashboard
pr_gender_opts = s3db.pr_gender_opts
pr_age_group_opts = s3db.pr_age_group_opts
table = db.pr_person
gender = []
for g_opt in pr_gender_opts:
query = (table.deleted == False) & \
(table.gender == g_opt)
count = db(query).count()
gender.append([str(pr_gender_opts[g_opt]), int(count)])
age = []
for a_opt in pr_age_group_opts:
query = (table.deleted == False) & \
(table.age_group == a_opt)
count = db(query).count()
age.append([str(pr_age_group_opts[a_opt]), int(count)])
total = int(db(table.deleted == False).count())
output.update(module_name=module_name,
gender=json.dumps(gender),
age=json.dumps(age),
total=total)
if r.interactive:
if not r.component:
label = READ
else:
label = UPDATE
linkto = r.resource.crud._linkto(r)("[id]")
s3.actions = [
dict(label=str(label), _class="action-btn", url=str(linkto))
]
r.next = None
return output
s3.postp = postp
output = s3_rest_controller("pr", "person")
response.view = "pr/index.html"
response.title = module_name
return output
# -----------------------------------------------------------------------------
def person():
""" RESTful CRUD controller """
# Enable this to allow migration of users between instances
#s3.filter = (s3db.pr_person.pe_id == s3db.pr_person_user.pe_id) & \
#(s3db.auth_user.id == s3db.pr_person_user.user_id) & \
#(s3db.auth_user.registration_key != "disabled")
# Organisation Dependent Fields
set_org_dependent_field = settings.set_org_dependent_field
set_org_dependent_field("pr_person_details", "father_name")
set_org_dependent_field("pr_person_details", "mother_name")
set_org_dependent_field("pr_person_details", "affiliations")
set_org_dependent_field("pr_person_details", "company")
# Custom Method for Contacts
s3db.set_method(module, resourcename,
method="contacts",
action=s3db.pr_contacts)
def prep(r):
if r.representation == "json" and \
not r.component and session.s3.filter_staff:
person_ids = session.s3.filter_staff
session.s3.filter_staff = None
r.resource.add_filter = (~(db.pr_person.id.belongs(person_ids)))
elif r.interactive:
if r.representation == "popup":
# Hide "pe_label" and "missing" fields in person popups
r.table.pe_label.readable = False
r.table.pe_label.writable = False
r.table.missing.readable = False
r.table.missing.writable = False
# S3SQLCustomForm breaks popup return, so disable
s3db.clear_config("pr_person", "crud_form")
if r.component_name == "config":
_config = s3db.gis_config
s3db.gis_config_form_setup()
# Name will be generated from person's name.
_config.name.readable = _config.name.writable = False
# Hide Location
_config.region_location_id.readable = _config.region_location_id.writable = False
elif r.component_name == "competency":
ctable = s3db.hrm_competency
ctable.organisation_id.writable = False
ctable.skill_id.comment = None
elif r.component_name == "saved_search":
if r.method == "load":
if r.component_id:
table = db.pr_saved_search
record = db(table.id == r.component_id).select(table.url,
limitby=(0, 1)
).first()
if record:
redirect(record.url)
else:
raise HTTP(404)
elif r.id:
r.table.volunteer.readable = True
r.table.volunteer.writable = True
return True
s3.prep = prep
def postp(r, output):
if r.component_name == "saved_search":
s3_action_buttons(r)
s3.actions.append(
dict(url=URL(args=r.args + ["[id]", "load"]),
label=str(T("Load")),
_class="action-btn")
)
return output
s3.postp = postp
s3db.configure("pr_group_membership",
list_fields=["id",
"group_id",
"group_head",
"description"
])
# Basic tabs
tabs = [(T("Basic Details"), None),
(T("Address"), "address"),
#(T("Contacts"), "contact"),
(T("Contact Details"), "contacts"),
(T("Images"), "image"),
(T("Identity"), "identity"),
(T("Education"), "education"),
(T("Groups"), "group_membership"),
(T("Journal"), "note"),
(T("Skills"), "competency"),
(T("Training"), "training"),
(T("Saved Searches"), "saved_search"),
]
# Configuration tabs
tabs.append((T("Map Settings"), "config"))
s3db.configure("pr_person", listadd=False, insertable=True)
output = s3_rest_controller(main="first_name",
extra="last_name",
rheader=lambda r: \
s3db.pr_rheader(r, tabs=tabs))
return output
# -----------------------------------------------------------------------------
def address():
"""
RESTful controller to allow creating/editing of address records within
contacts()
"""
# CRUD pre-process
def prep(r):
controller = request.get_vars.get("controller", "pr")
person_id = request.get_vars.get("person", None)
if person_id and controller:
s3db.configure("pr_address",
create_next=URL(c=controller,
f="person",
args=[person_id, "contacts"]),
update_next=URL(c=controller,
f="person",
args=[person_id, "contacts"])
)
if r.method == "create":
table = s3db.pr_person
query = (table.id == person_id)
pe_id = db(query).select(table.pe_id,
limitby=(0, 1)).first().pe_id
s3db.pr_address.pe_id.default = pe_id
return True
s3.prep = prep
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def contact():
"""
RESTful controller to allow creating/editing of contact records within
contacts()
"""
# CRUD pre-process
def prep(r):
controller = request.get_vars.get("controller", "pr")
person_id = request.get_vars.get("person", None)
if person_id:
s3db.configure("pr_contact",
create_next=URL(c=controller,
f="person",
args=[person_id, "contacts"]),
update_next=URL(c=controller,
f="person",
args=[person_id, "contacts"])
)
if r.method == "create":
table = s3db.pr_person
query = (table.id == person_id)
pe_id = db(query).select(table.pe_id,
limitby=(0, 1)).first().pe_id
s3db.pr_contact.pe_id.default = pe_id
return True
s3.prep = prep
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def contact_emergency():
"""
RESTful controller to allow creating/editing of emergency contact
records within contacts()
"""
# CRUD pre-process
def prep(r):
controller = request.get_vars.get("controller", "pr")
person_id = request.get_vars.get("person", None)
if person_id:
s3db.configure("pr_contact_emergency",
create_next=URL(c=controller,
f="person",
args=[person_id, "contacts"]),
update_next=URL(c=controller,
f="person",
args=[person_id, "contacts"])
)
if r.method == "create":
table = s3db.pr_person
query = (table.id == person_id)
pe_id = db(query).select(table.pe_id,
limitby=(0, 1)).first().pe_id
s3db.pr_contact_emergency.pe_id.default = pe_id
return True
s3.prep = prep
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def person_search():
"""
Person REST controller
- limited to just search.json for use in Autocompletes
- allows differential access permissions
"""
s3.prep = lambda r: r.representation == "json" and \
r.method == "search"
return s3_rest_controller(module, "person")
# -----------------------------------------------------------------------------
def group():
""" RESTful CRUD controller """
tablename = "pr_group"
table = s3db[tablename]
s3.filter = (table.system == False) # do not show system groups
s3db.configure("pr_group_membership",
list_fields=["id",
"person_id",
"group_head",
"description"
])
rheader = lambda r: s3db.pr_rheader(r, tabs = [(T("Group Details"), None),
(T("Address"), "address"),
(T("Contact Data"), "contact"),
(T("Members"), "group_membership")
])
output = s3_rest_controller(rheader=rheader)
return output
# -----------------------------------------------------------------------------
def image():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def education():
""" RESTful CRUD controller """
tablename = "pr_education"
table = s3db[tablename]
return s3_rest_controller("pr", "education")
# -----------------------------------------------------------------------------
#def contact():
# """ RESTful CRUD controller """
#
# table = s3db.pr_contact
#
# table.pe_id.label = T("Person/Group")
# table.pe_id.readable = True
# table.pe_id.writable = True
#
# return s3_rest_controller()
# -----------------------------------------------------------------------------
def presence():
"""
RESTful CRUD controller
- needed for Map Popups (no Menu entry for direct access)
@deprecated - People now use Base Location pr_person.location_id
"""
table = s3db.pr_presence
# Settings suitable for use in Map Popups
table.pe_id.readable = True
table.pe_id.label = "Name"
table.pe_id.represent = s3db.pr_person_represent
table.observer.readable = False
table.presence_condition.readable = False
# @ToDo: Add Skills
return s3_rest_controller()
# -----------------------------------------------------------------------------
def pentity():
"""
RESTful CRUD controller
- limited to just search.json for use in Autocompletes
"""
s3.prep = lambda r: r.representation in ("s3json", "json", "xml")
return s3_rest_controller()
# -----------------------------------------------------------------------------
def affiliation():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def role():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def tooltip():
""" Ajax tooltips """
if "formfield" in request.vars:
response.view = "pr/ajaxtips/%s.html" % request.vars.formfield
return dict()
# -----------------------------------------------------------------------------
def saved_search():
"""
REST controller for saving and loading saved searches
"""
return s3_rest_controller()
# END =========================================================================
| {
"content_hash": "3778dc265d9de241c522bfdde5fcb3c4",
"timestamp": "",
"source": "github",
"line_count": 439,
"max_line_length": 97,
"avg_line_length": 35.970387243735765,
"alnum_prop": 0.4453802799062757,
"repo_name": "madhurauti/Map-Polygon",
"id": "87d93ba6dee997d30446d6fd0a17ef32c3fb8076",
"size": "15816",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "controllers/pr.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "15527353"
},
{
"name": "PHP",
"bytes": "15220"
},
{
"name": "Perl",
"bytes": "2202"
},
{
"name": "Python",
"bytes": "23300695"
},
{
"name": "Racket",
"bytes": "166"
}
],
"symlink_target": ""
} |
"""
There are two definitions of the usable area on the SWIR detector:
'illuminated':
Detector area illuminated by external sources, defined as
an rectangular area where the signal is at least 50% of the
maximum signal. Coordinates: rows [11:228], columns [16:991].
'level2':
A smaller area used in official SWIR level 1B (ir)radiance
products. Coordinates: rows [12:227], columns [20:980].
Notes
-----
Row 257 of the SWIR detector is neglected.
"""
__all__ = ['coords', 'mask']
import numpy as np
def coords(mode='illuminated', band='78') -> slice:
"""
Return slice defining the illuminated region on the SWIR detector
Parameters
----------
mode : {'illuminated', 'level2'}, optional
default is 'illuminated'
band : str, optional
select band 7 or 8, default is both bands
"""
if mode == 'level2':
if band == '7':
return np.s_[12:227, 20:500]
if band == '8':
return np.s_[12:227, :480]
# else
return np.s_[12:227, 20:980]
if band == '7':
return np.s_[11:228, 16:500]
if band == '8':
return np.s_[11:228, :491]
# else
return np.s_[11:228, 16:991]
def mask(mode='illuminated', band='78'):
"""
Return mask of the illuminated region, where the value of the illuminated
pixels are set to True.
Parameters
----------
mode : {'illuminated', 'level2'}, optional
default is 'illuminated'
band : str, optional
select band 7 or 8, default is both bands
"""
if band in ('7', '8'):
res = np.full((256, 500), False)
else:
res = np.full((256, 1000), False)
res[coords(mode, band)] = True
return res
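# A minimal usage sketch (values below are illustrative):
#     region = mask('level2', band='7')   # boolean array of shape (256, 500)
#     rows, cols = coords('illuminated')  # slices covering both bands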
| {
"content_hash": "c473f8cbf37c4ca694fbf4841562cb50",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 77,
"avg_line_length": 25.63235294117647,
"alnum_prop": 0.5846242111302352,
"repo_name": "rmvanhees/pys5p",
"id": "19eec578ae8d08423c87a71de5e7c14be2f3a40c",
"size": "1942",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/pys5p/swir_region.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "176223"
}
],
"symlink_target": ""
} |
from instruments.AWGBase import AWG, AWGDriver
from atom.api import Int, Constant
class Tek7000(AWG):
numChannels = Int(default=2)
seqFileExt = Constant('.awg')
translator = Constant('TekPattern')
| {
"content_hash": "b0ac4b1e247d090cd1784d545259ca7d",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 46,
"avg_line_length": 28.714285714285715,
"alnum_prop": 0.7661691542288557,
"repo_name": "rmcgurrin/PyQLab",
"id": "401bc8c71a6f7b9abbd8e90c6e18c3ca4a32f75b",
"size": "201",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "instruments/drivers/Tek7000.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "104179"
}
],
"symlink_target": ""
} |
from distutils.core import setup
setup(name = 'simpleon',
version ='0.2.0',
description = "SimpleON (Simple Object Notation) format decoder",
author = "Xinhao Yuan",
author_email = "[email protected]",
license = "MIT",
packages = ['simpleon' ],
package_dir = { 'simpleon' : 'simpleon-py' }
)
| {
"content_hash": "006ebb1a21b62d7306adb26792d6c25f",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 71,
"avg_line_length": 31.272727272727273,
"alnum_prop": 0.5988372093023255,
"repo_name": "xinhaoyuan/simpleon",
"id": "8a7a6fdbe04eb770d603bd08c0ae511c6745c839",
"size": "344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "21333"
},
{
"name": "CMake",
"bytes": "200"
},
{
"name": "Python",
"bytes": "17540"
}
],
"symlink_target": ""
} |
"""Approvals checking logic."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from grr_response_core import config
from grr_response_core.lib import rdfvalue
from grr_response_server import access_control
from grr_response_server import data_store
from grr_response_server.authorization import client_approval_auth
from grr_response_server.rdfvalues import objects as rdf_objects
def BuildLegacySubject(subject_id, approval_type):
"""Builds a legacy AFF4 urn string for a given subject and approval type."""
at = rdf_objects.ApprovalRequest.ApprovalType
if approval_type == at.APPROVAL_TYPE_CLIENT:
return "aff4:/%s" % subject_id
elif approval_type == at.APPROVAL_TYPE_HUNT:
return "aff4:/hunts/%s" % subject_id
elif approval_type == at.APPROVAL_TYPE_CRON_JOB:
return "aff4:/cron/%s" % subject_id
raise ValueError("Invalid approval type.")
def _CheckExpired(approval_request):
if approval_request.expiration_time < rdfvalue.RDFDatetime.Now():
raise access_control.UnauthorizedAccess(
"Approval request is expired.",
subject=BuildLegacySubject(approval_request.subject_id,
approval_request.approval_type))
def _CheckHasEnoughGrants(approval_request):
approvers_required = config.CONFIG["ACL.approvers_required"]
approvers = set(g.grantor_username for g in approval_request.grants)
missing = approvers_required - len(approvers)
if missing > 0:
msg = ("Need at least %d additional approver%s for access." %
(missing, "s" if missing > 1 else ""))
raise access_control.UnauthorizedAccess(
msg,
subject=BuildLegacySubject(approval_request.subject_id,
approval_request.approval_type))
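# Example (illustrative): with ACL.approvers_required = 2 and a single distinct
# grantor, missing == 1 and the raised error reads
# "Need at least 1 additional approver for access."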
def _CheckHasAdminApprovers(approval_request):
grantors = set(g.grantor_username for g in approval_request.grants)
for g in grantors:
user_obj = data_store.REL_DB.ReadGRRUser(g)
if user_obj.user_type == user_obj.UserType.USER_TYPE_ADMIN:
return True
raise access_control.UnauthorizedAccess(
"Need at least 1 admin approver for access.",
subject=BuildLegacySubject(approval_request.subject_id,
approval_request.approval_type))
def CheckClientApprovalRequest(approval_request):
"""Checks if a client approval request is granted."""
_CheckExpired(approval_request)
_CheckHasEnoughGrants(approval_request)
if not client_approval_auth.CLIENT_APPROVAL_AUTH_MGR.IsActive():
return True
token = access_control.ACLToken(username=approval_request.requestor_username)
approvers = set(g.grantor_username for g in approval_request.grants)
labels = sorted(
data_store.REL_DB.ReadClientLabels(approval_request.subject_id),
key=lambda l: l.name)
for label in labels:
client_approval_auth.CLIENT_APPROVAL_AUTH_MGR.CheckApproversForLabel(
token, rdfvalue.RDFURN(approval_request.subject_id),
approval_request.requestor_username, approvers, label.name)
return True
def CheckHuntApprovalRequest(approval_request):
"""Checks if a hunt approval request is granted."""
_CheckExpired(approval_request)
_CheckHasEnoughGrants(approval_request)
_CheckHasAdminApprovers(approval_request)
def CheckCronJobApprovalRequest(approval_request):
"""Checks if a cron job approval request is granted."""
_CheckExpired(approval_request)
_CheckHasEnoughGrants(approval_request)
_CheckHasAdminApprovers(approval_request)
def CheckApprovalRequest(approval_request):
"""Checks if an approval request is granted."""
at = rdf_objects.ApprovalRequest.ApprovalType
if approval_request.approval_type == at.APPROVAL_TYPE_CLIENT:
return CheckClientApprovalRequest(approval_request)
elif approval_request.approval_type == at.APPROVAL_TYPE_HUNT:
return CheckHuntApprovalRequest(approval_request)
elif approval_request.approval_type == at.APPROVAL_TYPE_CRON_JOB:
return CheckCronJobApprovalRequest(approval_request)
else:
raise ValueError(
"Invalid approval type: %s" % approval_request.approval_type)
| {
"content_hash": "f5fadca76f3500cee2a20f34fd2b41f9",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 79,
"avg_line_length": 36.11304347826087,
"alnum_prop": 0.7358535998073682,
"repo_name": "dunkhong/grr",
"id": "c61286f25c104f0225482a967c2c081d189b25ce",
"size": "4175",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "grr/server/grr_response_server/gui/approval_checks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "227"
},
{
"name": "Batchfile",
"bytes": "882"
},
{
"name": "C",
"bytes": "11321"
},
{
"name": "C++",
"bytes": "54535"
},
{
"name": "CSS",
"bytes": "36745"
},
{
"name": "Dockerfile",
"bytes": "1822"
},
{
"name": "HCL",
"bytes": "8451"
},
{
"name": "HTML",
"bytes": "193751"
},
{
"name": "JavaScript",
"bytes": "12795"
},
{
"name": "Jupyter Notebook",
"bytes": "199190"
},
{
"name": "Makefile",
"bytes": "3139"
},
{
"name": "PowerShell",
"bytes": "1984"
},
{
"name": "Python",
"bytes": "7430923"
},
{
"name": "Roff",
"bytes": "444"
},
{
"name": "Shell",
"bytes": "49155"
},
{
"name": "Standard ML",
"bytes": "8172"
},
{
"name": "TSQL",
"bytes": "10560"
},
{
"name": "TypeScript",
"bytes": "56756"
}
],
"symlink_target": ""
} |
"""Beamformers for source localization."""
from ._lcmv import (make_lcmv, apply_lcmv, apply_lcmv_epochs, apply_lcmv_raw,
lcmv, lcmv_epochs, lcmv_raw, tf_lcmv)
from ._dics import dics, dics_epochs, dics_source_power, tf_dics
from ._rap_music import rap_music
| {
"content_hash": "15d374db6e4bd80c06b8282bd6b5bc48",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 77,
"avg_line_length": 46.5,
"alnum_prop": 0.6881720430107527,
"repo_name": "jaeilepp/mne-python",
"id": "4582fbd71a2013f619a351b94affb03353ac8a4b",
"size": "279",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mne/beamformer/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Csound Document",
"bytes": "69806"
},
{
"name": "Makefile",
"bytes": "3928"
},
{
"name": "Python",
"bytes": "6113850"
},
{
"name": "Shell",
"bytes": "936"
}
],
"symlink_target": ""
} |
"""autogenerated by genpy from gf_beacon/gf_encodingRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class gf_encodingRequest(genpy.Message):
_md5sum = "b4f5ff271c45bb829d5e504e08e16e34"
_type = "gf_beacon/gf_encodingRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """float64 lng_deg
float64 lat_deg
float64 alt_agl_m
float64 speed_mph
float64 heading_deg
float64 battery_level
uint16 flying_state_on
uint16 return_to_home_state_on
uint16 forced_landing_state_on
"""
__slots__ = ['lng_deg','lat_deg','alt_agl_m','speed_mph','heading_deg','battery_level','flying_state_on','return_to_home_state_on','forced_landing_state_on']
_slot_types = ['float64','float64','float64','float64','float64','float64','uint16','uint16','uint16']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
lng_deg,lat_deg,alt_agl_m,speed_mph,heading_deg,battery_level,flying_state_on,return_to_home_state_on,forced_landing_state_on
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(gf_encodingRequest, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.lng_deg is None:
self.lng_deg = 0.
if self.lat_deg is None:
self.lat_deg = 0.
if self.alt_agl_m is None:
self.alt_agl_m = 0.
if self.speed_mph is None:
self.speed_mph = 0.
if self.heading_deg is None:
self.heading_deg = 0.
if self.battery_level is None:
self.battery_level = 0.
if self.flying_state_on is None:
self.flying_state_on = 0
if self.return_to_home_state_on is None:
self.return_to_home_state_on = 0
if self.forced_landing_state_on is None:
self.forced_landing_state_on = 0
else:
self.lng_deg = 0.
self.lat_deg = 0.
self.alt_agl_m = 0.
self.speed_mph = 0.
self.heading_deg = 0.
self.battery_level = 0.
self.flying_state_on = 0
self.return_to_home_state_on = 0
self.forced_landing_state_on = 0
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_6d3H().pack(_x.lng_deg, _x.lat_deg, _x.alt_agl_m, _x.speed_mph, _x.heading_deg, _x.battery_level, _x.flying_state_on, _x.return_to_home_state_on, _x.forced_landing_state_on))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
_x = self
start = end
end += 54
(_x.lng_deg, _x.lat_deg, _x.alt_agl_m, _x.speed_mph, _x.heading_deg, _x.battery_level, _x.flying_state_on, _x.return_to_home_state_on, _x.forced_landing_state_on,) = _get_struct_6d3H().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_6d3H().pack(_x.lng_deg, _x.lat_deg, _x.alt_agl_m, _x.speed_mph, _x.heading_deg, _x.battery_level, _x.flying_state_on, _x.return_to_home_state_on, _x.forced_landing_state_on))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
_x = self
start = end
end += 54
(_x.lng_deg, _x.lat_deg, _x.alt_agl_m, _x.speed_mph, _x.heading_deg, _x.battery_level, _x.flying_state_on, _x.return_to_home_state_on, _x.forced_landing_state_on,) = _get_struct_6d3H().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_6d3H = None
def _get_struct_6d3H():
global _struct_6d3H
if _struct_6d3H is None:
_struct_6d3H = struct.Struct("<6d3H")
return _struct_6d3H
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from gf_beacon/gf_encodingResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class gf_encodingResponse(genpy.Message):
_md5sum = "d37e4f1e46761defdf5b003341acd010"
_type = "gf_beacon/gf_encodingResponse"
_has_header = False #flag to mark the presence of a Header object
_full_text = """string encoded
"""
__slots__ = ['encoded']
_slot_types = ['string']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
encoded
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(gf_encodingResponse, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.encoded is None:
self.encoded = ''
else:
self.encoded = ''
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self.encoded
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.encoded = str[start:end].decode('utf-8')
else:
self.encoded = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self.encoded
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.encoded = str[start:end].decode('utf-8')
else:
self.encoded = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
class gf_encoding(object):
_type = 'gf_beacon/gf_encoding'
_md5sum = '916bededc1f7b96442f9b7ace1020840'
_request_class = gf_encodingRequest
_response_class = gf_encodingResponse
| {
"content_hash": "e9de13f5f9d82824b357f1011a80eb11",
"timestamp": "",
"source": "github",
"line_count": 269,
"max_line_length": 213,
"avg_line_length": 36.02230483271376,
"alnum_prop": 0.6380804953560372,
"repo_name": "geofrenzy/utm-mbsb",
"id": "e69ac3f38e28b4274891ae72a0ba4cfd8c6cb6b5",
"size": "9744",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ros-src/catkin_ws/install/lib/python2.7/dist-packages/gf_beacon/srv/_gf_encoding.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "608706"
},
{
"name": "C++",
"bytes": "58703"
},
{
"name": "CMake",
"bytes": "94973"
},
{
"name": "Common Lisp",
"bytes": "57647"
},
{
"name": "JavaScript",
"bytes": "18946"
},
{
"name": "Makefile",
"bytes": "161931"
},
{
"name": "Python",
"bytes": "139654"
},
{
"name": "Roff",
"bytes": "1117"
},
{
"name": "Shell",
"bytes": "14665"
}
],
"symlink_target": ""
} |
from PyQt4 import QtCore, QtGui, Qt
class DragLabel(QtGui.QLabel):
def __init__(self, text, parent):
super(DragLabel, self).__init__(text, parent)
self.setMinimumSize(7 * (len(self.text().encode('utf-8')) + len(self.text())), 30)
self.setAlignment(Qt.Qt.AlignCenter)
self.setAutoFillBackground(True)
self.setFrameShape(QtGui.QFrame.Panel)
self.setFrameShadow(QtGui.QFrame.Raised)
self.setStyleSheet("QLabel{"
"border:1px solid #000000;"
"background-color: #FF7F66;"
"height: 25px;"
"font-family: '微软雅黑';"
"color: #FFFFFF;"
"font-size: 14px;"
"}"
"QLabel:hover{"
"border:1px solid #9BBAAC;"
"}"
"QLabel:focus{"
"border:1px solid #7ECEFD;"
"}")
def mousePressEvent(self, event):
hotSpot = event.pos()
mimeData = QtCore.QMimeData()
mimeData.setText(self.text())
mimeData.setData('application/x-point',
'%d %d' % (self.pos().x(), self.pos().y()))
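        #The drop target (OkTagBox.dropEvent) parses this back into a QPoint,
        #e.g. '120 45' -> QPoint(120, 45)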
pixmap = QtGui.QPixmap(self.size())
self.render(pixmap)
drag = QtGui.QDrag(self)
drag.setMimeData(mimeData)
drag.setPixmap(pixmap)
drag.setHotSpot(hotSpot)
dropAction = drag.exec_(QtCore.Qt.CopyAction | QtCore.Qt.MoveAction, QtCore.Qt.CopyAction)
if dropAction == QtCore.Qt.MoveAction:
self.close()
self.update()
class OkTagBox(QtGui.QWidget):
def __init__(self, parent=None):
QtGui.QWidget.__init__(self, parent)
x = 25
y = 5
for word in "我的 熱門呢 誒反對 sdf sdf sdf sdfsdf sdfsd dfsf sdf sdf sdf sdfsdf sdfsd dfsf sdf sdf sdf sdf我的 熱門呢 誒反對 sdf sdf sdf sdfsdf sdfsd dfsf sdf sdf sdf sdfsdf sdfsd dfsf sdf sdf sdf sdf".split():
wordLabel = DragLabel(word, self)
if x >= (self.size().width() - wordLabel.minimumWidth()):
x = 25
y += 32
wordLabel.move(x, y)
wordLabel.show()
x += wordLabel.minimumWidth() + 2
newPalette = self.palette()
newPalette.setColor(QtGui.QPalette.Window, QtGui.QColor(50, 50, 50))
self.setPalette(newPalette)
self.setAcceptDrops(True)
def resizeEvent(self, event):
x = 25
y = 5
for wordLabel in self.children():
if x >= (event.size().width() - wordLabel.minimumWidth()):
x = 25
y += 32
wordLabel.move(x, y)
x += wordLabel.minimumWidth() + 2
self.setMinimumHeight(y+40)
def dragEnterEvent(self, event):
if event.mimeData().hasText():
if event.source() in self.children():
event.setDropAction(QtCore.Qt.MoveAction)
event.accept()
else:
event.acceptProposedAction()
else:
event.ignore()
    def dropEvent(self, event):
        if event.mimeData().hasText():
            mime = event.mimeData()
            pieces = mime.text().split()
            position = event.pos()
            # Recover the label's original position from the custom MIME
            # payload written by DragLabel.mousePressEvent.
            point = QtCore.QPoint()
            pointxy = mime.data('application/x-point').split(' ')
            if len(pointxy) == 2:
                point.setX(pointxy[0].toInt()[0])
                point.setY(pointxy[1].toInt()[0])
            for piece in pieces:
                # Re-create each dropped word; labels are restored to the
                # recorded origin rather than the computed drop position.
                newLabel = DragLabel(piece, self)
                newLabel.move(point)
                newLabel.show()
                position += QtCore.QPoint(newLabel.width(), 0)
            # A move within the same widget closes the source label (see
            # DragLabel.mousePressEvent); drops from outside are copies.
            if event.source() in self.children():
                event.setDropAction(QtCore.Qt.MoveAction)
                event.accept()
            else:
                event.acceptProposedAction()
        else:
            event.ignore()
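# A minimal launcher sketch (an assumption -- the original module defines no
# entry point; the window size is arbitrary):
if __name__ == '__main__':
    import sys
    app = QtGui.QApplication(sys.argv)
    box = OkTagBox()
    box.resize(480, 320)
    box.show()
    sys.exit(app.exec_())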
| {
"content_hash": "b44bbd0cc7110be3a79cab435e95b460",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 203,
"avg_line_length": 35.07017543859649,
"alnum_prop": 0.5167583791895948,
"repo_name": "ghold/OneKeySql",
"id": "817794ffe32facfb88c73c7018ebb6af74c0a053",
"size": "4038",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "onekey/OkTagWidget.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "228760"
}
],
"symlink_target": ""
} |
from matplotlib.pylab import *
import numpy as np
from pymc import *
from pymc.distributions.timeseries import *
from scipy.sparse import csc_matrix
from scipy import optimize
# <markdowncell>
# Asset prices have time-varying volatility (variance of day over day `returns`). In some periods, returns are highly variable, while in others very stable. Stochastic volatility models model this with a latent volatility variable, modeled as a stochastic process. The following model is similar to the one described in the No-U-Turn Sampler paper, Hoffman (2011) p21.
#
# $$ \sigma \sim Exponential(50) $$
#
# $$ \nu \sim Exponential(.1) $$
#
# $$ s_i \sim Normal(s_{i-1}, \sigma^{-2}) $$
#
# $$ log(\frac{y_i}{y_{i-1}}) \sim t(\nu, 0, exp(-2 s_i)) $$
#
# Here, $y$ is the daily return series and $s$ is the latent log
# volatility process.
# <markdowncell>
# ## Build Model
# <markdowncell>
# First we load some daily returns of the S&P 500.
# <codecell>
n = 400
returns = np.genfromtxt(get_data_file('pymc.examples', "data/SP500.csv"))[-n:]
returns[:5]
# <markdowncell>
# Specifying the model in pymc mirrors its statistical specification.
#
# However, it is easier to sample the scale of the log volatility process innovations, $\sigma$, on a log scale, so we create it using `TransformedVar` and use `logtransform`. `TransformedVar` creates one variable in the transformed space and one in the normal space. The one in the transformed space (here $\text{log}(\sigma) $) is the one over which sampling will occur, and the one in the normal space is the one to use throughout the rest of the model.
#
# It takes a variable name, a distribution and a transformation to use.
# <codecell>
model = Model()
with model:
sigma, log_sigma = model.TransformedVar(
'sigma', Exponential.dist(1. / .02, testval=.1),
logtransform)
nu = Exponential('nu', 1. / 10)
s = GaussianRandomWalk('s', sigma ** -2, shape=n)
r = T('r', nu, lam=exp(-2 * s), observed=returns)
# <markdowncell>
# ## Fit Model
#
# To get a decent scaling matrix for the Hamiltonian sampler, we find the Hessian at a point. The method `Model.fastd2logp` gives us a `Theano` compiled function that returns the matrix of 2nd derivatives.
#
# However, the 2nd derivatives for the degrees of freedom parameter, `nu`, are negative and thus not very informative and make the matrix non-positive definite, so we replace that entry with a reasonable guess at the scale. The interactions between `log_sigma`/`nu` and `s` are also not very useful, so we set them to zero.
#
# The Hessian matrix is also sparse, so we can get faster sampling by
# using a sparse scaling matrix. If you have `scikits.sparse` installed,
# convert the Hessian to a csc matrix by uncommenting the appropriate
# line below.
# <codecell>
H = model.fastd2logp()
def hessian(point, nusd):
h = H(Point(point))
h[1, 1] = nusd ** -2
h[:2, 2:] = h[2:, :2] = 0
# h = csc_matrix(h)
return h
# <markdowncell>
# For this model, the full maximum a posteriori (MAP) point is degenerate and has infinite density. However, if we fix `log_sigma` and `nu` it is no longer degenerate, so we find the MAP with respect to the volatility process, 's', keeping `log_sigma` and `nu` constant at their default values.
#
# We use L-BFGS because it is more efficient for high dimensional
# functions (`s` has n elements).
# <codecell>
with model:
start = find_MAP(vars=[s], fmin=optimize.fmin_l_bfgs_b)
# <markdowncell>
# We do a short initial run to get near the right area, then start again
# using a new Hessian at the new starting point to get faster sampling due
# to better scaling. We do a short run since this is an interactive
# example.
# <codecell>
with model:
step = NUTS(model.vars, hessian(start, 6))
def run(n=2000):
if n == "short":
n = 50
with model:
trace = sample(5, step, start, trace=model.vars + [sigma])
# Start next run at the last sampled position.
start2 = trace.point(-1)
step2 = HamiltonianMC(model.vars, hessian(start2, 6), path_length=4.)
trace = sample(n, step2, trace=trace)
# <codecell>
# figsize(12,6)
title(str(s))
plot(trace[s][::10].T, 'b', alpha=.03)
xlabel('time')
ylabel('log volatility')
# figsize(12,6)
traceplot(trace, model.vars[:-1])
if __name__ == '__main__':
run()
# <markdowncell>
# ## References
#
# 1. Hoffman & Gelman. (2011). [The No-U-Turn Sampler: Adaptively Setting
# Path Lengths in Hamiltonian Monte
# Carlo](http://arxiv.org/abs/1111.4246).
| {
"content_hash": "c3ab9169835faf13c9e2094f33c091a9",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 456,
"avg_line_length": 31.819444444444443,
"alnum_prop": 0.6874727193365343,
"repo_name": "kyleam/pymc3",
"id": "676ec3011025b0ce7c48442701e84fbb995fff9b",
"size": "4648",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pymc/examples/stochastic_volatility.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "308941"
},
{
"name": "Shell",
"bytes": "5286"
}
],
"symlink_target": ""
} |
import math
import pandas as pd
from sklearn import preprocessing
# A Note on SKLearn .transform() calls:
#
# Any time you transform your data, you lose the column header names.
# This actually makes complete sense. There are essentially two types
# of transformations, those that change the scale of your features,
# and those that change your features entire. Changing the scale would
# be like changing centimeters to inches. Changing the features would
# be like using PCA to reduce 300 columns to 30. In either case, the
# original column's units have been altered or no longer exist, so it's
# up to you to rename your columns after ANY transformation. Due to
# this, SKLearn returns an NDArray from *transform() calls.
def scaleFeatures(df):
# SKLearn contains many methods for transforming your features by
# scaling them (this is a type of pre-processing):
# RobustScaler, Normalizer, MinMaxScaler, MaxAbsScaler, StandardScaler...
# http://scikit-learn.org/stable/modules/classes.html#module-sklearn.preprocessing
#
# However in order to be effective at PCA, there are a few requirements
# that must be met, and which will drive the selection of your scaler.
    # PCA requires your data to be standardized -- in other words, its mean
    # is equal to 0, and it has ~unit variance.
#
# SKLearn's regular Normalizer doesn't zero out the mean of your data,
# it only clamps it, so it's inappropriate to use here (depending on
# your data). MinMaxScaler and MaxAbsScaler both fail to set a unit
# variance, so you won't be using them either. RobustScaler can work,
# again depending on your data (watch for outliers). For these reasons
# we're going to use the StandardScaler. Get familiar with it by visiting
# these two websites:
#
# http://scikit-learn.org/stable/modules/preprocessing.html#preprocessing-scaler
#
# http://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.StandardScaler.html#sklearn.preprocessing.StandardScaler
#
# ---------
# Feature scaling is the type of transformation that only changes the
# scale and not number of features, so we'll use the original dataset
# column names. However we'll keep in mind that the _units_ have been
# altered:
scaled = preprocessing.StandardScaler().fit_transform(df)
scaled = pd.DataFrame(scaled, columns=df.columns)
print "New Variances:\n", scaled.var()
print "New Describe:\n", scaled.describe()
return scaled
def drawVectors(transformed_features, components_, columns, plt, scaled):
if not scaled:
return plt.axes() # No cheating ;-)
num_columns = len(columns)
    # This function will project your *original* features (columns)
# onto your principal component feature-space, so that you can
# visualize how "important" each one was in the
# multi-dimensional scaling
# Scale the principal components by the max value in
# the transformed set belonging to that component
xvector = components_[0] * max(transformed_features[:,0])
yvector = components_[1] * max(transformed_features[:,1])
## visualize projections
    # Sort each column by its length. These are your *original*
# columns, not the principal components.
important_features = { columns[i] : math.sqrt(xvector[i]**2 + yvector[i]**2) for i in range(num_columns) }
important_features = sorted(zip(important_features.values(), important_features.keys()), reverse=True)
print "Features by importance:\n", important_features
ax = plt.axes()
for i in range(num_columns):
# Use an arrow to project each original feature as a
# labeled vector on your principal component axes
plt.arrow(0, 0, xvector[i], yvector[i], color='b', width=0.0005, head_width=0.02, alpha=0.75)
plt.text(xvector[i]*1.2, yvector[i]*1.2, list(columns)[i], color='b', alpha=0.75)
return ax
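# A minimal usage sketch (an assumption, not part of the original helper --
# `df` stands in for the assignment's DataFrame):
#
#   import matplotlib.pyplot as plt
#   from sklearn.decomposition import PCA
#
#   scaled = scaleFeatures(df)
#   pca = PCA(n_components=2)
#   T = pca.fit_transform(scaled)
#   ax = drawVectors(T, pca.components_, df.columns.values, plt, True)
#   ax.scatter(T[:, 0], T[:, 1], marker='o', alpha=0.75)
#   plt.show()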
| {
"content_hash": "eb7952e8cd6d998fecb60d1335f2c028",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 131,
"avg_line_length": 43.93103448275862,
"alnum_prop": 0.7344322344322345,
"repo_name": "FernanOrtega/DAT210x",
"id": "97ac92cd6669173d6c1381b68eb74375a81f52fb",
"size": "3822",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Module4/assignment2_helper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "139237"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_dressed_kavas_urdano.iff"
result.attribute_template_id = 9
result.stfName("npc_name","chiss_patron")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | {
"content_hash": "6fdc0c1eb0020b5c4620158958f02431",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 66,
"avg_line_length": 22.846153846153847,
"alnum_prop": 0.6936026936026936,
"repo_name": "anhstudios/swganh",
"id": "52aba922a35c3a6b81e3d8eac210c0a68ded0313",
"size": "442",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/mobile/shared_dressed_kavas_urdano.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
} |
class HTTPError(Exception): pass | {
"content_hash": "22d1fe82f6d95ae83d92bd1953f6256a",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 31,
"avg_line_length": 31,
"alnum_prop": 0.8709677419354839,
"repo_name": "olemis/brython",
"id": "3666af57c55ea1f0d666fa4933c1267b4f221ae1",
"size": "31",
"binary": false,
"copies": "18",
"ref": "refs/heads/master",
"path": "www/src/Lib/urllib/error.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "15757"
},
{
"name": "Groff",
"bytes": "21080"
},
{
"name": "HTML",
"bytes": "4919603"
},
{
"name": "JavaScript",
"bytes": "4654888"
},
{
"name": "Makefile",
"bytes": "61"
},
{
"name": "Python",
"bytes": "14166957"
},
{
"name": "R",
"bytes": "2918"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import bokeh.command.subcommands.secret as scsecret
from bokeh.command.bootstrap import main
def test_create():
import argparse
from bokeh.command.subcommand import Subcommand
obj = scsecret.Secret(parser=argparse.ArgumentParser())
assert isinstance(obj, Subcommand)
def test_name():
assert scsecret.Secret.name == "secret"
def test_help():
assert scsecret.Secret.help == "Create a Bokeh secret key for use with Bokeh server"
def test_args():
assert scsecret.Secret.args == (
)
def test_run(capsys):
main(["bokeh", "secret"])
out, err = capsys.readouterr()
assert err == ""
assert len(out) == 45
assert out[-1] == '\n'
| {
"content_hash": "77247467ceb6158ff8fbf9035dd130af",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 88,
"avg_line_length": 25.607142857142858,
"alnum_prop": 0.6875871687587168,
"repo_name": "azjps/bokeh",
"id": "b8ee97d8ba245c21e292bb60c8ecedf0a43e7fda",
"size": "717",
"binary": false,
"copies": "19",
"ref": "refs/heads/master",
"path": "bokeh/command/subcommands/tests/test_secret.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1710"
},
{
"name": "CSS",
"bytes": "92582"
},
{
"name": "CoffeeScript",
"bytes": "1051340"
},
{
"name": "HTML",
"bytes": "46812"
},
{
"name": "JavaScript",
"bytes": "34439"
},
{
"name": "Jupyter Notebook",
"bytes": "3981"
},
{
"name": "Makefile",
"bytes": "1164"
},
{
"name": "Python",
"bytes": "2152481"
},
{
"name": "Shell",
"bytes": "13140"
},
{
"name": "TypeScript",
"bytes": "87868"
}
],
"symlink_target": ""
} |
from flask import Flask, request
from shutil import copyfile
import json, os, requests, redis, collections, thread, time
app = Flask(__name__)
rackhdHost = "http://localhost:8080"
rackhdAPI = rackhdHost + "/api/2.0"
rackhdConfig = "~/rackhd-pm2-config.yml"
flaskDir = "/home/labadmin/rackhd-scaleio/flask/"
notificationsLog = flaskDir + 'notifications.log'
ansiblePlaybookDir = "/home/labadmin/cloud-infra/playbooks/inf/"
ansiblePlaybook = ansiblePlaybookDir + "scaleio.yml"
ansibleAddPlaybook = ansiblePlaybookDir + "scaleio.add.yml"
ansibleDelPlaybook = ansiblePlaybookDir + "scaleio.del.yml"
ansibleInventoryDir = "/home/labadmin/cloud-infra/inventory/"
ansibleInventory = ansibleInventoryDir + "scaleio.inv"
ansibleRemoteUser = 'root'
PUT = "PUT"
POST = "POST"
GET = "GET"
rebootWait = 120
SCALEIO_CLUSTER_SIZE = 3
@app.route("/notification/", methods=['POST', 'PUT'])
def notification():
notification = json.loads(request.data)
with open(notificationsLog, 'a') as f:
json.dump(notification, f, indent=4)
f.write("\n\n")
if notification['data'].get('graphName') == "VMware Default Workflow" and notification['data'].get('progress').get('percentage') == '100%':
_addNode(notification['nodeId'])
return 'OK'
@app.route("/cleanup/", methods=['POST', 'PUT'])
def cleanup():
os.system("redis-cli del scaleio_lock scaleio_nodes scaleio_mdm scaleio_tb scaleio_additional_nodes")
os.system('echo "db.dropDatabase()" | mongo pxe')
os.system("sudo pm2 restart " + rackhdConfig)
return 'OK'
@app.route("/initialize/", methods=['POST', 'PUT'])
def initialize():
url = rackhdAPI + '/users'
args = {"username": "admin", "password": "admin123", "role": "Administrator"}
requests.post(url, json=args)
apiCalls = [(PUT, "/workflows/graphs", "VMware_default_workflow.json"),
(POST, "/skus", "VMware_sku.json"),
(POST, "/hooks", "webhook.json")]
_callRackHD(apiCalls)
return 'OK'
@app.route("/addToRedis/", methods=['POST', 'PUT'])
def addToRedis():
_addToRedis(request.form['ipaddress'])
return 'OK'
@app.route("/removeNode/", methods=['POST', 'PUT'])
def removeNode():
_removeNode(request.form['ipaddress'])
return 'OK'
def _callRackHD(apiCalls):
token = _loginRackHD()
headers = {'Authorization': 'JWT ' + token}
resp = []
for method, api, filename in apiCalls:
url = rackhdAPI + api
kwargs = {'headers': headers}
if filename:
jsonFile = flaskDir + filename
with open(jsonFile, 'r') as f:
kwargs['json'] = json.load(f)
resp.append(requests.request(method, url, **kwargs))
return resp
def _loginRackHD():
args = {'username': 'admin', 'password': 'admin123'}
url = rackhdHost + '/login'
r = requests.post(url, json=args)
return json.loads(r.text)['token']
def _addNode(nodeId):
apiCalls = [(GET, '/nodes/' + nodeId + '/catalogs/ohai', None)]
resp = _callRackHD(apiCalls)[0]
ipaddress = json.loads(resp.text)['data']['ipaddress']
thread.start_new_thread(_addToRedis, (ipaddress, ))
def _addToRedis(ipaddress):
    r = redis.StrictRedis()
    # Crude mutual exclusion via Redis SETNX: spin until the lock key is
    # ours; every exit path below releases it by deleting the key.
    while not r.setnx('scaleio_lock', 1):
        time.sleep(rebootWait)
if ipaddress in r.smembers('scaleio_nodes'):
r.delete('scaleio_lock')
return
else:
r.sadd('scaleio_nodes', ipaddress)
if r.scard('scaleio_mdm') < (SCALEIO_CLUSTER_SIZE // 2 + 1):
r.sadd('scaleio_mdm', ipaddress)
elif r.scard('scaleio_tb') < (SCALEIO_CLUSTER_SIZE // 2):
r.sadd('scaleio_tb', ipaddress)
if r.scard('scaleio_tb') == (SCALEIO_CLUSTER_SIZE // 2):
_deployScaleIO(r)
else:
r.sadd('scaleio_additional_nodes', ipaddress)
thread.start_new_thread(_deployAdditionalNodes, ([ipaddress], ))
r.delete('scaleio_lock')
def _deployScaleIO(r):
time.sleep(rebootWait)
components = collections.OrderedDict()
components['mdm'] = r.smembers('scaleio_mdm')
components['tb'] = r.smembers('scaleio_tb')
components['gateway'] = components['tb']
components['sds'] = components['mdm'].union(components['tb'])
components['lia'] = components['sds']
components['sdc'] = components['sds']
with open(ansibleInventory, 'w') as f:
f.truncate()
for (k, v) in components.items():
_appendComponent(f, k, v)
_deployScaleIOByAnsible(ansibleInventory, ansiblePlaybook)
def _appendComponent(f, name, hosts):
f.write('[' + name + "]\n")
for host in hosts:
f.write(host + "\n")
f.write("\n")
def _deployScaleIOByAnsible(inventory, playbook):
os.chdir(ansiblePlaybookDir)
os.system("ansible-playbook -i " + inventory + " -u " + ansibleRemoteUser + " " + playbook)
def _deployAdditionalNodes(hosts):
inventory = ansibleInventory + '.' + str(id(hosts))
copyfile(ansibleInventory, inventory)
with open(inventory, 'a') as f:
_appendComponent(f, 'additional_nodes', hosts)
_deployScaleIOByAnsible(inventory, ansibleAddPlaybook)
os.remove(inventory)
def _removeNode(ipaddress):
    r = redis.StrictRedis()
    # Same SETNX-based lock as in _addToRedis.
    while not r.setnx('scaleio_lock', 1):
        time.sleep(rebootWait)
if ipaddress in r.smembers('scaleio_nodes'):
r.srem('scaleio_nodes', ipaddress)
else:
r.delete('scaleio_lock')
return
if ipaddress in r.smembers('scaleio_mdm'):
r.srem('scaleio_mdm', ipaddress)
_removeMDM(ipaddress)
elif ipaddress in r.smembers('scaleio_tb'):
r.srem('scaleio_tb', ipaddress)
_removeTB(ipaddress)
else:
r.srem('scaleio_additional_nodes', ipaddress)
thread.start_new_thread(_removeAdditionalNodes, ([ipaddress], ))
r.delete('scaleio_lock')
def _removeAdditionalNodes(hosts):
inventory = ansibleInventory + '.' + str(id(hosts))
copyfile(ansibleInventory, inventory)
with open(inventory, 'a') as f:
_appendComponent(f, 'removed_nodes', hosts)
_deployScaleIOByAnsible(inventory, ansibleDelPlaybook)
os.remove(inventory)
def _removeMDM(ipaddress):
return
def _removeTB(ipaddress):
return
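# A minimal manual-test sketch (an assumption, not part of the original app):
# exercising the endpoints the way RackHD's webhook would, with this Flask
# app served on localhost:5000.
#
#   import requests
#   requests.post('http://localhost:5000/initialize/')
#   requests.post('http://localhost:5000/addToRedis/',
#                 data={'ipaddress': '10.0.0.5'})
#   requests.post('http://localhost:5000/removeNode/',
#                 data={'ipaddress': '10.0.0.5'})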
| {
"content_hash": "523feba32684d322cf34ef0aca7c0be3",
"timestamp": "",
"source": "github",
"line_count": 196,
"max_line_length": 143,
"avg_line_length": 31.20408163265306,
"alnum_prop": 0.6536952256376717,
"repo_name": "jialehuo/rackhd-scaleio",
"id": "cab6f8b85d2311422b9ad7fd0bccd091cdd8bbb1",
"size": "6116",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flask/rackhd.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6116"
}
],
"symlink_target": ""
} |
from absl import app
from iree.tf.support import tf_test_utils
import numpy as np
import tensorflow.compat.v2 as tf
class ControlFlowModule(tf.Module):
def __init__(self):
pass
@tf.function(input_signature=[tf.TensorSpec([], tf.float32)])
def collatz(self, a):
i = 0.
while a > 1.:
i = i + 1.
if (a % 2.) > 0.:
a = 3. * a + 1.
else:
a = a / 2.
return i
class ControlFlowTest(tf_test_utils.TracedModuleTestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._modules = tf_test_utils.compile_tf_module(ControlFlowModule)
def test_short_sequence(self):
def short_sequence(module):
input_array = np.array(9., dtype=np.float32)
module.collatz(input_array)
self.compare_backends(short_sequence, self._modules)
def test_long_sequence(self):
def long_sequence(module):
input_array = np.array(178., dtype=np.float32)
module.collatz(input_array)
self.compare_backends(long_sequence, self._modules)
def main(argv):
del argv # Unused
if hasattr(tf, 'enable_v2_behavior'):
tf.enable_v2_behavior()
tf.test.main()
if __name__ == '__main__':
app.run(main)
| {
"content_hash": "01bea1f291cf542ec3ea28a4af09700e",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 70,
"avg_line_length": 22.01818181818182,
"alnum_prop": 0.6317093311312965,
"repo_name": "google/iree",
"id": "0196a7f3bdfeb125f016e138ea1f281ee64683cf",
"size": "1429",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "integrations/tensorflow/test/python/iree_tf_tests/uncategorized/control_flow_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "23010"
},
{
"name": "Batchfile",
"bytes": "353"
},
{
"name": "C",
"bytes": "3830546"
},
{
"name": "C++",
"bytes": "8161374"
},
{
"name": "CMake",
"bytes": "899403"
},
{
"name": "Dockerfile",
"bytes": "28245"
},
{
"name": "GLSL",
"bytes": "2629"
},
{
"name": "HTML",
"bytes": "31018"
},
{
"name": "Java",
"bytes": "31697"
},
{
"name": "JavaScript",
"bytes": "18714"
},
{
"name": "MLIR",
"bytes": "5606822"
},
{
"name": "NASL",
"bytes": "3852"
},
{
"name": "PowerShell",
"bytes": "7893"
},
{
"name": "Python",
"bytes": "1143963"
},
{
"name": "Shell",
"bytes": "248374"
},
{
"name": "Starlark",
"bytes": "600260"
}
],
"symlink_target": ""
} |
import os
import time
import pytest
import requests
from generic_test_code.common import assert_endpoint_response
from util import GuardedSubprocess, SearchCriteria, auth_type_str
EXHIBITOR_PATH = "/exhibitor/foo/bar"
# Note(JP): this test assumes that the IAM is contacted when trying to reach
# /mesos_dns. This is not a good assumption. TODO: rewrite the test so that
# setting the User-Agent header is somehow tested differently.
# class TestAuthzIAMBackendQueryCommon:
# def test_if_master_ar_sets_correct_useragent_while_quering_iam(
# self, master_ar_process_pertest, mocker, valid_user_header):
# mocker.send_command(endpoint_id='http://127.0.0.1:8101',
# func_name='record_requests')
# assert_endpoint_response(
# master_ar_process_pertest,
# '/mesos_dns/v1/reflect/me',
# 200,
# headers=valid_user_header,
# )
# r_reqs = mocker.send_command(endpoint_id='http://127.0.0.1:8101',
# func_name='get_recorded_requests')
# assert len(r_reqs) == 1
# verify_header(r_reqs[0]['headers'], 'User-Agent', 'Master Admin Router')
class TestAuthnJWTValidator:
"""Tests scenarios where authentication token isn't provided or is provided
in different supported places (cookie, header)"""
def test_auth_token_not_provided(self, master_ar_process_perclass):
log_messages = {
"No auth token in request.": SearchCriteria(1, True),
}
assert_endpoint_response(
master_ar_process_perclass, EXHIBITOR_PATH, 401, assert_error_log=log_messages)
def test_invalid_auth_token_in_cookie(self, master_ar_process_perclass):
log_messages = {
"No auth token in request.": SearchCriteria(0, True),
"Invalid token. Reason: invalid jwt string":
SearchCriteria(1, True),
}
assert_endpoint_response(
master_ar_process_perclass,
EXHIBITOR_PATH,
401,
assert_error_log=log_messages,
cookies={"dcos-acs-auth-cookie": "invalid"},
)
# Note(JP): in the future we should simply test that only RS256 works, in
# both variants.
# def test_missmatched_auth_token_algo_in_cookie(
# self,
# master_ar_process_perclass,
# mismatch_alg_jwt_generator,
# repo_is_ee,
# ):
# log_messages = {
# ("Invalid token. Reason: whitelist unsupported alg: " +
# jwt_type_str(not repo_is_ee)): SearchCriteria(1, True),
# }
# token = mismatch_alg_jwt_generator(uid='user')
# assert_endpoint_response(
# master_ar_process_perclass,
# EXHIBITOR_PATH,
# 401,
# assert_error_log=log_messages,
# cookies={"dcos-acs-auth-cookie": token},
# )
def test_valid_auth_token_in_cookie_with_null_uid(
self,
master_ar_process_perclass,
jwt_generator,
):
log_messages = {
"No auth token in request.": SearchCriteria(0, True),
"Invalid token. Reason: invalid jwt string":
SearchCriteria(0, True),
"Unexpected token payload: missing uid.":
SearchCriteria(1, True),
}
token = jwt_generator(uid=None)
assert_endpoint_response(
master_ar_process_perclass,
EXHIBITOR_PATH,
401,
assert_error_log=log_messages,
cookies={"dcos-acs-auth-cookie": token},
)
def test_valid_auth_token_in_cookie(
self,
master_ar_process_perclass,
jwt_generator):
log_messages = {
"No auth token in request.": SearchCriteria(0, True),
"Invalid token. Reason: invalid jwt string":
SearchCriteria(0, True),
"UID from the valid DC/OS authentication token: `test`": SearchCriteria(1, True),
}
token = jwt_generator(uid='test')
assert_endpoint_response(
master_ar_process_perclass,
EXHIBITOR_PATH,
200,
assert_error_log=log_messages,
cookies={"dcos-acs-auth-cookie": token},
)
def test_valid_auth_token(self, master_ar_process_perclass, valid_user_header):
log_messages = {
"UID from the valid DC/OS authentication token: `bozydar`":
SearchCriteria(1, True),
}
assert_endpoint_response(
master_ar_process_perclass,
EXHIBITOR_PATH,
200,
assert_error_log=log_messages,
headers=valid_user_header,
)
def test_valid_auth_token_priority(
self,
master_ar_process_perclass,
valid_user_header,
jwt_generator,
):
log_messages = {
"UID from the valid DC/OS authentication token: `bozydar`":
SearchCriteria(1, True),
"UID from the valid DC/OS authentication token: `test`":
SearchCriteria(0, True),
}
token = jwt_generator(uid='test')
assert_endpoint_response(
master_ar_process_perclass,
EXHIBITOR_PATH,
200,
assert_error_log=log_messages,
headers=valid_user_header,
cookies={"dcos-acs-auth-cookie": token},
)
def test_valid_auth_token_without_uid(
self,
master_ar_process_perclass,
jwt_generator,
):
log_messages = {
"Invalid token. Reason: Missing one of claims - \[ uid \]":
SearchCriteria(1, True),
}
token = jwt_generator(uid='test', skip_uid_claim=True)
auth_header = {'Authorization': 'token={}'.format(token)}
assert_endpoint_response(
master_ar_process_perclass,
EXHIBITOR_PATH,
401,
assert_error_log=log_messages,
headers=auth_header,
)
def test_valid_auth_token_without_exp(
self,
master_ar_process_perclass,
jwt_generator,
):
# We accept "forever tokens"
token = jwt_generator(uid='test', skip_exp_claim=True)
auth_header = {'Authorization': 'token={}'.format(token)}
assert_endpoint_response(
master_ar_process_perclass,
EXHIBITOR_PATH,
200,
headers=auth_header,
)
def test_expired_auth_token(
self,
master_ar_process_perclass,
jwt_generator,
):
log_messages = {
"Invalid token. Reason: 'exp' claim expired at ":
SearchCriteria(1, True),
}
token = jwt_generator(uid='test', exp=time.time() - 15)
auth_header = {'Authorization': 'token={}'.format(token)}
assert_endpoint_response(
master_ar_process_perclass,
EXHIBITOR_PATH,
401,
assert_error_log=log_messages,
headers=auth_header,
)
def test_valid_auth_token_with_bearer_header(
self,
master_ar_process_perclass,
jwt_generator,
):
        # The "Bearer" authorization scheme is accepted as well.
token = jwt_generator(uid='test')
auth_header = {'Authorization': 'Bearer {}'.format(token)}
assert_endpoint_response(
master_ar_process_perclass,
EXHIBITOR_PATH,
200,
headers=auth_header,
)
class TestAuthCustomErrorPages:
def test_correct_401_page_content(self, master_ar_process_pertest, repo_is_ee):
url = master_ar_process_pertest.make_url_from_path(EXHIBITOR_PATH)
resp = requests.get(url)
assert resp.status_code == 401
assert resp.headers["Content-Type"] == "text/html; charset=UTF-8"
assert resp.headers["WWW-Authenticate"] == auth_type_str(repo_is_ee)
path_401 = os.environ.get('AUTH_ERROR_PAGE_DIR_PATH') + "/401.html"
with open(path_401, 'rb') as f:
resp_content = resp.content.decode('utf-8').strip()
file_content = f.read().decode('utf-8').strip()
assert resp_content == file_content
class TestAuthPrecedence:
def test_if_service_endpoint_auth_precedence_is_enforced(
self,
valid_user_header,
master_ar_process_pertest):
url = master_ar_process_pertest.make_url_from_path("/service/i/do/not/exist")
resp = requests.get(
url,
allow_redirects=False)
assert resp.status_code == 401
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 404
@pytest.mark.parametrize("path", ["/system/v1/agent/{}/logs{}", "/agent/{}{}"])
def test_if_agent_endpoint_auth_precedence_is_enforced(
self,
valid_user_header,
master_ar_process_pertest,
path):
uri = path.format("bdcd424a-b59e-4df4-b492-b54e38926bd8-S0", "/foo/bar")
url = master_ar_process_pertest.make_url_from_path(uri)
resp = requests.get(
url,
allow_redirects=False)
assert resp.status_code == 401
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 404
def test_if_mleader_endpoint_auth_precedence_is_enforced(
self,
valid_user_header,
master_ar_process_pertest,
mocker):
# We have to remove the leader in order to make AR respond with 404
# which has a chance of being processed earlier than auth.
mocker.send_command(endpoint_id='http://127.0.0.1:8080',
func_name='remove_leader')
url = master_ar_process_pertest.make_url_from_path(
"/system/v1/leader/marathon/foo/bar")
resp = requests.get(
url,
allow_redirects=False)
assert resp.status_code == 401
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 503
def test_if_historyservice_endpoint_auth_precedence_is_enforced(
self, valid_user_header, mocker, nginx_class):
ar = nginx_class(host_ip=None)
url = ar.make_url_from_path('/dcos-history-service/foo/bar')
with GuardedSubprocess(ar):
resp = requests.get(url, allow_redirects=False)
assert resp.status_code == 401
resp = requests.get(url, allow_redirects=False, headers=valid_user_header)
assert resp.status_code == 503
| {
"content_hash": "99ff3cc8af68bba0290fcb2a662e1588",
"timestamp": "",
"source": "github",
"line_count": 331,
"max_line_length": 93,
"avg_line_length": 33.438066465256796,
"alnum_prop": 0.5576436573906758,
"repo_name": "GoelDeepak/dcos",
"id": "281eb28523ea9f96b0d257f9391dcd09015a606d",
"size": "11132",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "packages/adminrouter/extra/src/test-harness/tests/test_auth.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2529"
},
{
"name": "Dockerfile",
"bytes": "11457"
},
{
"name": "Groovy",
"bytes": "711"
},
{
"name": "HTML",
"bytes": "94641"
},
{
"name": "Lua",
"bytes": "195164"
},
{
"name": "Makefile",
"bytes": "179"
},
{
"name": "PowerShell",
"bytes": "20017"
},
{
"name": "Python",
"bytes": "1518270"
},
{
"name": "Shell",
"bytes": "107438"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
import upyunstorage
setup(
name = 'django-upyun-storage',
version = upyunstorage.__version__,
packages = find_packages(),
author = 'Fei Tao',
author_email = '[email protected]',
license = 'BSD',
description = 'Upyun storage backend for Django pluggable storage system',
url='https://github.com/ftao/django-upyun-storage',
classifiers = [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
#test_suite='tests.main',
zip_safe = False,
)
| {
"content_hash": "f8570fe8e2cc7672384b7659c2a04db3",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 78,
"avg_line_length": 31.2,
"alnum_prop": 0.6256410256410256,
"repo_name": "ftao/django-upyun-storage",
"id": "9a3df47f1ed72ec5eee23e604bec9f09217d0fe2",
"size": "780",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "17550"
}
],
"symlink_target": ""
} |
"""
Main program for the child process run by
L{twisted.test.test_stdio.StandardInputOutputTests.test_loseConnection} to
test that ITransport.loseConnection() works for process transports.
"""
from __future__ import absolute_import, division
import sys
from twisted.internet.error import ConnectionDone
from twisted.internet import stdio, protocol
from twisted.python import reflect, log
class LoseConnChild(protocol.Protocol):
exitCode = 0
def connectionMade(self):
self.transport.loseConnection()
def connectionLost(self, reason):
"""
Check that C{reason} is a L{Failure} wrapping a L{ConnectionDone}
instance and stop the reactor. If C{reason} is wrong for some reason,
log something about that in C{self.errorLogFile} and make sure the
process exits with a non-zero status.
"""
try:
try:
reason.trap(ConnectionDone)
except:
log.err(None, "Problem with reason passed to connectionLost")
self.exitCode = 1
finally:
reactor.stop()
if __name__ == '__main__':
reflect.namedAny(sys.argv[1]).install()
log.startLogging(open(sys.argv[2], 'wb'))
from twisted.internet import reactor
protocol = LoseConnChild()
stdio.StandardIO(protocol)
reactor.run()
sys.exit(protocol.exitCode)
| {
"content_hash": "93b0a05b2d67dcc2a021cb6ded4bc784",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 78,
"avg_line_length": 30,
"alnum_prop": 0.6659420289855073,
"repo_name": "ntuecon/server",
"id": "008cff6e74279a329c463dbc1d8b6b4638815081",
"size": "1548",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "pyenv/Lib/site-packages/twisted/test/stdio_test_loseconn.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "2209"
},
{
"name": "Batchfile",
"bytes": "1509"
},
{
"name": "C",
"bytes": "504013"
},
{
"name": "C++",
"bytes": "96440"
},
{
"name": "CSS",
"bytes": "133288"
},
{
"name": "GAP",
"bytes": "18122"
},
{
"name": "HTML",
"bytes": "150026"
},
{
"name": "JavaScript",
"bytes": "243314"
},
{
"name": "Objective-C",
"bytes": "1292"
},
{
"name": "PowerShell",
"bytes": "8325"
},
{
"name": "Python",
"bytes": "27048260"
},
{
"name": "Shell",
"bytes": "47820"
},
{
"name": "Tcl",
"bytes": "1237796"
},
{
"name": "Visual Basic",
"bytes": "949"
},
{
"name": "XSLT",
"bytes": "2113"
}
],
"symlink_target": ""
} |
"""
Copyright 2011 Jeff Garzik
AuthServiceProxy has the following improvements over python-jsonrpc's
ServiceProxy class:
- HTTP connections persist for the life of the AuthServiceProxy object
(if server supports HTTP/1.1)
- sends protocol 'version', per JSON-RPC 1.1
- sends proper, incrementing 'id'
- sends Basic HTTP authentication headers
- parses all JSON numbers that look like floats as Decimal
- uses standard Python json lib
Previous copyright, from python-jsonrpc/jsonrpc/proxy.py:
Copyright (c) 2007 Jan-Klaas Kollhof
This file is part of jsonrpc.
jsonrpc is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this software; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
try:
import http.client as httplib
except ImportError:
import httplib
import base64
import decimal
import json
import logging
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
USER_AGENT = "AuthServiceProxy/0.1"
HTTP_TIMEOUT = 30
log = logging.getLogger("MonetaRPC")
class JSONRPCException(Exception):
def __init__(self, rpc_error):
Exception.__init__(self)
self.error = rpc_error
def EncodeDecimal(o):
    if isinstance(o, decimal.Decimal):
        # float() matters under Python 3, where rounding a Decimal yields
        # a Decimal that the json module still cannot serialize.
        return float(round(o, 8))
    raise TypeError(repr(o) + " is not JSON serializable")
class AuthServiceProxy(object):
__id_count = 0
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None):
self.__service_url = service_url
self.__service_name = service_name
self.__url = urlparse.urlparse(service_url)
if self.__url.port is None:
port = 80
else:
port = self.__url.port
(user, passwd) = (self.__url.username, self.__url.password)
try:
user = user.encode('utf8')
except AttributeError:
pass
try:
passwd = passwd.encode('utf8')
except AttributeError:
pass
authpair = user + b':' + passwd
self.__auth_header = b'Basic ' + base64.b64encode(authpair)
if connection:
# Callables re-use the connection of the original proxy
self.__conn = connection
elif self.__url.scheme == 'https':
self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
None, None, False,
timeout)
else:
self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
False, timeout)
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
# Python internal stuff
raise AttributeError
if self.__service_name is not None:
name = "%s.%s" % (self.__service_name, name)
return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
def __call__(self, *args):
AuthServiceProxy.__id_count += 1
log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self.__service_name,
json.dumps(args, default=EncodeDecimal)))
postdata = json.dumps({'version': '1.1',
'method': self.__service_name,
'params': args,
'id': AuthServiceProxy.__id_count}, default=EncodeDecimal)
self.__conn.request('POST', self.__url.path, postdata,
{'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'})
response = self._get_response()
if response['error'] is not None:
raise JSONRPCException(response['error'])
elif 'result' not in response:
raise JSONRPCException({
'code': -343, 'message': 'missing JSON-RPC result'})
else:
return response['result']
def _batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal)
log.debug("--> "+postdata)
self.__conn.request('POST', self.__url.path, postdata,
{'Host': self.__url.hostname,
'User-Agent': USER_AGENT,
'Authorization': self.__auth_header,
'Content-type': 'application/json'})
return self._get_response()
def _get_response(self):
http_response = self.__conn.getresponse()
if http_response is None:
raise JSONRPCException({
'code': -342, 'message': 'missing HTTP response from server'})
responsedata = http_response.read().decode('utf8')
response = json.loads(responsedata, parse_float=decimal.Decimal)
if "error" in response and response["error"] is None:
log.debug("<-%s- %s"%(response["id"], json.dumps(response["result"], default=EncodeDecimal)))
else:
log.debug("<-- "+responsedata)
return response
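# A minimal usage sketch (an assumption, not part of the original module --
# URL, credentials and port are placeholders):
#
#   rpc = AuthServiceProxy("http://user:secret@127.0.0.1:8332")
#   print(rpc.getblockcount())   # attribute access builds the RPC method
#
# Batched calls go through _batch() with a list of raw JSON-RPC dicts.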
| {
"content_hash": "03c75cbca2f28e0904a0a74a78da54ea",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 105,
"avg_line_length": 37.303225806451614,
"alnum_prop": 0.589069526115531,
"repo_name": "moneta-develop/moneta",
"id": "37d59aab1c05efa2b89e6f601beb86102fac49f4",
"size": "5783",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "qa/rpc-tests/python-monetarpc/bitcoinrpc/authproxy.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "7639"
},
{
"name": "C",
"bytes": "388529"
},
{
"name": "C++",
"bytes": "3518396"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "17983"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2099"
},
{
"name": "Makefile",
"bytes": "61678"
},
{
"name": "Objective-C",
"bytes": "2020"
},
{
"name": "Objective-C++",
"bytes": "7244"
},
{
"name": "Protocol Buffer",
"bytes": "2304"
},
{
"name": "Python",
"bytes": "211715"
},
{
"name": "QMake",
"bytes": "2018"
},
{
"name": "Shell",
"bytes": "40427"
}
],
"symlink_target": ""
} |
'''
Given a collection of candidate numbers (C) and a target number (T), find all unique combinations in C where the candidate numbers sum to T.
Each number in C may only be used once in the combination.
Note:
All numbers (including target) will be positive integers.
Elements in a combination (a1, a2, … , ak) must be in non-descending order. (i.e., a1 ≤ a2 ≤ … ≤ ak).
The solution set must not contain duplicate combinations.
For example, given candidate set 10,1,2,7,6,1,5 and target 8,
A solution set is:
[1, 7]
[1, 2, 5]
[2, 6]
[1, 1, 6]
'''
class Solution(object):
def combinationSum2(self, candidates, target):
"""
:type candidates: List[int]
:type target: int
:rtype: List[List[int]]
"""
if not candidates:
return []
candidates.sort()
result = []
self.combination(candidates, target, [], result)
return result
def combination(self, candidates, target, current, result):
s = sum(current) if current else 0
if s > target:
return
elif s == target:
result.append(current)
return
else:
i = 0
while i < len(candidates):
self.combination(candidates[i + 1:], target, current + [candidates[i]], result)
# ignore repeating elements
while i + 1 < len(candidates) and candidates[i] == candidates[i + 1]:
i += 1
i += 1
if __name__ == "__main__":
assert Solution().combinationSum2([10, 1, 2, 7, 6, 1, 5], 8) == [[1, 1, 6], [1, 2, 5], [1, 7], [2, 6]] | {
"content_hash": "39e03eb6539ea78f24eb463dd042c3e9",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 141,
"avg_line_length": 32.68,
"alnum_prop": 0.5636474908200735,
"repo_name": "gavinfish/leetcode-share",
"id": "24101fcbee7dd5f5af277493ff7534cebbec653b",
"size": "1644",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/040 Combination Sum II.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "81458"
},
{
"name": "Python",
"bytes": "222883"
}
],
"symlink_target": ""
} |
from Metarouting.Algebra.Semiring import *
from Metarouting.Algebra.Products import *
from Metarouting.Policy.Routing.ShortestR import *
class ShortestOnShortest(Tunnel):
zeroElt = (ShortestR.zeroElt, ShortestR.zeroElt)
unitElt = (ShortestR.unitElt, ShortestR.unitElt)
def __init__(self, val):
(s,t) = val
if(s.__class__ != ShortestR):
s = ShortestR(s)
if(t.__class__ != ShortestR):
t = ShortestR(t)
self.elt = (s,t)
def __repr__(self):
return self.elt.__repr__()
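# A minimal usage sketch (an assumption, not part of the original module):
# pairing two hop-count metrics; plain values are coerced to ShortestR.
#
#   route = ShortestOnShortest((2, 5))
#   print(route)   # repr of the (ShortestR(2), ShortestR(5)) pair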
| {
"content_hash": "2c9e949469da60898f75480d1422911e",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 52,
"avg_line_length": 28.842105263157894,
"alnum_prop": 0.6094890510948905,
"repo_name": "sdynerow/SemiringsLibraryPython",
"id": "4af3784e943fe9f12ecfcb6012fc70a0b3d95cf4",
"size": "1187",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Metarouting/Policy/Routing/ShortestOnShortest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "94912"
}
],
"symlink_target": ""
} |
from random import Random
class Cell(object):
def __init__(self, is_wall):
self.is_wall = is_wall
if not is_wall:
self.in_maze = False # This is only looked at if it's not a wall
def generate_maze(height, width):
    # Randomized Prim's algorithm: grow the maze from a random start cell,
    # repeatedly knocking through walls that border exactly one visited cell.
    # Width and height including walls:
full_width = width*2 + 1
full_height = height*2 + 1
maze = [[Cell(col % 2 == 0 or row % 2 == 0) # even-numbered row/col => wall
for col in xrange(full_width)]
for row in xrange(full_height)]
wall_list = []
random = Random()
random.seed()
start_col = 2*random.randint(0, width-1) + 1
start_row = 2*random.randint(0, height-1) + 1
maze[start_row][start_col].in_maze = True
maze[start_row][start_col].is_wall = False
class Wall(object):
def __init__(self, row, col, visited_row, visited_col):
# row, col are of the wall; visited_* are of the cell we were
# visiting when we added this wall.
self.row = row
self.col = col
# row, col of the cell on the "opposite" side, if applicable.
if (0 < row < full_height-1 and 0 < col < full_width-1):
self.opp_row = row + (row - visited_row)
self.opp_col = col + (col - visited_col)
else:
self.opp_row = self.opp_col = None
wall_list += [Wall(start_row-1, start_col, start_row, start_col),
Wall(start_row+1, start_col, start_row, start_col),
Wall(start_row, start_col-1, start_row, start_col),
Wall(start_row, start_col+1, start_row, start_col)]
while len(wall_list) != 0:
wall = random.choice(wall_list)
wall_list.remove(wall)
# If the wall is still a wall, and there's a cell on the opposite
# side, and that opposite cell isn't in the maze yet...
if (wall.opp_row is not None
and maze[wall.row][wall.col].is_wall
and not maze[wall.opp_row][wall.opp_col].in_maze):
new_row = wall.opp_row
new_col = wall.opp_col
# ...then strike down the wall and put that opposite cell in the
# maze,
maze[wall.row][wall.col].is_wall = False
maze[new_row][new_col].in_maze = True
# and add the new cell's walls to the wall list.
for new_wall in [(new_row+1, new_col), (new_row-1, new_col),
(new_row, new_col+1), (new_row, new_col-1)]:
if maze[new_wall[0]][new_wall[1]].is_wall:
wall_list.append(Wall(new_wall[0], new_wall[1],
new_row, new_col))
# Create an entrance (top left) and exit (bottom right).
maze[0][1].is_wall = False
maze[full_height-1][full_width-2].is_wall = False
return maze
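# A minimal rendering sketch (an assumption, not part of the original
# module): print the maze as ASCII art, one character per cell.
if __name__ == '__main__':
    for row in generate_maze(5, 10):
        print ''.join('#' if cell.is_wall else ' ' for cell in row)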
| {
"content_hash": "7fe4b32a0577f8307e2d603054e07a7c",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 80,
"avg_line_length": 38.74324324324324,
"alnum_prop": 0.5434251831182421,
"repo_name": "graue/mazegen",
"id": "2909e0281a7855f442d4869039e32df0efdc8bd5",
"size": "2918",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mazegen.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3871"
}
],
"symlink_target": ""
} |
from .connection import Connection
from .account_management import AccountManagement
from .provisioning import Provisioning
# from .reporting import Reporting
# from .audit import Audit
class Client:
def __init__(self, username, password, host="api.security.biz"):
""" Initialize the client.
Arguments:
username -- The username of the user.
password -- The password of the user.
Keyword Arguments:
host -- Allows you to point to a server other than the production server.
"""
self.connection = Connection(host)
self.connection.auth(username, password)
def provisioning(self):
"""Create a Provisioning object."""
return Provisioning(self.connection)
def account_management(self):
"""Create an Account Management object."""
return AccountManagement(self.connection) | {
"content_hash": "1721f5f792a5da3c849c88da85d196d2",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 75,
"avg_line_length": 28.5,
"alnum_prop": 0.7506265664160401,
"repo_name": "sbarbett/ssp-sdk-python",
"id": "5ebc0ca49c97237dbab2d1b22dcc83509de9b98d",
"size": "1394",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "42880"
}
],
"symlink_target": ""
} |
from __future__ import print_function, absolute_import
def show(*args, **kw):
# Compatibility with dss 0.6 or older.
# Use dss.tools.show.show instead
print(*args, **kw)
class DictObj(dict):
""" Dictionary with attribute syntax to get, set and delete items.
"""
def __getattr__(self, item):
try:
return self[item]
except KeyError:
raise AttributeError(item)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, item):
try:
del self[item]
except KeyError:
raise AttributeError(item)
class Suppress:
""" Silence chosen exceptions.
Almost like `contextlib.suppress` (Only available on Python 3.4+).
"""
def __init__(self, *args):
self.cls = args or None
self.errors = []
def __enter__(self):
return self
def __call__(self, *args):
return type(self)(*args)
def __exit__(self, cls, exc, trace):
if cls is not None:
self.errors.append((cls, exc, trace))
if self.cls is None or cls is None or issubclass(cls, self.cls):
return True
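# A minimal usage sketch (an assumption, not part of the original module):
# swallow a chosen exception, then inspect what was suppressed.
#
#   with Suppress(KeyError) as s:
#       {}['missing']          # raised here, silenced by __exit__
#   assert s.errors[0][0] is KeyError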
| {
"content_hash": "39de461d87727ce53286ec8341767956",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 74,
"avg_line_length": 24.645833333333332,
"alnum_prop": 0.5655114116652579,
"repo_name": "terabit-software/dynamic-stream-server",
"id": "6f2dceb3b00489712861881f49429140926a5f25",
"size": "1183",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "dss/tools/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "3182"
},
{
"name": "HTML",
"bytes": "3017"
},
{
"name": "JavaScript",
"bytes": "17266"
},
{
"name": "Nginx",
"bytes": "3215"
},
{
"name": "Python",
"bytes": "131113"
},
{
"name": "Shell",
"bytes": "1795"
},
{
"name": "XSLT",
"bytes": "11765"
}
],
"symlink_target": ""
} |
"""Autogenerated file - DO NOT EDIT
If you spot a bug, please report it on the mailing list and/or change the generator."""
from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath
import os
class ResampleDTIVolumeInputSpec(CommandLineInputSpec):
inputVolume = File(position=-2, desc="Input volume to be resampled", exists=True, argstr="%s")
outputVolume = traits.Either(traits.Bool, File(), position=-1, hash_files=False, desc="Resampled Volume", argstr="%s")
Reference = File(desc="Reference Volume (spacing,size,orientation,origin)", exists=True, argstr="--Reference %s")
transformationFile = File(exists=True, argstr="--transformationFile %s")
defField = File(desc="File containing the deformation field (3D vector image containing vectors with 3 components)", exists=True, argstr="--defField %s")
    hfieldtype = traits.Enum("displacement", "h-Field", desc="Set if the deformation field is an h-Field", argstr="--hfieldtype %s")
interpolation = traits.Enum("linear", "nn", "ws", "bs", desc="Sampling algorithm (linear , nn (nearest neighborhoor), ws (WindowedSinc), bs (BSpline) )", argstr="--interpolation %s")
correction = traits.Enum("zero", "none", "abs", "nearest", desc="Correct the tensors if computed tensor is not semi-definite positive", argstr="--correction %s")
transform_tensor_method = traits.Enum("PPD", "FS", desc="Chooses between 2 methods to transform the tensors: Finite Strain (FS), faster but less accurate, or Preservation of the Principal Direction (PPD)", argstr="--transform_tensor_method %s")
transform_order = traits.Enum("input-to-output", "output-to-input", desc="Select in what order the transforms are read", argstr="--transform_order %s")
notbulk = traits.Bool(desc="The transform following the BSpline transform is not set as a bulk transform for the BSpline transform", argstr="--notbulk ")
spaceChange = traits.Bool(desc="Space Orientation between transform and image is different (RAS/LPS) (warning: if the transform is a Transform Node in Slicer3, do not select)", argstr="--spaceChange ")
rotation_point = traits.List(desc="Center of rotation (only for rigid and affine transforms)", argstr="--rotation_point %s")
centered_transform = traits.Bool(desc="Set the center of the transformation to the center of the input image (only for rigid and affine transforms)", argstr="--centered_transform ")
image_center = traits.Enum("input", "output", desc="Image to use to center the transform (used only if \'Centered Transform\' is selected)", argstr="--image_center %s")
Inverse_ITK_Transformation = traits.Bool(desc="Inverse the transformation before applying it from output image to input image (only for rigid and affine transforms)", argstr="--Inverse_ITK_Transformation ")
spacing = InputMultiPath(traits.Float, desc="Spacing along each dimension (0 means use input spacing)", sep=",", argstr="--spacing %s")
size = InputMultiPath(traits.Float, desc="Size along each dimension (0 means use input size)", sep=",", argstr="--size %s")
origin = traits.List(desc="Origin of the output Image", argstr="--origin %s")
direction_matrix = InputMultiPath(traits.Float, desc="9 parameters of the direction matrix by rows (ijk to LPS if LPS transform, ijk to RAS if RAS transform)", sep=",", argstr="--direction_matrix %s")
number_of_thread = traits.Int(desc="Number of thread used to compute the output image", argstr="--number_of_thread %d")
default_pixel_value = traits.Float(desc="Default pixel value for samples falling outside of the input region", argstr="--default_pixel_value %f")
window_function = traits.Enum("h", "c", "w", "l", "b", desc="Window Function , h = Hamming , c = Cosine , w = Welch , l = Lanczos , b = Blackman", argstr="--window_function %s")
spline_order = traits.Int(desc="Spline Order (Spline order may be from 0 to 5)", argstr="--spline_order %d")
transform_matrix = InputMultiPath(traits.Float, desc="12 parameters of the transform matrix by rows ( --last 3 being translation-- )", sep=",", argstr="--transform_matrix %s")
transform = traits.Enum("rt", "a", desc="Transform algorithm, rt = Rigid Transform, a = Affine Transform", argstr="--transform %s")
class ResampleDTIVolumeOutputSpec(TraitedSpec):
outputVolume = File(position=-1, desc="Resampled Volume", exists=True)
class ResampleDTIVolume(SEMLikeCommandLine):
"""title: Resample DTI Volume
category: Diffusion.Diffusion Tensor Images
description: Resampling an image is a very important task in image analysis. It is especially important in the frame of image registration. This module implements DT image resampling through the use of itk Transforms. The resampling is controlled by the Output Spacing. "Resampling" is performed in space coordinates, not pixel/grid coordinates. It is quite important to ensure that image spacing is properly set on the images involved. The interpolator is required since the mapping from one space to the other will often require evaluation of the intensity of the image at non-grid positions.
version: 0.1
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ResampleDTI
contributor: Francois Budin (UNC)
acknowledgements: This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149. Information on the National Centers for Biomedical Computing can be obtained from http://nihroadmap.nih.gov/bioinformatics
"""
input_spec = ResampleDTIVolumeInputSpec
output_spec = ResampleDTIVolumeOutputSpec
_cmd = "ResampleDTIVolume "
_outputs_filenames = {'outputVolume': 'outputVolume.nii'}
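# A minimal usage sketch (an assumption, not part of the generated file --
# file names are placeholders and running requires the Slicer CLI on PATH):
#
#   resample = ResampleDTIVolume()
#   resample.inputs.inputVolume = 'dti.nrrd'
#   resample.inputs.Reference = 'reference.nrrd'
#   resample.inputs.outputVolume = 'dti_resampled.nrrd'
#   resample.run()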
class DWIRicianLMMSEFilterInputSpec(CommandLineInputSpec):
iter = traits.Int(desc="Number of iterations for the noise removal filter.", argstr="--iter %d")
re = InputMultiPath(traits.Int, desc="Estimation radius.", sep=",", argstr="--re %s")
rf = InputMultiPath(traits.Int, desc="Filtering radius.", sep=",", argstr="--rf %s")
mnvf = traits.Int(desc="Minimum number of voxels in kernel used for filtering.", argstr="--mnvf %d")
mnve = traits.Int(desc="Minimum number of voxels in kernel used for estimation.", argstr="--mnve %d")
minnstd = traits.Int(desc="Minimum allowed noise standard deviation.", argstr="--minnstd %d")
maxnstd = traits.Int(desc="Maximum allowed noise standard deviation.", argstr="--maxnstd %d")
hrf = traits.Float(desc="How many histogram bins per unit interval.", argstr="--hrf %f")
uav = traits.Bool(desc="Use absolute value in case of negative square.", argstr="--uav ")
inputVolume = File(position=-2, desc="Input DWI volume.", exists=True, argstr="%s")
outputVolume = traits.Either(traits.Bool, File(), position=-1, hash_files=False, desc="Output DWI volume.", argstr="%s")
compressOutput = traits.Bool(desc="Compress the data of the compressed file using gzip", argstr="--compressOutput ")
class DWIRicianLMMSEFilterOutputSpec(TraitedSpec):
outputVolume = File(position=-1, desc="Output DWI volume.", exists=True)
class DWIRicianLMMSEFilter(SEMLikeCommandLine):
"""title: DWI Rician LMMSE Filter
category: Diffusion.Diffusion Weighted Images
description: This module reduces noise (or unwanted detail) on a set of diffusion weighted images. For this, it filters the image in the mean squared error sense using a Rician noise model. Images corresponding to each gradient direction, including baseline, are processed individually. The noise parameter is automatically estimated (noise estimation improved but slower).
Note that this is a general purpose filter for MRI images. The module jointLMMSE has been specifically designed for DWI volumes and shows a better performance, so its use is recommended instead.
A complete description of the algorithm in this module can be found in:
S. Aja-Fernandez, M. Niethammer, M. Kubicki, M. Shenton, and C.-F. Westin. Restoration of DWI data using a Rician LMMSE estimator. IEEE Transactions on Medical Imaging, 27(10): pp. 1389-1403, Oct. 2008.
version: 0.1.1.$Revision: 1 $(alpha)
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/RicianLMMSEImageFilter
contributor: Antonio Tristan Vega (UVa), Santiago Aja Fernandez (UVa), Marc Niethammer (UNC)
    acknowledgements: Partially funded by grant number TEC2007-67073/TCM from the Comision Interministerial de Ciencia y Tecnologia (Spain).
"""
input_spec = DWIRicianLMMSEFilterInputSpec
output_spec = DWIRicianLMMSEFilterOutputSpec
_cmd = "DWIRicianLMMSEFilter "
_outputs_filenames = {'outputVolume': 'outputVolume.nii'}
class TractographyLabelMapSeedingInputSpec(CommandLineInputSpec):
InputVolume = File(position=-2, desc="Input DTI volume", exists=True, argstr="%s")
inputroi = File(desc="Label map with seeding ROIs", exists=True, argstr="--inputroi %s")
OutputFibers = traits.Either(traits.Bool, File(), position=-1, hash_files=False, desc="Tractography result", argstr="%s")
useindexspace = traits.Bool(desc="Seed at IJK voxel grid", argstr="--useindexspace ")
seedspacing = traits.Float(desc="Spacing (in mm) between seed points, only matters if use Use Index Space is off", argstr="--seedspacing %f")
randomgrid = traits.Bool(desc="Enable random placing of seeds", argstr="--randomgrid ")
clthreshold = traits.Float(desc="Minimum Linear Measure for the seeding to start.", argstr="--clthreshold %f")
minimumlength = traits.Float(desc="Minimum length of the fibers (in mm)", argstr="--minimumlength %f")
maximumlength = traits.Float(desc="Maximum length of fibers (in mm)", argstr="--maximumlength %f")
stoppingmode = traits.Enum("LinearMeasure", "FractionalAnisotropy", desc="Tensor measurement used to stop the tractography", argstr="--stoppingmode %s")
stoppingvalue = traits.Float(desc="Tractography will stop when the stopping measurement drops below this value", argstr="--stoppingvalue %f")
stoppingcurvature = traits.Float(desc="Tractography will stop if radius of curvature becomes smaller than this number units are degrees per mm", argstr="--stoppingcurvature %f")
integrationsteplength = traits.Float(desc="Distance between points on the same fiber in mm", argstr="--integrationsteplength %f")
label = traits.Int(desc="Label value that defines seeding region.", argstr="--label %d")
writetofile = traits.Bool(desc="Write fibers to disk or create in the scene?", argstr="--writetofile ")
outputdirectory = traits.Either(traits.Bool, Directory(), hash_files=False, desc="Directory in which to save fiber(s)", argstr="--outputdirectory %s")
name = traits.Str(desc="Name to use for fiber files", argstr="--name %s")
class TractographyLabelMapSeedingOutputSpec(TraitedSpec):
OutputFibers = File(position=-1, desc="Tractography result", exists=True)
outputdirectory = Directory(desc="Directory in which to save fiber(s)", exists=True)
class TractographyLabelMapSeeding(SEMLikeCommandLine):
"""title: Tractography Label Map Seeding
category: Diffusion.Diffusion Tensor Images
description: Seed tracts on a Diffusion Tensor Image (DT) from a label map
version: 0.1.0.$Revision: 1892 $(alpha)
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/Seeding
license: slicer3
contributor: Raul San Jose (SPL, BWH), Demian Wassermann (SPL, BWH)
acknowledgements: Laboratory of Mathematics in Imaging. This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
"""
input_spec = TractographyLabelMapSeedingInputSpec
output_spec = TractographyLabelMapSeedingOutputSpec
_cmd = "TractographyLabelMapSeeding "
_outputs_filenames = {'OutputFibers': 'OutputFibers.vtk', 'outputdirectory': 'outputdirectory'}
class DWIJointRicianLMMSEFilterInputSpec(CommandLineInputSpec):
re = InputMultiPath(traits.Int, desc="Estimation radius.", sep=",", argstr="--re %s")
rf = InputMultiPath(traits.Int, desc="Filtering radius.", sep=",", argstr="--rf %s")
ng = traits.Int(desc="The number of the closest gradients that are used to jointly filter a given gradient direction (0 to use all).", argstr="--ng %d")
inputVolume = File(position=-2, desc="Input DWI volume.", exists=True, argstr="%s")
outputVolume = traits.Either(traits.Bool, File(), position=-1, hash_files=False, desc="Output DWI volume.", argstr="%s")
compressOutput = traits.Bool(desc="Compress the data of the compressed file using gzip", argstr="--compressOutput ")
class DWIJointRicianLMMSEFilterOutputSpec(TraitedSpec):
outputVolume = File(position=-1, desc="Output DWI volume.", exists=True)
class DWIJointRicianLMMSEFilter(SEMLikeCommandLine):
"""title: DWI Joint Rician LMMSE Filter
category: Diffusion.Diffusion Weighted Images
    description: This module reduces Rician noise (or unwanted detail) on a set of diffusion weighted images. For this, it filters the image in the mean squared error sense using a Rician noise model. The N closest gradient directions to the direction being processed are filtered together to improve the results: the noise-free signal is seen as an n-dimensional vector which has to be estimated with the LMMSE method from a set of corrupted measurements. To that end, the covariance matrix of the noise-free vector and the cross covariance between this signal and the noise have to be estimated, which is done taking into account the image formation process.
The noise parameter is automatically estimated from a rough segmentation of the background of the image. In this area the signal is simply 0, so that Rician statistics reduce to Rayleigh and the noise power can be easily estimated from the mode of the histogram.
A complete description of the algorithm may be found in:
Antonio Tristan-Vega and Santiago Aja-Fernandez, DWI filtering using joint information for DTI and HARDI, Medical Image Analysis, Volume 14, Issue 2, Pages 205-218. 2010.
version: 0.1.1.$Revision: 1 $(alpha)
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/JointRicianLMMSEImageFilter
contributor: Antonio Tristan Vega (UVa), Santiago Aja Fernandez (UVa)
    acknowledgements: Partially funded by grant number TEC2007-67073/TCM from the Comision Interministerial de Ciencia y Tecnologia (Spain).
"""
input_spec = DWIJointRicianLMMSEFilterInputSpec
output_spec = DWIJointRicianLMMSEFilterOutputSpec
_cmd = "DWIJointRicianLMMSEFilter "
_outputs_filenames = {'outputVolume': 'outputVolume.nii'}
class DiffusionWeightedVolumeMaskingInputSpec(CommandLineInputSpec):
inputVolume = File(position=-4, desc="Input DWI volume", exists=True, argstr="%s")
outputBaseline = traits.Either(traits.Bool, File(), position=-2, hash_files=False, desc="Estimated baseline volume", argstr="%s")
thresholdMask = traits.Either(traits.Bool, File(), position=-1, hash_files=False, desc="Otsu Threshold Mask", argstr="%s")
    otsuomegathreshold = traits.Float(desc="Control the sharpness of the threshold in the Otsu computation. 0: lower threshold, 1: higher threshold", argstr="--otsuomegathreshold %f")
removeislands = traits.Bool(desc="Remove Islands in Threshold Mask?", argstr="--removeislands ")
class DiffusionWeightedVolumeMaskingOutputSpec(TraitedSpec):
outputBaseline = File(position=-2, desc="Estimated baseline volume", exists=True)
thresholdMask = File(position=-1, desc="Otsu Threshold Mask", exists=True)
class DiffusionWeightedVolumeMasking(SEMLikeCommandLine):
"""title: Diffusion Weighted Volume Masking
category: Diffusion.Diffusion Weighted Images
    description: <p>Performs a mask calculation from a diffusion weighted (DW) image.</p><p>Starting from a DW image, this module computes the baseline image by averaging all the images without diffusion weighting and then applies the Otsu segmentation algorithm to produce a mask. This mask can then be used when estimating the diffusion tensor (DT) image, so that tensors are only estimated inside the mask rather than over the whole volume.</p>
version: 0.1.0.$Revision: 1892 $(alpha)
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DiffusionWeightedMasking
license: slicer3
contributor: Demian Wassermann (SPL, BWH)
"""
input_spec = DiffusionWeightedVolumeMaskingInputSpec
output_spec = DiffusionWeightedVolumeMaskingOutputSpec
_cmd = "DiffusionWeightedVolumeMasking "
_outputs_filenames = {'outputBaseline': 'outputBaseline.nii', 'thresholdMask': 'thresholdMask.nii'}
class DTIimportInputSpec(CommandLineInputSpec):
inputFile = File(position=-2, desc="Input DTI file", exists=True, argstr="%s")
outputTensor = traits.Either(traits.Bool, File(), position=-1, hash_files=False, desc="Output DTI volume", argstr="%s")
    testingmode = traits.Bool(desc="Enable testing mode. Sample helix file (helix-DTI.nhdr) will be loaded into Slicer and converted to NIfTI.", argstr="--testingmode ")
class DTIimportOutputSpec(TraitedSpec):
outputTensor = File(position=-1, desc="Output DTI volume", exists=True)
class DTIimport(SEMLikeCommandLine):
"""title: DTIimport
category: Diffusion.Diffusion Data Conversion
    description: Import tensor datasets from various formats, including the NIfTI file format
version: 1.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DTIImport
contributor: Sonia Pujol (SPL, BWH)
acknowledgements: This work is part of the National Alliance for Medical Image Computing (NA-MIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
"""
input_spec = DTIimportInputSpec
output_spec = DTIimportOutputSpec
_cmd = "DTIimport "
_outputs_filenames = {'outputTensor': 'outputTensor.nii'}
class DWIToDTIEstimationInputSpec(CommandLineInputSpec):
inputVolume = File(position=-3, desc="Input DWI volume", exists=True, argstr="%s")
mask = File(desc="Mask where the tensors will be computed", exists=True, argstr="--mask %s")
outputTensor = traits.Either(traits.Bool, File(), position=-2, hash_files=False, desc="Estimated DTI volume", argstr="%s")
outputBaseline = traits.Either(traits.Bool, File(), position=-1, hash_files=False, desc="Estimated baseline volume", argstr="%s")
enumeration = traits.Enum("LS", "WLS", desc="LS: Least Squares, WLS: Weighted Least Squares", argstr="--enumeration %s")
shiftNeg = traits.Bool(desc="Shift eigenvalues so all are positive (accounts for bad tensors related to noise or acquisition error)", argstr="--shiftNeg ")
class DWIToDTIEstimationOutputSpec(TraitedSpec):
outputTensor = File(position=-2, desc="Estimated DTI volume", exists=True)
outputBaseline = File(position=-1, desc="Estimated baseline volume", exists=True)
class DWIToDTIEstimation(SEMLikeCommandLine):
"""title: DWI to DTI Estimation
category: Diffusion.Diffusion Weighted Images
description: Performs a tensor model estimation from diffusion weighted images.
There are three estimation methods available: least squares, weighted least squares and non-linear estimation. The first method is the traditional method for tensor estimation and the fastest one. Weighted least squares takes into account the noise characteristics of the MRI images to weight the DWI samples used in the estimation based on their intensity magnitude. The last method is the most complex.
version: 0.1.0.$Revision: 1892 $(alpha)
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DiffusionTensorEstimation
license: slicer3
contributor: Raul San Jose (SPL, BWH)
acknowledgements: This command module is based on the estimation functionality provided by the Teem library. This work is part of the National Alliance for Medical Image Computing (NAMIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
"""
input_spec = DWIToDTIEstimationInputSpec
output_spec = DWIToDTIEstimationOutputSpec
_cmd = "DWIToDTIEstimation "
_outputs_filenames = {'outputTensor': 'outputTensor.nii', 'outputBaseline': 'outputBaseline.nii'}
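# A second hedged sketch, using only traits declared above: estimate a tensor
# volume with weighted least squares. 'dwi.nhdr' is a placeholder input file,
# not something shipped with the package.
def _example_dwi_to_dti_usage():
    estimate = DWIToDTIEstimation()
    estimate.inputs.inputVolume = 'dwi.nhdr'
    estimate.inputs.enumeration = 'WLS'  # weighted least squares
    estimate.inputs.shiftNeg = True      # shift eigenvalues so all are positive
    print(estimate.cmdline)
    # estimate.run()  # would execute the CLI and collect the outputs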
class DiffusionTensorScalarMeasurementsInputSpec(CommandLineInputSpec):
inputVolume = File(position=-3, desc="Input DTI volume", exists=True, argstr="%s")
outputScalar = traits.Either(traits.Bool, File(), position=-1, hash_files=False, desc="Scalar volume derived from tensor", argstr="%s")
enumeration = traits.Enum("Trace", "Determinant", "RelativeAnisotropy", "FractionalAnisotropy", "Mode", "LinearMeasure", "PlanarMeasure", "SphericalMeasure", "MinEigenvalue", "MidEigenvalue", "MaxEigenvalue", "MaxEigenvalueProjectionX", "MaxEigenvalueProjectionY", "MaxEigenvalueProjectionZ", "RAIMaxEigenvecX", "RAIMaxEigenvecY", "RAIMaxEigenvecZ", "MaxEigenvecX", "MaxEigenvecY", "MaxEigenvecZ", "D11", "D22", "D33", "ParallelDiffusivity", "PerpendicularDffusivity", desc="An enumeration of strings", argstr="--enumeration %s")
class DiffusionTensorScalarMeasurementsOutputSpec(TraitedSpec):
outputScalar = File(position=-1, desc="Scalar volume derived from tensor", exists=True)
class DiffusionTensorScalarMeasurements(SEMLikeCommandLine):
"""title: Diffusion Tensor Scalar Measurements
category: Diffusion.Diffusion Tensor Images
    description: Compute a set of different scalar measurements from a tensor field, especially oriented toward Diffusion Tensors, where some rotationally invariant measurements, like Fractional Anisotropy, are widely used to describe the anisotropic behaviour of the tensor.
version: 0.1.0.$Revision: 1892 $(alpha)
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DiffusionTensorMathematics
contributor: Raul San Jose (SPL, BWH)
acknowledgements: LMI
"""
input_spec = DiffusionTensorScalarMeasurementsInputSpec
output_spec = DiffusionTensorScalarMeasurementsOutputSpec
_cmd = "DiffusionTensorScalarMeasurements "
_outputs_filenames = {'outputScalar': 'outputScalar.nii'}
class DTIexportInputSpec(CommandLineInputSpec):
inputTensor = File(position=-2, desc="Input DTI volume", exists=True, argstr="%s")
outputFile = traits.Either(traits.Bool, File(), position=-1, hash_files=False, desc="Output DTI file", argstr="%s")
class DTIexportOutputSpec(TraitedSpec):
outputFile = File(position=-1, desc="Output DTI file", exists=True)
class DTIexport(SEMLikeCommandLine):
"""title: DTIexport
category: Diffusion.Diffusion Data Conversion
description: Export DTI data to various file formats
version: 1.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/DTIExport
contributor: Sonia Pujol (SPL, BWH)
acknowledgements: This work is part of the National Alliance for Medical Image Computing (NA-MIC), funded by the National Institutes of Health through the NIH Roadmap for Medical Research, Grant U54 EB005149.
"""
input_spec = DTIexportInputSpec
output_spec = DTIexportOutputSpec
_cmd = "DTIexport "
_outputs_filenames = {'outputFile': 'outputFile'}
| {
"content_hash": "72b0f4c697816892e6c211cc545e2872",
"timestamp": "",
"source": "github",
"line_count": 370,
"max_line_length": 657,
"avg_line_length": 63.18378378378378,
"alnum_prop": 0.7595174950808452,
"repo_name": "FCP-INDI/nipype",
"id": "cb87deb4f5f33abf2807609537e14ce5a9414c5b",
"size": "23401",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "nipype/interfaces/slicer/diffusion/diffusion.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9823"
},
{
"name": "KiCad",
"bytes": "3797"
},
{
"name": "Makefile",
"bytes": "2063"
},
{
"name": "Matlab",
"bytes": "1717"
},
{
"name": "Python",
"bytes": "5280923"
},
{
"name": "Shell",
"bytes": "1958"
},
{
"name": "Tcl",
"bytes": "43408"
}
],
"symlink_target": ""
} |
from django.conf.urls.defaults import url, patterns, include
from .app.views import TestView
urlpatterns = patterns(
'',
    url('^', include('mob.urls', namespace='mob')),
url('^$', TestView.as_view()),
)
| {
"content_hash": "ae63432e504ff77e3de283c24652ba13",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 60,
"avg_line_length": 27.125,
"alnum_prop": 0.6497695852534562,
"repo_name": "caffeinehit/django-mob",
"id": "8edfa6f30a941ac06ac03514a611f2f072324514",
"size": "217",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "61760"
},
{
"name": "Shell",
"bytes": "104"
}
],
"symlink_target": ""
} |
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j ខែ F ឆ្នាំ Y'
TIME_FORMAT = 'G:i:s'
DATETIME_FORMAT = 'j ខែ F ឆ្នាំ Y, G:i:s'
# YEAR_MONTH_FORMAT =
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'j M Y'
SHORT_DATETIME_FORMAT = 'j M Y, G:i:s'
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
| {
"content_hash": "e48f6645426b704db7e88a0f6e313ff7",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 77,
"avg_line_length": 33.9,
"alnum_prop": 0.672566371681416,
"repo_name": "marcosmodesto/django-testapp",
"id": "7a39d92ead90c79fe2a82c80aea5240763597c70",
"size": "810",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "django/conf/locale/km/formats.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "176342"
},
{
"name": "Python",
"bytes": "9034888"
},
{
"name": "Shell",
"bytes": "643"
}
],
"symlink_target": ""
} |
import jmespath
import logging
import time
from botocore_eb.exceptions import WaiterError
logger = logging.getLogger(__name__)
class Waiter(object):
"""Wait for a resource to reach a certain state.
In addition to creating this class manually, you can
also use ``botocore.service.Service.get_waiter`` to
    create an instance of ``Waiter``.
The typical usage pattern is from a ``Service`` object::
ec2 = session.get_service('ec2')
p = ec2.get_operation('RunInstances').call(endpoint, **kwargs)[1]
instance_running = ec2.get_waiter('InstanceRunning')
instance_id = p['Reservations'][0]['Instances'][0]['InstanceId']
# This will block until the instance reaches a 'running' state.
instance_running.wait(instance_ids=[instance_id])
"""
def __init__(self, name, operation, config):
"""
:type name: str
:param name: The name of the waiter.
:type operation: ``botocore.operation.Operation``
:param operation: The operation associated with the waiter.
This is specified in the waiter configuration as the
``operation`` key.
:type config: dict
:param config: The waiter configuration.
"""
self.name = name
self.operation = operation
self.sleep_time = config['interval']
self.max_attempts = config['max_attempts']
self.success = self._process_config(config.get('success'))
self.failure = self._process_config(config.get('failure'))
def _process_config(self, acceptor_config):
if acceptor_config is None:
return {}
new_config = acceptor_config.copy()
if new_config['type'] == 'output' and \
new_config.get('path') is not None:
new_config['path'] = jmespath.compile(acceptor_config['path'])
return new_config
def wait(self, endpoint, **kwargs):
"""Wait until a resource reaches its success state.
Calling this method will block until the waiter reaches its
desired state. If the failure state is reached, a ``WaiterError``
is raised.
The ``**kwargs`` passed to this method will be forwarded to the
operation associated with the waiter.
:param endpoint: An instance of ``botocore.endpoint.Endpoint``.
"""
logger.debug("Waiter %s waiting.", self.name)
num_attempts = 0
while num_attempts < self.max_attempts:
http_response, parsed = self.operation.call(endpoint, **kwargs)
if self.success:
if self._matches_acceptor_state(self.success,
http_response, parsed):
# For the success state, if the acceptor matches then we
# break the loop.
break
if self.failure:
if self._matches_acceptor_state(self.failure,
http_response, parsed):
# For the failure state, if the acceptor matches then we
# raise an exception.
raise WaiterError(
name=self.name,
reason='Failure state matched one of: %s' %
', '.join(self.failure['value']))
logger.debug("No acceptor state reached for waiter %s, "
"attempt %s/%s, sleeping for: %s",
self.name, num_attempts, self.max_attempts,
self.sleep_time)
num_attempts += 1
time.sleep(self.sleep_time)
else:
error_msg = ("Max attempts (%s) exceeded for waiter %s without "
"reaching a terminal state."
% (self.max_attempts, self.name))
logger.debug(error_msg)
raise WaiterError(name=self.name, reason=error_msg)
def _matches_acceptor_state(self, acceptor, http_response, parsed):
if acceptor['type'] == 'output':
return self._matches_acceptor_output_type(acceptor, http_response,
parsed)
elif acceptor['type'] == 'error':
return self._matches_acceptor_error_type(acceptor, http_response,
parsed)
def _matches_acceptor_output_type(self, acceptor, http_response, parsed):
if 'path' not in acceptor and not self._get_error_codes_from_response(parsed):
# If there's no path specified, then a successful response means
# that we've matched the acceptor.
return True
match = acceptor['path'].search(parsed)
return self._path_matches_value(match, acceptor['value'])
def _path_matches_value(self, match, value):
# Determine if the matched data matches the config value.
if match is None:
return False
elif not isinstance(match, list):
# If match is not a list, then we need to perform an exact match,
# this is something like Table.TableStatus == 'CREATING'
return self._single_value_match(match, value)
        else:
            # ``match`` is a list here, so we need to ensure that every element
            # in ``match`` matches something in the ``value`` list.
            return all(self._single_value_match(element, value)
                       for element in match)
def _single_value_match(self, match, value):
for v in value:
if match == v:
return True
else:
return False
def _matches_acceptor_error_type(self, acceptor, http_response, parsed):
if http_response.status_code >= 400 and 'Errors' in parsed:
error_codes = self._get_error_codes_from_response(parsed)
for v in acceptor['value']:
if v in error_codes:
return True
return False
def _get_error_codes_from_response(self, parsed):
errors = set()
for error in parsed.get('Errors', []):
if 'Code' in error:
errors.add(error['Code'])
return errors
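# A hedged, self-contained sketch of driving ``Waiter`` with a stubbed
# operation, showing the acceptor config shape this module expects
# (``interval``/``max_attempts`` plus optional ``success``/``failure``
# acceptors). The state names and payloads below are invented for the demo.
def _example_waiter_with_stub():
    class _StubResponse(object):
        status_code = 200

    class _StubOperation(object):
        def __init__(self):
            # Pretend the resource becomes available on the third poll.
            self._states = iter(['pending', 'pending', 'available'])

        def call(self, endpoint, **kwargs):
            parsed = {'Table': {'TableStatus': next(self._states)}}
            return _StubResponse(), parsed

    config = {
        'interval': 0,        # no sleeping between the three attempts
        'max_attempts': 5,
        'success': {'type': 'output',
                    'path': 'Table.TableStatus',
                    'value': ['available']},
    }
    waiter = Waiter('TableExists', _StubOperation(), config)
    waiter.wait(endpoint=None)  # returns once TableStatus == 'available'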
| {
"content_hash": "5c977963bd07233d1c00b798ba69381d",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 86,
"avg_line_length": 40.0253164556962,
"alnum_prop": 0.5653067678684377,
"repo_name": "ianblenke/awsebcli",
"id": "6e920b060b9dd7faae0664e5d9172cc02b2d67ec",
"size": "6891",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "botocore_eb/waiter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groff",
"bytes": "208"
},
{
"name": "Makefile",
"bytes": "633"
},
{
"name": "Python",
"bytes": "3447856"
},
{
"name": "Shell",
"bytes": "280"
}
],
"symlink_target": ""
} |
import os
import time
from arnold import *
testNumber = "test_0001"
print("[FabricArnold::TestSuite] Generating reference image for {0}...".format(testNumber))
start = time.clock()  # note: time.clock() was removed in Python 3.8; time.perf_counter() is the modern equivalent
AiBegin()
# create a sphere
sphere = AiNode("sphere")
AiNodeSetStr(sphere, "name", "mysphere")
AiNodeSetFlt(sphere, "radius", 5.0)
# create a lambert shader
lambert = AiNode("lambert")
AiNodeSetStr(lambert, "name", "myshader")
AiNodeSetRGB(lambert, "Kd_color", 1.0, 0.0, 0.0)
# assign the sphere's shader
AiNodeSetPtr(sphere, "shader", lambert)
# create a perspective camera
camera = AiNode("persp_camera")
AiNodeSetStr(camera, "name", "mycamera")
AiNodeSetPnt(camera, "position", 0.0, 0.0, 20.0)
# create a point light
light = AiNode("point_light")
AiNodeSetStr(light, "name", "mylight")
AiNodeSetFlt(light, "exposure", 7.5)
AiNodeSetPnt(light, "position", 0.0, 10.0, 10.0)
# set render options
options = AiUniverseGetOptions()
AiNodeSetInt(options, "AA_samples", 1)
AiNodeSetInt(options, "xres", 320)
AiNodeSetInt(options, "yres", 240)
AiNodeSetPtr(options, "camera", camera)
# create an output driver
driver = AiNode("driver_jpeg")
AiNodeSetStr(driver, "name", "mydriver")
filename = os.path.join(os.getcwd(), testNumber, "reference.jpg")
AiNodeSetStr(driver, "filename", filename)
# create a gaussian filter node
gfilter = AiNode("gaussian_filter")
AiNodeSetStr(gfilter, "name", "myfilter");
# assign th driver and the filter to the outputs
outputs_array = AiArrayAllocate(1, 1, AI_TYPE_STRING)
AiArraySetStr(outputs_array, 0, "RGB RGB myfilter mydriver")
AiNodeSetArray(options, "outputs", outputs_array)
#AiArrayDestroy(outputs_array)
# render the scene
result = AiRender(AI_RENDER_MODE_CAMERA)
if result != AI_SUCCESS:
print("[FabricArnold::TestSuite] Error {0}".format(result))
AiEnd()
secs = time.clock() - start
print("Elapsed time: {0} seconds".format(secs))
| {
"content_hash": "9d45befdeb1f30c0afd4df343ae66b93",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 91,
"avg_line_length": 29.318181818181817,
"alnum_prop": 0.7085271317829457,
"repo_name": "wildparky/FabricArnold",
"id": "e093c8a942cffdaad7a3c1972f415bb1d9e61350",
"size": "2088",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_0001/reference.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import os
import sys
import hashlib
import configparser
import random
import binascii
ERR_PASSWD = "__err_passwd__"
INF_SERDOWN = "__info_serverdown__"
CL_HELLO = "__client_hello__"
CL_EXIT = "__client_bye__"
VERSION = '0.91.5 AES'
CODENAME = 'Pockenberg'
AUTHOR = 'Anna-Sophia Schroeck <[email protected]>'
PYMDString = 'pyMD ' + VERSION + ' ' + CODENAME
def get_hashfrompass(passwd):
    # Note: a fresh random salt is generated on every call and never stored,
    # so two calls with the same password produce different hashes; the hex
    # hash written to the config at first run is the canonical value.
    salt = str(random.getrandbits(128))
    dk = hashlib.sha256(str.encode(passwd + salt)).digest()
    return dk
def byteToHex(byteHash):
return binascii.hexlify(byteHash).decode("utf-8")
def hexToByte(hexHash):
return binascii.unhexlify(str.encode(hexHash))
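# Quick hedged sanity check of the hex helpers: a sha256 digest survives a
# byteToHex -> hexToByte round trip, which is what lets the binary hash be
# stored as text in the .ini files handled below.
def _example_hex_roundtrip():
    digest = hashlib.sha256(b"example").digest()
    assert hexToByte(byteToHex(digest)) == digest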
def get_config_path(file):
if sys.platform.startswith('linux'):
return "/etc/" + file
elif sys.platform.startswith('win'):
return file
elif sys.platform.startswith('darwin'):
return "/etc/" + file
def get_log_path():
if sys.platform.startswith('linux'):
return "/var/log/pymd.log"
elif sys.platform.startswith('win'):
return "pymd.log"
elif sys.platform.startswith('darwin'):
return "/var/log/pymd.log"
class client_config:
def __init__(self):
self.m_path = get_config_path("pyMDClient.ini")
self.m_config = configparser.ConfigParser()
if os.path.isfile(self.m_path) == True :
self.m_config.read(self.m_path)
else:
print("[Client First run] Create config")
host = input("Host: ")
port = input("Port: ")
has = input("hash: ")
self.m_config['client'] = {'hash': has,
'port': port,
'addr': host }
self.save()
self.m_config.read(self.m_path)
def save(self):
with open(self.m_path, 'w') as configfile:
self.m_config.write(configfile)
def get_server_port(self):
return int(self.m_config['client']['port'])
def get_server_addr(self):
return self.m_config['client']['addr']
def get_server_hash(self):
hexhash = self.m_config['client']['hash']
return hexToByte(hexhash)
class server_config:
def __init__(self):
self.m_path = get_config_path("pyMDServer.ini")
self.m_config = configparser.ConfigParser()
if os.path.isfile(self.m_path) == True :
self.m_config.read(self.m_path)
else:
print("[First run] Create config")
passwd = input("Please enter the server password: ")
temp = get_hashfrompass(passwd)
self.m_config['music'] = {'path': 'data',
'volume': '80',
'soundcard': '0'}
self.m_config['server'] = {'hash': byteToHex(temp),
'port': '8089',
'bind': 'localhost',
'loggingLevel': '0',
'loggingFile': get_log_path() }
self.save()
self.m_config.read(self.m_path)
def get_music_path(self):
return self.m_config['music']['path']
def get_music_volume(self):
return int(self.m_config['music']['volume'])
def get_server_hash(self):
hexhash = self.m_config['server']['hash']
return hexToByte(hexhash)
def get_server_port(self):
return int(self.m_config['server']['port'])
def get_server_addr(self):
return self.m_config['server']['bind']
def get_server_loggingLevel(self):
return int(self.m_config['server']['loggingLevel'])
def get_server_loggingFile(self):
return self.m_config['server']['loggingFile']
def set_music_path(self, path):
self.m_config['music']['path'] = path
def set_music_volume(self, volume):
self.m_config['music']['volume'] = volume
    def set_server_pass(self, passwd):
        # hex-encode the digest so configparser can store it as text and
        # get_server_hash can decode it again
        self.m_config['server']['hash'] = byteToHex(get_hashfrompass(passwd))
def set_server_port(self, port):
self.m_config['server']['port'] = port
def save(self):
with open(self.m_path, 'w') as configfile:
self.m_config.write(configfile)
| {
"content_hash": "4a932a44a3e99031e3424cd658801ac4",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 70,
"avg_line_length": 33.828125,
"alnum_prop": 0.5480369515011547,
"repo_name": "RoseLeBlood/pyMD",
"id": "daef02842c805ecf989f0947c1c06e8fb93cc57f",
"size": "5222",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyMD.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "352501"
}
],
"symlink_target": ""
} |
import string
import types
from module_info import *
from module_mission_templates import *
from process_common import *
from process_operations import *
mission_template_name_pos = 0
mission_template_flags_pos = 1
mission_template_types_pos = 2
mission_template_desc_pos = 3
mission_template_groups_pos =4
mission_template_triggers_pos = 5
def save_triggers(file,template_name,triggers,variable_list,variable_uses,tag_uses,quick_strings):
file.write("%d\n"%len(triggers))
for i in xrange(len(triggers)):
trigger = triggers[i]
file.write("%f %f %f "%(trigger[trigger_check_pos],trigger[trigger_delay_pos],trigger[trigger_rearm_pos]))
save_statement_block(file, 0, 1, trigger[trigger_conditions_pos] , variable_list,variable_uses,tag_uses,quick_strings)
save_statement_block(file, 0, 1, trigger[trigger_consequences_pos], variable_list,variable_uses,tag_uses,quick_strings)
file.write("\n")
file.write("\n")
def save_mission_template_group(file,entry):
if (len(entry[5]) > 8):
print "ERROR: Too many item_overrides!"
error()
file.write("%d %d %d %d %d %d "%(entry[0],entry[1],entry[2],entry[3],entry[4], len(entry[5])))
for item_override in entry[5]:
add_tag_use(tag_uses,tag_item,item_override)
file.write("%d "%(item_override))
file.write("\n")
def save_mission_templates(variables,variable_uses,tag_uses,quick_strings):
file = open(export_dir + "mission_templates.txt","w")
file.write("missionsfile version 1\n")
file.write(" %d\n"%(len(mission_templates)))
for mission_template in mission_templates:
file.write("mst_%s %s %d "%(convert_to_identifier(mission_template[mission_template_name_pos]),convert_to_identifier(mission_template[mission_template_name_pos]),mission_template[mission_template_flags_pos]))
file.write(" %d\n"%(mission_template[mission_template_types_pos]))
file.write("%s \n"%(string.replace(mission_template[mission_template_desc_pos]," ","_")))
file.write("\n%d "%len(mission_template[mission_template_groups_pos]))
for group in mission_template[mission_template_groups_pos]:
save_mission_template_group(file,group)
save_triggers(file,convert_to_identifier(mission_template[mission_template_name_pos]), mission_template[mission_template_triggers_pos],variables,variable_uses,tag_uses,quick_strings)
file.write("\n")
file.close()
def save_python_header():
file = open("./ID_mission_templates.py","w")
for i_mission_template in xrange(len(mission_templates)):
file.write("mst_%s = %d\n"%(mission_templates[i_mission_template][0],i_mission_template))
file.close()
print "Exporting mission_template data..."
save_python_header()
variable_uses = []
variables = load_variables(export_dir, variable_uses)
tag_uses = load_tag_uses(export_dir)
quick_strings = load_quick_strings(export_dir)
save_mission_templates(variables,variable_uses,tag_uses,quick_strings)
save_variables(export_dir,variables,variable_uses)
save_tag_uses(export_dir,tag_uses)
save_quick_strings(export_dir,quick_strings)
#print "Finished."
"""
(
"defense_1",mtf_battle_mode,stand_fast,
"You wait, your weapons ready, your senses alert. Some of your companions start to sing an old song, trying to forget their fear. You smile and let your thoughts wander away for a second. Then the lookout's cry shatters the melancholy: 'Enemies! They are coming'",
[
(2,mtef_leader_only,0,group(1),1),
(2,mtef_no_leader,0,group(1),3),
(0,mtef_no_leader,0,group(1),0),
(4,mtef_enemy_party|mtef_reverse_order,0,group(2)|aif_start_alarmed,5)],
[],
[
(ti_val(0), ti_val(0), ti_once, [[mission_cookie_eq,0,0],[store_mission_timer_a,1],[ge,reg(1),10],[store_enemy_count,2],[lt,reg(2),3]], [[add_reinforcements_to_entry,3,3],[reset_mission_timer_a],[increment_mission_cookie,0]]),
(ti_val(0), ti_val(0), ti_once, [[mission_cookie_eq,0,1],[store_mission_timer_a,1],[ge,reg(1),10],[store_enemy_count,2],[lt,reg(2),3]], [[add_reinforcements_to_entry,3,3],[reset_mission_timer_a],[increment_mission_cookie,0]]),
(ti_val(0), ti_val(0), ti_once, [[mission_cookie_eq,5,0],[store_mission_timer_b,1],[ge,reg(1),10],[store_friend_count,2],[lt,reg(2),3]], [[add_reinforcements_to_entry,2,2],[reset_mission_timer_b],[increment_mission_cookie,5]]),
(ti_val(0), ti_val(0), ti_once, [[mission_cookie_eq,5,1],[store_mission_timer_b,1],[ge,reg(1),10],[store_friend_count,2],[lt,reg(2),3]], [[add_reinforcements_to_entry,2,2],[reset_mission_timer_b],[increment_mission_cookie,5]]),
(ti_val(0), ti_val(0), ti_once, [[all_enemies_defeated,2]], [[set_mission_result,1] ,[finish_mission,1]]),
(ti_val(0), ti_val(0), ti_once, [[main_hero_fallen]], [[set_mission_result,-1],[finish_mission,1]]),
],
),
(
"chase_1",mtf_battle_mode,chase,
"You close up on the enemy thinking that this will be an easy victory. But as you come within shooting range, enemy fighters stop fleeing and turn to meet you. Perhaps they are experienced enough to know that they can't outrun you. Or perhaps their warrior pride triumphed over their fear. Whatever it is, these men now seem to be willing to put up a fight and your job will not be easy",
[(0,mtef_leader_only,0,group(1),1),(0,mtef_regulars_only,0,group(1),1),(1,mtef_regulars_only|mtef_enemy_party,0,aisb_hit_run,1),(2,mtef_regulars_only|mtef_enemy_party,0,0,2),(3,mtef_regulars_only|mtef_enemy_party,0,0,1),(4,mtef_regulars_only|mtef_enemy_party,0,group(2),2)],
[],
[
(ti_val(0), ti_val(0), ti_once, [[all_enemies_defeated,2]], [[set_mission_result,1] ,[finish_mission,1]]),
(ti_val(0), ti_val(0), ti_once, [[main_hero_fallen]], [[set_mission_result,-1],[finish_mission,1]]),
],
),
(
"slip_siege_fight_1",mtf_battle_mode,retreat_fight,
"You lead your retreating party through a back road which, oddly enough, seems to be completely unguarded.\
You decide to proceed with extreme caution. Very soon, you spot a movement in the bushes ahead.\
Not taking any chances, you send an arrow into the bushes. The arrow sinks behind leaves and immediately produces\
a sharp cry followed by some heavy cursing. Within seconds, a score of armored warriors rush out of hiding places.",
[(2,mtef_leader_only,0,group(1),1),(2,mtef_no_leader,0,group(1)|aif_start_alarmed,3),(3,mtef_enemy_party|mtef_reverse_order,0,aif_start_alarmed,6)],
# [(0,mtef_leader_only,0,group(1),1),(0,mtef_regulars_only,0,group(1),2|mtnf_const|mtnf_rand),(3,mtef_regulars_only|mtef_enemy_party,0,aif_start_alarmed,2|mtnf_const),(4,mtef_regulars_only|mtef_enemy_party,0,aif_start_alarmed,2|mtnf_const|mtnf_rand)],
[],
[
(ti_val(0), ti_val(0), ti_once, [[all_enemies_defeated,3]], [[set_mission_result,1] ,[finish_mission,1]]),
(ti_val(0), ti_val(0), ti_once, [[main_hero_fallen]], [[set_mission_result,-1],[finish_mission,1]]),
],
),
"""
| {
"content_hash": "1fb4cbb17f3aa1bf362d9fc6ad93879a",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 392,
"avg_line_length": 61.29824561403509,
"alnum_prop": 0.6853176874642244,
"repo_name": "nycz/useful-sisters",
"id": "80360c483f168169a18da16cdc82047b2a71bcd7",
"size": "6988",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "process_mission_tmps.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8309324"
},
{
"name": "Shell",
"bytes": "1950"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Main', '0004_auto_20170903_2029'),
]
operations = [
migrations.RenameField(
model_name='article',
old_name='category_id',
new_name='category',
),
migrations.RenameField(
model_name='category',
old_name='parent_id',
new_name='parent',
),
migrations.RemoveField(
model_name='category',
name='id',
),
migrations.AlterField(
model_name='category',
name='name',
field=models.CharField(max_length=20, primary_key=True, serialize=False),
),
]
| {
"content_hash": "623ffa819e933bc929a0fb5621eda97f",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 85,
"avg_line_length": 25,
"alnum_prop": 0.53375,
"repo_name": "Lobo-Prix/Blog",
"id": "be124357e070e6961fbbdc247b6a56c39a22483d",
"size": "873",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Blog/Main/migrations/0005_auto_20170904_2024.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "52094"
},
{
"name": "HTML",
"bytes": "10652"
},
{
"name": "JavaScript",
"bytes": "110504"
},
{
"name": "Python",
"bytes": "22188"
}
],
"symlink_target": ""
} |
import mock
import re
from stronghold import conf
from stronghold.middleware import LoginRequiredMiddleware
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.test import TestCase
from django.test.client import RequestFactory
class StrongholdMiddlewareTestCase(TestCase):
def test_public_view_is_public(self):
response = self.client.get(reverse('public_view'))
self.assertEqual(response.status_code, 200)
def test_private_view_is_private(self):
response = self.client.get(reverse('protected_view'))
self.assertEqual(response.status_code, 302)
class LoginRequiredMiddlewareTests(TestCase):
def setUp(self):
self.middleware = LoginRequiredMiddleware()
self.request = RequestFactory().get('/test-protected-url/')
self.request.user = mock.Mock()
self.kwargs = {
'view_func': HttpResponse,
'view_args': [],
'view_kwargs': {},
'request': self.request,
}
def test_redirects_to_login_when_not_authenticated(self):
self.request.user.is_authenticated.return_value = False
response = self.middleware.process_view(**self.kwargs)
self.assertEqual(response.status_code, 302)
def test_returns_none_when_authenticated(self):
self.request.user.is_authenticated.return_value = True
response = self.middleware.process_view(**self.kwargs)
self.assertEqual(response, None)
def test_returns_none_when_url_is_in_public_urls(self):
self.request.user.is_authenticated.return_value = False
self.middleware.public_view_urls = [re.compile(r'/test-protected-url/')]
response = self.middleware.process_view(**self.kwargs)
self.assertEqual(response, None)
def test_returns_none_when_url_is_decorated_public(self):
self.request.user.is_authenticated.return_value = False
self.kwargs['view_func'].STRONGHOLD_IS_PUBLIC = True
response = self.middleware.process_view(**self.kwargs)
self.assertEqual(response, None)
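# For reference, a hedged sketch of the two ways a view is typically made
# public in a stronghold-protected project (the view and URL names here are
# invented for illustration):
#
#     # settings.py
#     STRONGHOLD_PUBLIC_URLS = (r'^/about/$',)
#
#     # views.py
#     from stronghold.decorators import public
#
#     @public
#     def about(request):
#         ...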
| {
"content_hash": "fca40a8f5b41ba8e005cc3696421927b",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 80,
"avg_line_length": 31.432835820895523,
"alnum_prop": 0.6885090218423552,
"repo_name": "klenks/jobsportal",
"id": "51e1d5320108a80d288c258e051fa0aa8c03feb6",
"size": "2106",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "venv/lib/python2.7/site-packages/stronghold/tests/testmiddleware.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "138702"
},
{
"name": "HTML",
"bytes": "158529"
},
{
"name": "JavaScript",
"bytes": "250743"
},
{
"name": "Python",
"bytes": "7450092"
},
{
"name": "Shell",
"bytes": "3234"
}
],
"symlink_target": ""
} |
from interviewcake.tree_binary import *
def largest(node):
while node.right:
node = node.right
return node
def second_largest(node):
    # Walk toward the largest node, remembering its parent; if the largest
    # node has a left subtree the answer is that subtree's largest element,
    # otherwise it is the remembered parent.
prev_largest = None
while node:
if node.right:
prev_largest = node
node = node.right
continue
if node.left:
return largest(node.left)
else:
break
return prev_largest
def kth_largest(k, node):
    # Iterative reverse in-order traversal (right, node, left) visits nodes
    # from largest to smallest; the k-th node visited is the answer.
if node is None:
return None
stack = []
while node or len(stack) != 0:
if node:
stack.append(node)
node = node.right
else:
node = stack.pop()
k -= 1
if k == 0:
return node
node = node.left
return node
def kth_smallest(k, node):
    # Mirror of kth_largest: a plain in-order traversal (left, node, right)
    # visits nodes from smallest to largest.
if node is None:
return None
stack = []
while node or len(stack) != 0:
if node:
stack.append(node)
node = node.left
else:
node = stack.pop()
k -= 1
if k == 0:
return node
node = node.right
return node
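# Hedged demo: the imported tree_binary module isn't shown here, so this
# sketch builds its own minimal node type with the same .left/.right shape
# the functions above rely on.
class _DemoNode(object):
    def __init__(self, value):
        self.value = value
        self.left = None
        self.right = None


def _example_bst_queries():
    #        5
    #       / \
    #      3   8
    #     /   / \
    #    1   7   9
    root = _DemoNode(5)
    root.left = _DemoNode(3)
    root.left.left = _DemoNode(1)
    root.right = _DemoNode(8)
    root.right.left = _DemoNode(7)
    root.right.right = _DemoNode(9)

    assert largest(root).value == 9
    assert second_largest(root).value == 8
    assert kth_largest(3, root).value == 7
    assert kth_smallest(2, root).value == 3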
| {
"content_hash": "3211e9d36a98179074ec0daf63d50fcd",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 39,
"avg_line_length": 18.62295081967213,
"alnum_prop": 0.4744718309859155,
"repo_name": "JDFagan/InterviewInPython",
"id": "6aa12d5c21bc6ea8f18af872b7501ecf5ba7ec66",
"size": "1136",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "interviewcake/tree_kth_largest_smallest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "38742"
},
{
"name": "Python",
"bytes": "113273"
}
],
"symlink_target": ""
} |
from collections import OrderedDict
from glob import glob
import os
from agate.table import Table
@classmethod
def from_csv(cls, dir_path, column_names=None, column_types=None, row_names=None, header=True, **kwargs):
"""
Create a new :class:`TableSet` from a directory of CSVs.
See :meth:`.Table.from_csv` for additional details.
:param dir_path:
Path to a directory full of CSV files. All CSV files in this
directory will be loaded.
:param column_names:
See :meth:`Table.__init__`.
:param column_types:
See :meth:`Table.__init__`.
:param row_names:
See :meth:`Table.__init__`.
:param header:
See :meth:`Table.from_csv`.
"""
from agate.tableset import TableSet
if not os.path.isdir(dir_path):
raise IOError('Specified path doesn\'t exist or isn\'t a directory.')
tables = OrderedDict()
for path in glob(os.path.join(dir_path, '*.csv')):
        # splitext drops only the extension; str.strip('.csv') would also eat
        # leading/trailing 'c', 's' and 'v' characters from the name itself
        name = os.path.splitext(os.path.split(path)[1])[0]
tables[name] = Table.from_csv(path, column_names, column_types, row_names=row_names, header=header, **kwargs)
return TableSet(tables.values(), tables.keys())
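if __name__ == '__main__':
    # Hedged runnable sketch: build a throwaway directory holding one CSV and
    # load it back through the public API (this function is installed on
    # ``agate.TableSet`` by the package).
    import tempfile
    import agate

    demo_dir = tempfile.mkdtemp()
    with open(os.path.join(demo_dir, 'demo.csv'), 'w') as f:
        f.write('letter,number\na,1\nb,2\n')

    tableset = agate.TableSet.from_csv(demo_dir)
    print(list(tableset.keys()))  # ['demo'] -- the table is named after the file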
| {
"content_hash": "4e61c8298cea88f8e22b1e5ed3a0787e",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 117,
"avg_line_length": 30.17948717948718,
"alnum_prop": 0.6448598130841121,
"repo_name": "flother/agate",
"id": "81af9b490e678ece4bc591534ae7bfd36ae3dc2e",
"size": "1200",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "agate/tableset/from_csv.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "165242"
}
],
"symlink_target": ""
} |
from msrest.serialization import Model
from msrest.exceptions import HttpOperationError
class Error(Model):
"""Error.
:param status:
:type status: int
:param message:
:type message: str
:param parent_error:
:type parent_error: :class:`Error
<Fixtures.AcceptanceTestsModelFlattening.models.Error>`
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'int'},
'message': {'key': 'message', 'type': 'str'},
'parent_error': {'key': 'parentError', 'type': 'Error'},
}
def __init__(self, status=None, message=None, parent_error=None):
self.status = status
self.message = message
self.parent_error = parent_error
class ErrorException(HttpOperationError):
"""Server responsed with exception of type: 'Error'.
:param deserialize: A deserializer
:param response: Server response to be deserialized.
"""
def __init__(self, deserialize, response, *args):
super(ErrorException, self).__init__(deserialize, response, 'Error', *args)
| {
"content_hash": "ff98952635120bcf929a7bb372f7fc5b",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 83,
"avg_line_length": 27.92105263157895,
"alnum_prop": 0.6352497643732328,
"repo_name": "tbombach/autorest",
"id": "f4119b0cfaede686ea7bc55626769aa3d516a9b7",
"size": "1535",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/ModelFlattening/autorestresourceflatteningtestservice/models/error.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "12942"
},
{
"name": "C#",
"bytes": "12920965"
},
{
"name": "CSS",
"bytes": "110"
},
{
"name": "Go",
"bytes": "142529"
},
{
"name": "HTML",
"bytes": "274"
},
{
"name": "Java",
"bytes": "7359188"
},
{
"name": "JavaScript",
"bytes": "4759348"
},
{
"name": "PowerShell",
"bytes": "44986"
},
{
"name": "Python",
"bytes": "2302507"
},
{
"name": "Ruby",
"bytes": "302089"
},
{
"name": "Shell",
"bytes": "423"
},
{
"name": "TypeScript",
"bytes": "179578"
}
],
"symlink_target": ""
} |
"""
pygments.lexers.textfmts
~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for various text formats.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Generic, Literal
from pygments.util import ClassNotFound
__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer']
class IrcLogsLexer(RegexLexer):
"""
Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
"""
name = 'IRC logs'
aliases = ['irc']
filenames = ['*.weechatlog']
mimetypes = ['text/x-irclog']
flags = re.VERBOSE | re.MULTILINE
timestamp = r"""
(
# irssi / xchat and others
(?: \[|\()? # Opening bracket or paren for the timestamp
(?: # Timestamp
(?: (?:\d{1,4} [-/])* # Date as - or /-separated groups of digits
(?:\d{1,4})
[T ])? # Date/time separator: T or space
(?: \d?\d [:.])* # Time as :/.-separated groups of 1 or 2 digits
(?: \d?\d)
)
(?: \]|\))?\s+ # Closing bracket or paren for the timestamp
|
# weechat
\d{4}\s\w{3}\s\d{2}\s # Date
\d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
|
# xchat
\w{3}\s\d{2}\s # Date
\d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
)?
"""
tokens = {
'root': [
# log start/end
(r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
# hack
("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
# normal msgs
("^" + timestamp + r"""
(\s*<.*?>\s*) # Nick """,
bygroups(Comment.Preproc, Name.Tag), 'msg'),
# /me msgs
("^" + timestamp + r"""
(\s*[*]\s+) # Star
(\S+\s+.*?\n) # Nick + rest of message """,
bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
# join/part msgs
("^" + timestamp + r"""
(\s*(?:\*{3}|<?-[!@=P]?->?)\s*) # Star(s) or symbols
(\S+\s+) # Nick + Space
(.*?\n) # Rest of message """,
bygroups(Comment.Preproc, Keyword, String, Comment)),
(r"^.*?\n", Text),
],
'msg': [
(r"\S+:(?!//)", Name.Attribute), # Prefix
(r".*\n", Text, '#pop'),
],
}
class GettextLexer(RegexLexer):
"""
Lexer for Gettext catalog files.
.. versionadded:: 0.9
"""
name = 'Gettext Catalog'
aliases = ['pot', 'po']
filenames = ['*.pot', '*.po']
mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
tokens = {
'root': [
(r'^#,\s.*?$', Keyword.Type),
(r'^#:\s.*?$', Keyword.Declaration),
# (r'^#$', Comment),
(r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
(r'^(")([A-Za-z-]+:)(.*")$',
bygroups(String, Name.Property, String)),
(r'^".*"$', String),
(r'^(msgid|msgid_plural|msgstr|msgctxt)(\s+)(".*")$',
bygroups(Name.Variable, Text, String)),
(r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
]
}
class HttpLexer(RegexLexer):
"""
Lexer for HTTP sessions.
.. versionadded:: 1.5
"""
name = 'HTTP'
aliases = ['http']
flags = re.DOTALL
def get_tokens_unprocessed(self, text, stack=('root',)):
"""Reset the content-type state."""
self.content_type = None
return RegexLexer.get_tokens_unprocessed(self, text, stack)
def header_callback(self, match):
if match.group(1).lower() == 'content-type':
content_type = match.group(5).strip()
if ';' in content_type:
content_type = content_type[:content_type.find(';')].strip()
self.content_type = content_type
yield match.start(1), Name.Attribute, match.group(1)
yield match.start(2), Text, match.group(2)
yield match.start(3), Operator, match.group(3)
yield match.start(4), Text, match.group(4)
yield match.start(5), Literal, match.group(5)
yield match.start(6), Text, match.group(6)
def continuous_header_callback(self, match):
yield match.start(1), Text, match.group(1)
yield match.start(2), Literal, match.group(2)
yield match.start(3), Text, match.group(3)
def content_callback(self, match):
content_type = getattr(self, 'content_type', None)
content = match.group()
offset = match.start()
if content_type:
from pygments.lexers import get_lexer_for_mimetype
possible_lexer_mimetypes = [content_type]
if '+' in content_type:
# application/calendar+xml can be treated as application/xml
# if there's not a better match.
general_type = re.sub(r'^(.*)/.*\+(.*)$', r'\1/\2',
content_type)
possible_lexer_mimetypes.append(general_type)
for i in possible_lexer_mimetypes:
try:
lexer = get_lexer_for_mimetype(i)
except ClassNotFound:
pass
else:
for idx, token, value in lexer.get_tokens_unprocessed(content):
yield offset + idx, token, value
return
yield offset, Text, content
tokens = {
'root': [
(r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
r'(HTTP)(/)(1\.[01])(\r?\n|\Z)',
bygroups(Name.Function, Text, Name.Namespace, Text,
Keyword.Reserved, Operator, Number, Text),
'headers'),
(r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)',
bygroups(Keyword.Reserved, Operator, Number, Text, Number,
Text, Name.Exception, Text),
'headers'),
],
'headers': [
(r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)', header_callback),
(r'([\t ]+)([^\r\n]+)(\r?\n|\Z)', continuous_header_callback),
(r'\r?\n', Text, 'content')
],
'content': [
(r'.+', content_callback)
]
}
def analyse_text(text):
return text.startswith(('GET /', 'POST /', 'PUT /', 'DELETE /', 'HEAD /',
'OPTIONS /', 'TRACE /', 'PATCH /'))
class TodotxtLexer(RegexLexer):
"""
Lexer for `Todo.txt <http://todotxt.com/>`_ todo list format.
.. versionadded:: 2.0
"""
name = 'Todotxt'
aliases = ['todotxt']
# *.todotxt is not a standard extension for Todo.txt files; including it
# makes testing easier, and also makes autodetecting file type easier.
filenames = ['todo.txt', '*.todotxt']
mimetypes = ['text/x-todo']
# Aliases mapping standard token types of Todo.txt format concepts
CompleteTaskText = Operator # Chosen to de-emphasize complete tasks
IncompleteTaskText = Text # Incomplete tasks should look like plain text
# Priority should have most emphasis to indicate importance of tasks
Priority = Generic.Heading
# Dates should have next most emphasis because time is important
Date = Generic.Subheading
# Project and context should have equal weight, and be in different colors
Project = Generic.Error
Context = String
# If tag functionality is added, it should have the same weight as Project
# and Context, and a different color. Generic.Traceback would work well.
# Regex patterns for building up rules; dates, priorities, projects, and
# contexts are all atomic
# TODO: Make date regex more ISO 8601 compliant
date_regex = r'\d{4,}-\d{2}-\d{2}'
priority_regex = r'\([A-Z]\)'
project_regex = r'\+\S+'
context_regex = r'@\S+'
# Compound regex expressions
complete_one_date_regex = r'(x )(' + date_regex + r')'
complete_two_date_regex = (complete_one_date_regex + r'( )(' +
date_regex + r')')
priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')'
tokens = {
# Should parse starting at beginning of line; each line is a task
'root': [
# Complete task entry points: two total:
# 1. Complete task with two dates
(complete_two_date_regex, bygroups(CompleteTaskText, Date,
CompleteTaskText, Date),
'complete'),
# 2. Complete task with one date
(complete_one_date_regex, bygroups(CompleteTaskText, Date),
'complete'),
# Incomplete task entry points: six total:
# 1. Priority plus date
(priority_date_regex, bygroups(Priority, IncompleteTaskText, Date),
'incomplete'),
# 2. Priority only
(priority_regex, Priority, 'incomplete'),
# 3. Leading date
(date_regex, Date, 'incomplete'),
# 4. Leading context
(context_regex, Context, 'incomplete'),
# 5. Leading project
(project_regex, Project, 'incomplete'),
# 6. Non-whitespace catch-all
            (r'\S+', IncompleteTaskText, 'incomplete'),
],
# Parse a complete task
'complete': [
# Newline indicates end of task, should return to root
(r'\s*\n', CompleteTaskText, '#pop'),
# Tokenize contexts and projects
(context_regex, Context),
(project_regex, Project),
# Tokenize non-whitespace text
            (r'\S+', CompleteTaskText),
            # Tokenize whitespace not containing a newline
            (r'\s+', CompleteTaskText),
],
# Parse an incomplete task
'incomplete': [
# Newline indicates end of task, should return to root
(r'\s*\n', IncompleteTaskText, '#pop'),
# Tokenize contexts and projects
(context_regex, Context),
(project_regex, Project),
# Tokenize non-whitespace text
            (r'\S+', IncompleteTaskText),
            # Tokenize whitespace not containing a newline
            (r'\s+', IncompleteTaskText),
],
}
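# A hedged, self-contained demo of one of the lexers above: feed a tiny HTTP
# response through ``HttpLexer`` and dump its token stream; the JSON body is
# delegated to whichever lexer is registered for ``application/json``.
if __name__ == '__main__':
    _sample = (
        'HTTP/1.1 200 OK\r\n'
        'Content-Type: application/json\r\n'
        '\r\n'
        '{"ok": true}'
    )
    for _token, _value in HttpLexer().get_tokens(_sample):
        print('%s %r' % (_token, _value))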
| {
"content_hash": "59e2c24789d707b3e3f986718c5f5b5d",
"timestamp": "",
"source": "github",
"line_count": 296,
"max_line_length": 89,
"avg_line_length": 36.58108108108108,
"alnum_prop": 0.5026782415958626,
"repo_name": "GarySparrow/mFlaskWeb",
"id": "cab9add544648d63129cb8d0632b7f412caa6712",
"size": "10852",
"binary": false,
"copies": "21",
"ref": "refs/heads/master",
"path": "venv/Lib/site-packages/pygments/lexers/textfmts.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1463"
},
{
"name": "C",
"bytes": "409372"
},
{
"name": "C++",
"bytes": "135910"
},
{
"name": "CSS",
"bytes": "270728"
},
{
"name": "HTML",
"bytes": "49354"
},
{
"name": "JavaScript",
"bytes": "148315"
},
{
"name": "Mako",
"bytes": "5339"
},
{
"name": "PowerShell",
"bytes": "8175"
},
{
"name": "Python",
"bytes": "15755700"
},
{
"name": "Shell",
"bytes": "133"
},
{
"name": "Tcl",
"bytes": "1285363"
}
],
"symlink_target": ""
} |
"""
setup.py for mock.matchers
"""
__author__ = "Ali-Akber Saifee"
__email__ = "[email protected]"
__copyright__ = "Copyright 2014, Ali-Akber Saifee"
from setuptools import setup, find_packages
import os
this_dir = os.path.abspath(os.path.dirname(__file__))
# materialize the requirement list eagerly so install_requires gets a real
# list on both Python 2 and Python 3 (filter() is lazy on Python 3)
REQUIREMENTS = [line for line in open(
    os.path.join(this_dir, 'requirements', 'main.txt')).read().splitlines() if line]
import versioneer
versioneer.versionfile_source = "mock_matchers/_version.py"
versioneer.versionfile_build = "mock_matchers/version.py"
versioneer.tag_prefix = ""
versioneer.parentdir_prefix = "mock_matchers-"
setup(
name='mock_matchers',
author=__author__,
author_email=__email__,
license=open("LICENSE.txt").read(),
url="https://github.com/alisaifee/mock-matchers",
zip_safe=False,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
install_requires=REQUIREMENTS,
classifiers=[k for k in open('CLASSIFIERS').read().split('\n') if k],
description='hamcrest matchers for mock assertions',
long_description=open('README.rst').read(),
packages=find_packages(exclude=["tests*"]),
)
| {
"content_hash": "6593efbad7c5326b6ae8d5eadf084dd5",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 76,
"avg_line_length": 29.605263157894736,
"alnum_prop": 0.696,
"repo_name": "alisaifee/mock-matchers",
"id": "da9489c111add49869f9b1a137c86188ea3eb157",
"size": "1125",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "32925"
},
{
"name": "Shell",
"bytes": "207"
}
],
"symlink_target": ""
} |
import argparse
import json
import yaml
from typing import Dict, List
skipped_phi_api_list_file = "/tools/infrt/skipped_phi_api.json"
api_yaml_file = "/paddle/phi/api/yaml/api.yaml"
legacy_api_yaml_file = "/paddle/phi/api/yaml/legacy_api.yaml"
def get_skipped_kernel_list():
    skipped_kernel_list = []
    with open(skipped_phi_api_list_file, 'r') as f:
        skipped_api_list = json.load(f)
    infer_meta_data = []
    api_meta_data = get_api_yaml_info(api_yaml_file)
    legacy_api_meta_data = get_api_yaml_info(legacy_api_yaml_file)
    if api_meta_data:
        infer_meta_data.extend(api_meta_data)
    if legacy_api_meta_data:
        infer_meta_data.extend(legacy_api_meta_data)
    for api in infer_meta_data:
        if "kernel" not in api or "infer_meta" not in api:
            continue
        if api["op"] in skipped_api_list["phi_apis"]:
            skipped_kernel_list.append(api["kernel"]["func"])
    skipped_kernel_list += skipped_api_list["phi_kernels"]
    return skipped_kernel_list
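# The skipped-API file is expected to have this shape (inferred from the
# accesses above; the entries themselves are illustrative):
#   {"phi_apis": ["matmul", ...], "phi_kernels": ["matmul_grad", ...]}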
def parse_args():
parser = argparse.ArgumentParser("gather phi kernel and infermate info")
parser.add_argument(
"--paddle_root_path",
type=str,
required=True,
help="root path of paddle src[WORK_PATH/Paddle].",
)
parser.add_argument(
"--kernel_info_file",
type=str,
required=True,
help="kernel info file generated by get_phi_kernel_function.sh.",
)
parser.add_argument(
"--infermeta_wrap_file",
type=str,
required=True,
help="inferMeta wrap info file.",
)
parser.add_argument(
"--attr_info_file", type=str, required=True, help="attr info file."
)
parser.add_argument(
"--generate_file",
type=str,
required=True,
default="../paddle/infrt/kernel/phi/infershaped/infershaped_kernel_launchers.cc",
help="generated file.",
)
args = parser.parse_args()
return args
def get_api_yaml_info(file_path):
    with open(file_path, "r") as f:
        return yaml.load(f.read(), Loader=yaml.FullLoader)
def get_kernel_info(file_path):
    with open(file_path, "r") as f:
        cont = f.readlines()
    ret = []
    prev = []
    for line in cont:
        info = line.strip().split()
        if not info:
            continue
        if len(prev) == 0:
            ret.append(line.strip())
            prev = info
            continue
        # Consecutive lines describing the same kernel/backend pair collapse
        # to the most recent one.
        if prev[0] == info[0] and prev[1] == info[1]:
            ret.pop()
        ret.append(line.strip())
        prev = info
    return ret
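# Example (illustrative lines): records sharing the first two fields collapse
# to the most recent one, so
#   "add CPU ALL_LAYOUT AddKernel float"
#   "add CPU ALL_LAYOUT AddKernel float double"
# yields ["add CPU ALL_LAYOUT AddKernel float double"].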
def get_infermeta_info(file_path):
f = open(file_path, "r")
cont = f.readlines()
return [l.strip() for l in cont if l.strip() != ""]
def get_attr_info(file_path):
    """
    Each line maps a kernel key to a $-separated attribute list, e.g.
    phi_gpu.argsort.float64.any $axisBool$descending
    """
ret = {}
with open(file_path, 'r') as f:
cont = f.readlines()
for l in cont:
datas = l.strip().split(' ')
if len(datas) == 2:
attrs = datas[1].split('$')
ret[datas[0]] = attrs[1:]
else:
ret[datas[0]] = None
return ret
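# Example (illustrative): the docstring line above parses to
#   {"phi_gpu.argsort.float64.any": ["axisBool", "descending"]}
# and a line without a $-separated column maps its key to None.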
def merge(infer_meta_data, kernel_data, wrap_data):
meta_map = {}
for api in infer_meta_data:
if "kernel" not in api or "infer_meta" not in api:
continue
meta_map[api["kernel"]["func"]] = api["infer_meta"]["func"]
wrap_map = {}
for l in wrap_data:
wrap_map[l.split()[0]] = l.split()[1]
full_kernel_data = []
for l in kernel_data:
key = l.split()[0]
if key in wrap_map:
full_kernel_data.append((l + " " + wrap_map[key]).split())
elif key in meta_map:
full_kernel_data.append((l + " " + meta_map[key]).split())
else:
full_kernel_data.append((l + " unknown").split())
return full_kernel_data
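# Example (illustrative): a kernel record "add CPU ALL_LAYOUT AddKernel float"
# whose func maps to ElementwiseInferMeta in the yaml data merges into
#   ['add', 'CPU', 'ALL_LAYOUT', 'AddKernel', 'float', 'ElementwiseInferMeta']
# and an unmatched record gets 'unknown' appended instead.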
def gen_warn_info():
return """// Generated by tools/infrt/gen_phi_kernel_register.py for infrt.
// DO NOT edit or include it within paddle.
"""
def gen_include_headers():
return """
#include "paddle/infrt/kernel/phi/infershaped/infershaped_kernel_launchers.h"
#include "paddle/infrt/kernel/phi/infershaped/phi_kernel_launcher.h"
#include "paddle/phi/backends/all_context.h"
#include "paddle/phi/include/kernels.h"
#include "paddle/phi/include/infermeta.h"
#include "paddle/phi/infermeta/generated.h"
"""
def gen_namespace():
return (
"""
namespace infrt {
namespace kernel {
""",
"""
} // namespace kernel
} // namespace infrt
""",
)
def gen_context(val):
if val == "CPU":
return "::phi::CPUContext", "phi_cpu"
elif val == "GPU":
return "::phi::GPUContext", "phi_gpu"
# elif val == "XPU":
# return "::phi::XPUContext", "phi_xpu"
else:
# raise Exception(f"Unknown context type {val}")
return "", ""
def gen_layout(val):
if val == "ALL_LAYOUT":
return 'any'
else:
# TODO(wilber): now only process ALL_LAYOUT
raise Exception(f"Unknown layout type {val}")
def gen_kernel_func(val, ctx_name, dtype_name):
if '<' in val and '>' in val:
st = val.index('<')
ed = val.index('>')
func_name = val[:st]
template_name = val[st + 1 : ed]
if '::phi::' in template_name:
return "&::phi::" + val
else:
return "&::phi::" + func_name + "<::phi::" + template_name + ">"
else:
return "&::phi::" + val + "<" + dtype_name + ", " + ctx_name + ">"
def gen_dtype(vals: List[str]):
ir_dtypes, origin_dtypes = [], []
for val in vals:
if val == "float":
ir_dtypes.append("float32")
origin_dtypes.append("float")
elif val == "double":
ir_dtypes.append("float64")
origin_dtypes.append("double")
elif val == "float16":
ir_dtypes.append("float16")
origin_dtypes.append("paddle::experimental::float16")
elif val == "bfloat16":
ir_dtypes.append("bf16")
origin_dtypes.append("paddle::experimental::bfloat16")
elif val == "bool":
ir_dtypes.append("bool")
origin_dtypes.append("bool")
elif val == "int8_t":
ir_dtypes.append("int8")
origin_dtypes.append("int8_t")
elif val == "uint8_t":
ir_dtypes.append("uint8")
origin_dtypes.append("uint8_t")
elif val == "int16_t":
ir_dtypes.append("int16")
origin_dtypes.append("int16_t")
elif val == "int" or val == "int32_t":
ir_dtypes.append("int32")
origin_dtypes.append("int32_t")
elif val == "int64_t":
ir_dtypes.append("int64")
origin_dtypes.append("int64_t")
elif val == "complex<float>" or val == "complex64":
ir_dtypes.append("complex64")
origin_dtypes.append("paddle::experimental::complex64")
elif val == "complex<double>" or val == "complex128":
ir_dtypes.append("complex128")
origin_dtypes.append("paddle::experimental::complex128")
elif val == "pstring":
ir_dtypes.append("pstring")
origin_dtypes.append("paddle::experimental::pstring")
elif val == "ALL_DTYPE":
ir_dtypes.append("all")
origin_dtypes.append("all")
else:
if "VA_ARGS" in val:
continue
raise Exception(f"Unknown data type {val}")
return ir_dtypes, origin_dtypes
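# Example (illustrative):
#   gen_dtype(['float', 'int64_t'])
#     -> (['float32', 'int64'], ['float', 'int64_t'])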
# Note: Now only process CPUContext and GPUContext.
def gen_register_code_info(item: List[str], attr_data: Dict[str, List[str]]):
"""
    item: ['add', 'CPU', 'ALL_LAYOUT', 'AddKernel', 'float', 'double', '...'(variadic types), 'ElementwiseInferMeta']
attr_data: {'phi_cpu.arg_min.float32.any': ['axisBool', 'keepdimsBool', 'flatten', 'dtype']}
"""
ctx_name, ir_ctx_name = gen_context(item[1])
if ctx_name == "":
return ""
item[2] = gen_layout(item[2])
ir_dtypes, origin_dtypes = gen_dtype(item[4:-1])
infer_shape_func = "&::phi::" + item[-1]
res = ""
if item[-1] == "unknown":
# TODO(wilber): handle the unknown inferShape func.
return ""
skipped_kernel_list = get_skipped_kernel_list()
for ir_dtype, origin_dtype in zip(ir_dtypes, origin_dtypes):
kernel_func = gen_kernel_func(item[3], ctx_name, origin_dtype)
if item[0].lower() in skipped_kernel_list:
continue
ir_name = (
ir_ctx_name
+ '.'
+ item[0].lower()
+ '.'
+ ir_dtype
+ '.'
+ item[2].lower()
)
        if attr_data.get(ir_name) is not None:
attr_names = ', '.join(
["\"" + a + "\"" for a in attr_data[ir_name]]
)
res += f"""
registry->AddKernel("{ir_name}","""
res += f"""
&KernelLauncherFunc<decltype({kernel_func}),
{kernel_func},
decltype({infer_shape_func}),
{infer_shape_func}>,
{{{attr_names}}});
"""
else:
res += f"""
registry->AddKernel("{ir_name}","""
res += f"""
&KernelLauncherFunc<decltype({kernel_func}),
{kernel_func},
decltype({infer_shape_func}),
{infer_shape_func}>);
"""
return res
def gen_register_info(
resources: List[List[str]], attr_data: Dict[str, List[str]]
):
"""
    resources: [['add', 'CPU', 'ALL_LAYOUT', 'AddKernel', 'float', 'double', '...'(variadic types), 'ElementwiseInferMeta'], ...]
attr_data: {'phi_cpu.arg_min.float32.any': ['axisBool', 'keepdimsBool', 'flatten', 'dtype']}
"""
res = "void RegisterInferShapeLaunchers(host_context::KernelRegistry* registry) {"
# register cpu kernels.
    for item in resources:
        # The output string is polluted by C++ macros; strip the trailing \
        # and use the cleaned fields from here on.
        update_item = [v.strip('\\') for v in item]
        if update_item[1] != "CPU":
            continue
        code = gen_register_code_info(update_item, attr_data)
        if code == "":
            continue
        res += code
# register gpu kernels.
res += "\n#ifdef INFRT_WITH_GPU"
    for item in resources:
        # The output string is polluted by C++ macros; strip the trailing \
        # and use the cleaned fields from here on.
        update_item = [v.strip('\\') for v in item]
        if update_item[1] != "GPU":
            continue
        code = gen_register_code_info(update_item, attr_data)
        if code == "":
            continue
        res += code
res += "#endif // INFRT_WITH_GPU"
res += "\n}"
return res
def gen_phi_kernel_register_code(
resources: List[List[str]],
attr_data: Dict[str, List[str]],
src_file_path: str,
):
    with open(src_file_path, 'w') as source_file:
        source_file.write(gen_warn_info())
        source_file.write(gen_include_headers())
        namespace = gen_namespace()
        source_file.write(namespace[0])
        source_file.write(gen_register_info(resources, attr_data))
        source_file.write(namespace[1])
if __name__ == "__main__":
args = parse_args()
skipped_phi_api_list_file = (
args.paddle_root_path + skipped_phi_api_list_file
)
api_yaml_file = args.paddle_root_path + api_yaml_file
legacy_api_yaml_file = args.paddle_root_path + legacy_api_yaml_file
infer_meta_data = []
api_meta_data = get_api_yaml_info(api_yaml_file)
legacy_api_meta_data = get_api_yaml_info(legacy_api_yaml_file)
if api_meta_data:
infer_meta_data.extend(api_meta_data)
if legacy_api_meta_data:
infer_meta_data.extend(legacy_api_meta_data)
kernel_data = get_kernel_info(args.kernel_info_file)
info_meta_wrap_data = get_infermeta_info(args.infermeta_wrap_file)
attr_data = get_attr_info(args.attr_info_file)
out = merge(infer_meta_data, kernel_data, info_meta_wrap_data)
gen_phi_kernel_register_code(out, attr_data, args.generate_file)
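# Illustrative invocation (the input file names are assumptions; they are
# produced by the companion shell scripts mentioned in the --help text):
#   python get_phi_kernel_info.py --paddle_root_path /work/Paddle \
#       --kernel_info_file kernel_info.txt \
#       --infermeta_wrap_file infermeta_wrap.txt \
#       --attr_info_file attr_info.txt \
#       --generate_file infershaped_kernel_launchers.cc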
| {
"content_hash": "039afbc3a96e60aeb6cd63d6b987bf73",
"timestamp": "",
"source": "github",
"line_count": 392,
"max_line_length": 129,
"avg_line_length": 31.005102040816325,
"alnum_prop": 0.5567714332729965,
"repo_name": "PaddlePaddle/Paddle",
"id": "9eeee88276d80c58839a3db40aa29b00d93bbde1",
"size": "12780",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "tools/infrt/get_phi_kernel_info.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36848680"
},
{
"name": "CMake",
"bytes": "902619"
},
{
"name": "Cuda",
"bytes": "5227207"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36203874"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553177"
}
],
"symlink_target": ""
} |
import datetime
import logging
import re
from concurrent.futures import ThreadPoolExecutor
import botocore
import pytz
import autoscaler.aws_utils as aws_utils
import autoscaler.utils as utils
logger = logging.getLogger(__name__)
class AutoScalingGroups(object):
_BOTO_CLIENT_TYPE = 'autoscaling'
_CLUSTER_KEY = 'KubernetesCluster'
_ROLE_KEYS = ('KubernetesRole', 'Role')
_WORKER_ROLE_VALUES = ('worker', 'kubernetes-minion')
def __init__(self, session, regions, cluster_name=None):
"""
cluster_name - if set, filter ASGs by cluster_name in tag field
_CLUSTER_KEY
"""
self.session = session
self.regions = regions
self.cluster_name = cluster_name
@staticmethod
def get_all_raw_groups_and_launch_configs(client):
raw_groups = aws_utils.fetch_all(
client.describe_auto_scaling_groups, {'MaxRecords': 100}, 'AutoScalingGroups')
all_launch_configs = {}
        batch_size = 50
        # range() already steps by batch_size, so slice by the index itself;
        # multiplying by batch_size again would skip most groups.
        for launch_config_idx in range(0, len(raw_groups), batch_size):
            groups = raw_groups[launch_config_idx:launch_config_idx + batch_size]
kwargs = {
'LaunchConfigurationNames': [g['LaunchConfigurationName'] for g in groups]
}
launch_configs = aws_utils.fetch_all(
client.describe_launch_configurations,
kwargs, 'LaunchConfigurations')
all_launch_configs.update((lc['LaunchConfigurationName'], lc)
for lc in launch_configs)
return raw_groups, all_launch_configs
def get_all_groups(self, kube_nodes):
groups = []
with ThreadPoolExecutor(max_workers=max(1, len(self.regions))) as executor:
raw_groups_and_launch_configs = {}
for region in self.regions:
client = self.session.client(self._BOTO_CLIENT_TYPE,
region_name=region)
raw_groups_and_launch_configs[region] = executor.submit(
AutoScalingGroups.get_all_raw_groups_and_launch_configs, client)
for region in self.regions:
raw_groups, launch_configs = raw_groups_and_launch_configs[region].result()
client = self.session.client(self._BOTO_CLIENT_TYPE,
region_name=region)
for raw_group in sorted(raw_groups, key=lambda g: g['AutoScalingGroupName']):
if self.cluster_name:
cluster_name = None
role = None
for tag in raw_group['Tags']:
if tag['Key'] == self._CLUSTER_KEY:
cluster_name = tag['Value']
elif tag['Key'] in self._ROLE_KEYS:
role = tag['Value']
if cluster_name != self.cluster_name or role not in self._WORKER_ROLE_VALUES:
continue
groups.append(AutoScalingGroup(
client, region, kube_nodes, raw_group,
launch_configs[raw_group['LaunchConfigurationName']]))
return groups
class AutoScalingTimeouts(object):
_TIMEOUT = 3600 # 1 hour
_SPOT_REQUEST_TIMEOUT = 300 # 5 minutes
_MAX_OUTBIDS_IN_INTERVAL = 60*20 # 20 minutes
_SPOT_HISTORY_PERIOD = 60*60*5 # 5 hours
def __init__(self, session):
"""
"""
self.session = session
# ASGs to avoid because of recent launch failures
# e.g. a region running out of capacity
# try to favor other regions
self._timeouts = {}
self._last_activities = {}
# ASGs to avoid because of spot pricing history
self._spot_timeouts = {}
self._spot_price_history = {}
def refresh_timeouts(self, asgs, dry_run=False):
"""
refresh timeouts on ASGs using new data from aws
"""
self.time_out_spot_asgs(asgs)
asgs_by_region = {}
for asg in asgs:
asgs_by_region.setdefault(asg.region, []).append(asg)
for region, regional_asgs in asgs_by_region.items():
client = self.session.client('autoscaling', region_name=region)
start_time_cutoff = None
newest_completed_activity = None
activities = {}
for activity in self.iter_activities(client):
if newest_completed_activity is None and activity['Progress'] == 100:
newest_completed_activity = activity
if activity['ActivityId'] == self._last_activities.get(region, None):
break
if start_time_cutoff is None:
start_time_cutoff = (
datetime.datetime.now(activity['StartTime'].tzinfo) -
datetime.timedelta(seconds=self._TIMEOUT))
if activity['StartTime'] < start_time_cutoff:
# skip events that are too old to cut down the time
# it takes the first time to go through events
break
activities.setdefault(activity['AutoScalingGroupName'], []).append(activity)
            if newest_completed_activity is not None:
                self._last_activities[region] = newest_completed_activity['ActivityId']
for asg in regional_asgs:
self.reconcile_limits(asg, activities.get(asg.name, []), dry_run=dry_run)
def iter_activities(self, client):
next_token = None
while True:
kwargs = {}
if next_token:
kwargs['NextToken'] = next_token
data = client.describe_scaling_activities(**kwargs)
for item in data['Activities']:
yield item
next_token = data.get('NextToken')
if not next_token:
break
def revert_capacity(self, asg, entry, dry_run):
"""
try to decrease desired capacity to the original
capacity before the capacity increase that caused
the ASG activity entry.
"""
cause_m = AutoScalingCauseMessages.LAUNCH_INSTANCE.search(entry.get('Cause', ''))
if cause_m:
original_capacity = int(cause_m.group('original_capacity'))
if asg.desired_capacity > original_capacity:
# we tried to go over capacity and failed
# now set the desired capacity back to a normal range
if not dry_run:
asg.set_desired_capacity(original_capacity)
else:
logger.info('[Dry run] Would have set desired capacity to %s', original_capacity)
return True
return False
def time_out_asg(self, asg, entry):
self._timeouts[asg._id] = (
entry['StartTime'] + datetime.timedelta(seconds=self._TIMEOUT))
logger.info('%s is timed out until %s',
asg.name, self._timeouts[asg._id])
def reconcile_limits(self, asg, activities, dry_run=False):
"""
makes sure the ASG has valid capacity by processing errors
in its recent scaling activities.
marks an ASG as timed out if it recently had a capacity
failure.
"""
for entry in activities:
status_msg = entry.get('StatusMessage', '')
if entry['StatusCode'] in ('Failed', 'Cancelled'):
                logger.warning('%s scaling failure: %s', asg, entry)
m = AutoScalingErrorMessages.INSTANCE_LIMIT.match(status_msg)
if m:
max_desired_capacity = int(m.group('requested')) - 1
if asg.desired_capacity > max_desired_capacity:
self.time_out_asg(asg, entry)
# we tried to go over capacity and failed
# now set the desired capacity back to a normal range
if not dry_run:
asg.set_desired_capacity(max_desired_capacity)
else:
logger.info('[Dry run] Would have set desired capacity to %s', max_desired_capacity)
return
m = AutoScalingErrorMessages.VOLUME_LIMIT.match(status_msg)
if m:
# TODO: decrease desired capacity
self.time_out_asg(asg, entry)
return
m = AutoScalingErrorMessages.CAPACITY_LIMIT.match(status_msg)
if m:
reverted = self.revert_capacity(asg, entry, dry_run)
if reverted:
self.time_out_asg(asg, entry)
return
m = AutoScalingErrorMessages.AZ_LIMIT.search(status_msg)
if m and 'only-az' in asg.name:
reverted = self.revert_capacity(asg, entry, dry_run)
if reverted:
self.time_out_asg(asg, entry)
return
m = AutoScalingErrorMessages.SPOT_REQUEST_CANCELLED.search(status_msg)
if m:
# we cancelled a spot request
# don't carry on to reset timeout
continue
m = AutoScalingErrorMessages.SPOT_LIMIT.match(status_msg)
if m:
self.time_out_asg(asg, entry)
if not dry_run:
asg.set_desired_capacity(asg.actual_capacity)
else:
logger.info('[Dry run] Would have set desired capacity to %s', asg.actual_capacity)
return
elif entry['StatusCode'] == 'WaitingForSpotInstanceId':
                logger.warning('%s waiting for spot: %s', asg, entry)
balance_cause_m = AutoScalingCauseMessages.AZ_BALANCE.search(entry.get('Cause', ''))
if balance_cause_m:
# sometimes ASGs will launch instances in other az's to
# balance out the group
# ignore these events
# even if we cancel it, the ASG will just attempt to
# launch again
logger.info('ignoring AZ balance launch event')
continue
now = datetime.datetime.now(entry['StartTime'].tzinfo)
if (now - entry['StartTime']) > datetime.timedelta(seconds=self._SPOT_REQUEST_TIMEOUT):
self.time_out_asg(asg, entry)
# try to cancel spot request and scale down ASG
spot_request_m = AutoScalingErrorMessages.SPOT_REQUEST_WAITING.search(status_msg)
if spot_request_m:
spot_request_id = spot_request_m.group('request_id')
if not dry_run:
cancelled = self.cancel_spot_request(asg.region, spot_request_id)
if cancelled:
asg.set_desired_capacity(asg.desired_capacity - 1)
else:
logger.info('[Dry run] Would have cancelled spot request %s and decremented desired capacity.',
spot_request_id)
# don't return here so that we can cancel more spot requests
self._timeouts[asg._id] = None
logger.debug('%s has no timeout', asg.name)
def is_timed_out(self, asg):
timeout = self._timeouts.get(asg._id)
spot_timeout = self._spot_timeouts.get(asg._id)
if timeout and datetime.datetime.now(timeout.tzinfo) < timeout:
return True
if spot_timeout and datetime.datetime.now(pytz.utc) < spot_timeout:
return True
return False
def cancel_spot_request(self, region, request_id):
client = self.session.client('ec2',
region_name=region)
response = client.describe_spot_instance_requests(
SpotInstanceRequestIds=[request_id]
)
if len(response['SpotInstanceRequests']) == 0:
return False
spot_instance_req = response['SpotInstanceRequests'][0]
if spot_instance_req['State'] in ('open', 'active'):
response = client.cancel_spot_instance_requests(
SpotInstanceRequestIds=[request_id]
)
logger.info('Spot instance request %s cancelled.', request_id)
return True
return False
def time_out_spot_asgs(self, asgs):
"""
Using recent spot pricing data from AWS, time out spot instance
ASGs that would be outbid for more than _MAX_OUTBIDS_IN_INTERVAL seconds
"""
region_instance_asg_map = {}
for asg in asgs:
if not asg.is_spot:
continue
instance_asg_map = region_instance_asg_map.setdefault(asg.region, {})
instance_type = asg.launch_config['InstanceType']
instance_asg_map.setdefault(instance_type, []).append(asg)
now = datetime.datetime.now(pytz.utc)
since = now - datetime.timedelta(seconds=self._SPOT_HISTORY_PERIOD)
for region, instance_asg_map in region_instance_asg_map.items():
# Expire old history
history = [item for item in self._spot_price_history.get(region, []) if item['Timestamp'] > since]
if history:
newest_spot_price = max(item['Timestamp'] for item in history)
else:
newest_spot_price = since
client = self.session.client('ec2', region_name=region)
kwargs = {
'StartTime': newest_spot_price,
'InstanceTypes': list(instance_asg_map.keys()),
'ProductDescriptions': ['Linux/UNIX']
}
history.extend(aws_utils.fetch_all(
client.describe_spot_price_history, kwargs, 'SpotPriceHistory'))
self._spot_price_history[region] = history
for instance_type, asgs in instance_asg_map.items():
for asg in asgs:
last_az_bid = {}
outbid_time = {}
bid_price = float(asg.launch_config['SpotPrice'])
for item in history:
if item['InstanceType'] != instance_type:
continue
if float(item['SpotPrice']) > bid_price:
# we would've been outbid!
if item['AvailabilityZone'] in last_az_bid:
time_diff = (last_az_bid[item['AvailabilityZone']] - item['Timestamp'])
else:
time_diff = datetime.timedelta(seconds=0)
outbid_time[item['AvailabilityZone']] = (
outbid_time.get(item['AvailabilityZone'], datetime.timedelta(seconds=0)) +
time_diff)
last_az_bid[item['AvailabilityZone']] = item['Timestamp']
if outbid_time:
avg_outbid_time = sum(t.total_seconds() for t in outbid_time.values()) / len(outbid_time)
else:
avg_outbid_time = 0.0
if avg_outbid_time > self._MAX_OUTBIDS_IN_INTERVAL:
self._spot_timeouts[asg._id] = now + datetime.timedelta(seconds=self._TIMEOUT)
logger.info('%s (%s) is spot timed out until %s (would have been outbid for %ss on average)',
asg.name, asg.region, self._spot_timeouts[asg._id], avg_outbid_time)
else:
self._spot_timeouts[asg._id] = None
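    # Illustrative outbid accounting for one AZ with a $0.10 bid (prices and
    # timestamps assumed, history newest-first): $0.12 at 10:00 and $0.12 at
    # 09:35 both exceed the bid; the first match contributes 0s and the second
    # adds the 25-minute gap, so the average outbid time of 1500s exceeds
    # _MAX_OUTBIDS_IN_INTERVAL (1200s) and the ASG is spot timed out.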
class AutoScalingGroup(object):
provider = 'aws'
def __init__(self, client, region, kube_nodes, raw_group, launch_config):
"""
client - boto3 AutoScaling.Client
region - AWS region string
kube_nodes - list of KubeNode objects
raw_group - raw ASG dictionary returned from AWS API
launch_config - raw launch config dictionary returned from AWS API
"""
self.client = client
self.region = region
self.launch_config = launch_config
self.selectors = self._extract_selectors(region, launch_config, raw_group['Tags'])
self.name = raw_group['AutoScalingGroupName']
self.desired_capacity = raw_group['DesiredCapacity']
self.min_size = raw_group['MinSize']
self.max_size = raw_group['MaxSize']
self.is_spot = launch_config.get('SpotPrice') is not None
self.instance_type = launch_config['InstanceType']
self.instance_ids = set(inst['InstanceId'] for inst in raw_group['Instances']
if inst.get('InstanceId'))
self.nodes = [node for node in kube_nodes
if node.instance_id in self.instance_ids]
self.unschedulable_nodes = [n for n in self.nodes if n.unschedulable]
self.no_schedule_taints = {}
self._id = (self.region, self.name)
def _extract_selectors(self, region, launch_config, tags_data):
selectors = {
'aws/type': launch_config['InstanceType'],
'aws/class': launch_config['InstanceType'][0],
'aws/ami-id': launch_config['ImageId'],
'aws/region': region
}
for tag_data in tags_data:
if tag_data['Key'].startswith('kube/'):
selectors[tag_data['Key'][5:]] = tag_data['Value']
# adding kube label counterparts
selectors['beta.kubernetes.io/instance-type'] = selectors['aws/type']
selectors['failure-domain.beta.kubernetes.io/region'] = selectors['aws/region']
return selectors
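    # Illustrative selectors for a hypothetical m4.large launch config with
    # ImageId 'ami-1234' in us-east-1 and an ASG tag kube/pool=gpu:
    #   {'aws/type': 'm4.large', 'aws/class': 'm', 'aws/ami-id': 'ami-1234',
    #    'aws/region': 'us-east-1', 'pool': 'gpu',
    #    'beta.kubernetes.io/instance-type': 'm4.large',
    #    'failure-domain.beta.kubernetes.io/region': 'us-east-1'}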
def is_timed_out(self):
return False
@property
def global_priority(self):
return 0
@property
def actual_capacity(self):
return len(self.nodes)
def set_desired_capacity(self, new_desired_capacity):
"""
sets the desired capacity of the underlying ASG directly.
note that this is for internal control.
for scaling purposes, please use scale() instead.
"""
logger.info("ASG: {} new_desired_capacity: {}".format(
self, new_desired_capacity))
self.client.set_desired_capacity(AutoScalingGroupName=self.name,
DesiredCapacity=new_desired_capacity,
HonorCooldown=False)
self.desired_capacity = new_desired_capacity
return utils.CompletedFuture(True)
def scale(self, new_desired_capacity):
"""
scales the ASG to the new desired capacity.
returns a future with the result True if desired capacity has been increased.
"""
desired_capacity = min(self.max_size, new_desired_capacity)
num_unschedulable = len(self.unschedulable_nodes)
num_schedulable = self.actual_capacity - num_unschedulable
logger.info("Desired {}, currently at {}".format(
desired_capacity, self.desired_capacity))
logger.info("Kube node: {} schedulable, {} unschedulable".format(
num_schedulable, num_unschedulable))
# Try to get the number of schedulable nodes up if we don't have enough, regardless of whether
# group's capacity is already at the same as the desired.
if num_schedulable < desired_capacity:
for node in self.unschedulable_nodes:
if node.uncordon():
num_schedulable += 1
# Uncordon only what we need
if num_schedulable == desired_capacity:
break
if self.desired_capacity != desired_capacity:
if self.desired_capacity == self.max_size:
logger.info("Desired same as max, desired: {}, schedulable: {}".format(
self.desired_capacity, num_schedulable))
return utils.CompletedFuture(False)
scale_up = self.desired_capacity < desired_capacity
# This should be a rare event
# note: this micro-optimization is not worth doing as the race condition here is
# tricky. when ec2 initializes some nodes in the meantime, asg will shutdown
# nodes by its own policy
# scale_down = self.desired_capacity > desired_capacity >= self.actual_capacity
if scale_up:
# should have gotten our num_schedulable to highest value possible
# actually need to grow.
return self.set_desired_capacity(desired_capacity)
logger.info("Doing nothing: desired_capacity correctly set: {}, schedulable: {}".format(
self.name, num_schedulable))
return utils.CompletedFuture(False)
def scale_nodes_in(self, nodes):
"""
scale down asg by terminating the given node.
returns a future indicating when the request completes.
"""
for node in nodes:
try:
# if we somehow end up in a situation where we have
# more capacity than desired capacity, and the desired
# capacity is at asg min size, then when we try to
# terminate the instance while decrementing the desired
# capacity, the aws api call will fail
decrement_capacity = self.desired_capacity > self.min_size
self.client.terminate_instance_in_auto_scaling_group(
InstanceId=node.instance_id,
ShouldDecrementDesiredCapacity=decrement_capacity)
self.nodes.remove(node)
logger.info('Scaled node %s in', node)
except botocore.exceptions.ClientError as e:
if str(e).find("Terminating instance without replacement will "
"violate group's min size constraint.") == -1:
raise e
logger.error("Failed to terminate instance: %s", e)
return utils.CompletedFuture(None)
def contains(self, node):
return node.instance_id in self.instance_ids
def is_match_for_selectors(self, selectors):
for label, value in selectors.items():
if self.selectors.get(label) != value:
return False
return True
def is_taints_tolerated(self, pod):
for label, value in pod.selectors.items():
if self.selectors.get(label) != value:
return False
for key in self.no_schedule_taints:
if not (pod.no_schedule_wildcard_toleration or key in pod.no_schedule_existential_tolerations):
return False
return True
def __str__(self):
return 'AutoScalingGroup({name}, {selectors_hash})'.format(name=self.name, selectors_hash=utils.selectors_to_hash(self.selectors))
def __repr__(self):
return str(self)
class AutoScalingErrorMessages(object):
INSTANCE_LIMIT = re.compile(r'You have requested more instances \((?P<requested>\d+)\) than your current instance limit of (?P<limit>\d+) allows for the specified instance type. Please visit http://aws.amazon.com/contact-us/ec2-request to request an adjustment to this limit. Launching EC2 instance failed.')
VOLUME_LIMIT = re.compile(r'Instance became unhealthy while waiting for instance to be in InService state. Termination Reason: Client.VolumeLimitExceeded: Volume limit exceeded')
CAPACITY_LIMIT = re.compile(r'Insufficient capacity\. Launching EC2 instance failed\.')
SPOT_REQUEST_WAITING = re.compile(r'Placed Spot instance request: (?P<request_id>.+). Waiting for instance\(s\)')
SPOT_REQUEST_CANCELLED = re.compile(r'Spot instance request: (?P<request_id>.+) has been cancelled\.')
SPOT_LIMIT = re.compile(r'Max spot instance count exceeded\. Placing Spot instance request failed\.')
AZ_LIMIT = re.compile(r'We currently do not have sufficient .+ capacity in the Availability Zone you requested (.+)\.')
class AutoScalingCauseMessages(object):
LAUNCH_INSTANCE = re.compile(r'At \d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\dZ an instance was started in response to a difference between desired and actual capacity, increasing the capacity from (?P<original_capacity>\d+) to (?P<target_capacity>\d+)\.')
AZ_BALANCE = re.compile(r'An instance was launched to aid in balancing the group\'s zones\.')
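# Illustrative check of the cause-message parsing above (the message text is
# hypothetical but follows the format the regex expects):
#   m = AutoScalingCauseMessages.LAUNCH_INSTANCE.search(
#       'At 2020-01-01T00:00:00Z an instance was started in response to a '
#       'difference between desired and actual capacity, increasing the '
#       'capacity from 2 to 3.')
#   assert m.group('original_capacity') == '2'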
| {
"content_hash": "8240d8d0a4fb461a61e0b5691a155b7c",
"timestamp": "",
"source": "github",
"line_count": 548,
"max_line_length": 312,
"avg_line_length": 45.17153284671533,
"alnum_prop": 0.5660499313242304,
"repo_name": "openai/kubernetes-ec2-autoscaler",
"id": "ca6a91374d0f42d1d65a8e42efbdf2623ec082e1",
"size": "24754",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "autoscaler/autoscaling_groups.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "409"
},
{
"name": "Python",
"bytes": "167393"
}
],
"symlink_target": ""
} |