Dataset schema (column, dtype, value range):

commit         stringlengths   40 .. 40
subject        stringlengths   1 .. 3.25k
old_file       stringlengths   4 .. 311
new_file       stringlengths   4 .. 311
old_contents   stringlengths   0 .. 26.3k
lang           stringclasses   3 values
proba          float64         0 .. 1
diff           stringlengths   0 .. 7.82k
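The diff column stores its payload percent-encoded: %0A for newlines, %22 for quotes, %5B/%5D for brackets, %25 for a literal percent sign. A minimal sketch of decoding a raw row with the Python standard library, assuming only the field name from the schema above:

from urllib.parse import unquote

def decode_diff(row):
    # A single unquote() pass reverses the percent-encoding:
    # %0A -> newline, %22 -> ", %5B -> [, %25 -> %.
    return unquote(row["diff"])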
96ed06f1f3dab3aa9d0f8150c41a5c1b943a86b0
Add test for config module
frappe/tests/test_config.py
frappe/tests/test_config.py
Python
0.000002
@@ -0,0 +1,678 @@
+# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
+# License: MIT. See LICENSE
+import unittest
+
+import frappe
+from frappe.config import get_modules_from_all_apps_for_user
+
+
+class TestConfig(unittest.TestCase):
+	def test_get_modules(self):
+		frappe_modules = frappe.get_all("Module Def", filters={"app_name": "frappe"}, pluck="name")
+		all_modules_data = get_modules_from_all_apps_for_user()
+		first_module_entry = all_modules_data[0]
+		all_modules = [x["module_name"] for x in all_modules_data]
+		self.assertIn("links", first_module_entry)
+		self.assertIsInstance(all_modules_data, list)
+		self.assertFalse([x for x in frappe_modules if x not in all_modules])
d7bfed84d773e7ccbd23e910a533f70b4dd02184
Add module entrypoint
g2sd/__main__.py
g2sd/__main__.py
Python
0.000001
@@ -0,0 +1,58 @@
+from .g2sd import cmd
+
+if __name__ == "__main__":
+    cmd()
7421cecfd6b304692eb19d76d3f90a61a950bc83
add get_reviewers
get_reviewers.py
get_reviewers.py
Python
0
@@ -0,0 +1,1473 @@
+import sys
+import urllib2
+import time
+from lxml import html
+
+def get_reviewers(bookid, star=1):
+    allstar10_list = []
+    for tag in ['collections', 'doings', 'wishes']:
+        reached_end = False
+        i = 0
+        while not reached_end:
+            print "start %d" % i
+            page_url = "http://book.douban.com/subject/%s/%s?start=%d" % (bookid, tag, i)
+            response = urllib2.urlopen(page_url)
+            page_html = response.read()
+            tree = html.fromstring(page_html)
+
+            reviews_element_list = tree.xpath('//*[@id="' + tag + '_tab"]//table')
+            if len(reviews_element_list) < 20:
+                reached_end = True
+
+            reviewer_list = tree.xpath('//*[@id="' + tag + '_tab"]//table/tr/td/div[@class="pl2"]/a')
+            reviewers = [ el.attrib['href'] for el in reviewer_list ]
+
+            review_list = tree.xpath('//*[@id="' + tag + '_tab"]//table/tr/td/p[@class="pl"]/span[last()]')
+            reviews = [ el.attrib['class'] for el in review_list ]
+
+            review_stars = "allstar%d0" % star
+            allstar10_list.extend([reviewer for (reviewer, review) in zip(reviewers, reviews) if review == review_stars])
+
+            i += 20
+            time.sleep(1)
+
+    return allstar10_list
+
+if __name__ == "__main__":
+    bookid = sys.argv[1]
+    allstar10_list = get_reviewers( bookid )
+    for i in allstar10_list:
+        print i
+
258a8d38d590f856e144b1e725fe38619c6758ea
Create notes_extractor.py
notes_extractor/notes_extractor.py
notes_extractor/notes_extractor.py
Python
0
@@ -0,0 +1,1977 @@
+#!/usr/bin/env python3
+###############################################################################
+# Name : extract_notes.py #
+# Version : v. 1.0.0.0 #
+# Author : Abel Gancsos #
+# Description : Helps extract data about Apple Notes. #
+###############################################################################
+import os, sys, sqlite3;
+
+class INNote:
+    identifier=None;name=None;
+    def __init__(self, row=None):
+        if row != None:
+            self.identifier = row[0];
+            self.name = row[1];
+        pass;
+class NotesExtractor:
+    notes_path=None;connection=None;cursor=None;
+    def __init__(self, params=dict()):
+        self.notes_path = params["-p"] if "-p" in params.keys() else "{0}/Library/Group Containers/group.com.apple.notes/NoteStore.sqlite".format(os.environ['HOME']);
+        assert os.path.exists(self.notes_path), "Notes cache must exist...";
+        self.connection = sqlite3.connect(self.notes_path);
+        self.cursor = self.connection.cursor();
+    def ensure_close(self):
+        self.connection.commit();
+        self.connection.close();
+    def search(self, keyword=""):
+        notes = list();
+        self.cursor.execute("SELECT ZIDENTIFIER, ZTITLE1 FROM ZICCLOUDSYNCINGOBJECT WHERE ZTITLE1 LIKE '%{0}%'".format(keyword));
+        rows = self.cursor.fetchall();
+        for row in rows: notes.append(INNote(row));
+        return notes;
+    pass;
+
+if __name__ == "__main__":
+    params = dict();
+    for i in range(0, len(sys.argv) - 1): params[sys.argv[i]] = sys.argv[i + 1];
+    session = NotesExtractor(params);
+    notes = session.search(params["-n"] if "-n" in params.keys() else "");
+    for note in notes: print("{1}\t\tnotes://showNote?identifier={0}".format(note.identifier, note.name));
+    session.ensure_close();
c0d135fc40142561e4a2409e47b34c367a6a7ef4
add script to read device logs from rms dump
util/scripts/devicelogs.py
util/scripts/devicelogs.py
Python
0
@@ -0,0 +1,734 @@
+from rmsdump import *
+
+def read_log_entry (log_entry):
+    return tuple(log_entry.val[i].val for i in range(0, 3))
+
+def print_log (log_atom):
+    print '%s> %s: %s' % (log_atom[0].strftime('%Y-%m-%d %H:%M:%S'), log_atom[1], log_atom[2])
+
+if __name__ == "__main__":
+    data = sys.stdin.read()
+    stream = DataStream(data)
+    (rmses, num_rms, err) = extract_rms(stream)
+
+    log_rmses = [rms for rms in rmses if rms['name'].startswith('LOG_') and rms['name'] != 'LOG_IX']
+    log_entries = []
+    for log_rms in log_rmses:
+        log_entries.extend([rec['content'][1] for rec in log_rms['records']])
+
+    log_digest = [read_log_entry(le) for le in log_entries]
+    for la in sorted(log_digest, key=lambda la: la[0]):
+        print_log(la)
f1ccab2168dea1b0827f4ca929f0036e84170a76
Add tests for cross domain xhr view
go/base/tests/test_views.py
go/base/tests/test_views.py
Python
0
@@ -0,0 +1,2059 @@
+"""Test for go.base.utils."""
+
+from mock import patch, Mock
+from django.core.urlresolvers import reverse
+
+from go.base.tests.utils import VumiGoDjangoTestCase
+
+
+class BaseViewsTestCase(VumiGoDjangoTestCase):
+    def cross_domain_xhr(self, url):
+        return self.client.post(reverse('cross_domain_xhr'), {'url': url})
+
+    @patch('requests.get')
+    def test_cross_domain_xhr(self, mocked_get):
+        mocked_get.return_value = Mock(content='foo', status_code=200)
+        response = self.cross_domain_xhr('http://domain.com')
+        [call] = mocked_get.call_args_list
+        args, kwargs = call
+        self.assertEqual(args, ('http://domain.com',))
+        self.assertEqual(kwargs, {'auth': None})
+        self.assertTrue(mocked_get.called)
+        self.assertEqual(response.content, 'foo')
+        self.assertEqual(response.status_code, 200)
+
+    @patch('requests.get')
+    def test_basic_auth_cross_domain_xhr(self, mocked_get):
+        mocked_get.return_value = Mock(content='foo', status_code=200)
+        response = self.cross_domain_xhr('http://username:password@domain.com')
+        [call] = mocked_get.call_args_list
+        args, kwargs = call
+        self.assertEqual(args, ('http://domain.com',))
+        self.assertEqual(kwargs, {'auth': ('username', 'password')})
+        self.assertTrue(mocked_get.called)
+        self.assertEqual(response.content, 'foo')
+        self.assertEqual(response.status_code, 200)
+
+    @patch('requests.get')
+    def test_basic_auth_cross_domain_xhr_with_https_and_port(self, mocked_get):
+        mocked_get.return_value = Mock(content='foo', status_code=200)
+        response = self.cross_domain_xhr(
+            'https://username:password@domain.com:443/foo')
+        [call] = mocked_get.call_args_list
+        args, kwargs = call
+        self.assertEqual(args, ('https://domain.com:443/foo',))
+        self.assertEqual(kwargs, {'auth': ('username', 'password')})
+        self.assertTrue(mocked_get.called)
+        self.assertEqual(response.content, 'foo')
+        self.assertEqual(response.status_code, 200)
b67cc70a6cf04e605ad93933dd9d8a88db94f093
add a simple flask app
backend/app.py
backend/app.py
Python
0.000002
@@ -0,0 +1,194 @@
+from flask import Flask
+import db
+
[email protected]("/")
+def hello():
+    return "Hello World!"
+
+if __name__ == "__main__":
+    #db.process_db()
+    app = Flask(__name__)
+    app.run(debug=True)
+
2b25b9ba1c9417e3e25a91055a65551210eb5313
Add meal migrations
app/timetables/migrations/0002_meal.py
app/timetables/migrations/0002_meal.py
Python
0.000003
@@ -0,0 +1,682 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.9.7 on 2016-08-16 17:46
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('timetables', '0001_initial'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='Meal',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('name', models.CharField(max_length=60, unique=True)),
+                ('start_time', models.TimeField()),
+                ('end_time', models.TimeField()),
+            ],
+        ),
+    ]
835d9628513a80215641bc4c63eae1fae7b8442b
rewrite portforwarding api
xos/api/utility/portforwarding.py
xos/api/utility/portforwarding.py
Python
0.000002
@@ -0,0 +1,1921 @@
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+from rest_framework.reverse import reverse
+from rest_framework import serializers
+from rest_framework import generics
+from rest_framework.views import APIView
+from core.models import *
+from django.forms import widgets
+from django.core.exceptions import PermissionDenied
+from xos.exceptions import XOSNotFound
+from api.xosapi_helpers import PlusModelSerializer, XOSViewSet, ReadOnlyField
+from django.db.models import Q
+
+class PortForwarding(Port):
+    class Meta:
+        proxy = True
+        app_label = "core"
+
+    def __init__(self, *args, **kwargs):
+        super(PortForwarding, self).__init__(*args, **kwargs)
+
+class PortForwardingSerializer(serializers.Serializer):
+    id = serializers.CharField(read_only=True)
+    ip = serializers.CharField(read_only=True)
+    ports = serializers.CharField(read_only=True, source="network.ports")
+    hostname = serializers.CharField(read_only=True, source="instance.node.name")
+
+    class Meta:
+        model = PortForwarding
+        fields = ('id', 'ip', 'ports', 'hostname')
+
+class PortForwardingViewSet(XOSViewSet):
+    base_name = "list"
+    method_name = "portforwarding"
+    method_kind = "viewset"
+    serializer_class = PortForwardingSerializer
+
+    def get_queryset(self):
+        queryset = queryset=Port.objects.exclude(Q(network__isnull=True) |
+                                                 Q(instance__isnull=True) |
+                                                 Q(instance__node__isnull=True) |
+                                                 Q(network__ports__exact='') |
+                                                 Q(ip__isnull=True) | Q(ip__exact=''))
+
+        node_name = self.request.query_params.get('node_name', None)
+        if node_name is not None:
+            queryset = queryset.filter(instance__node__name = node_name)
+
+        return queryset
33f43dd2e167afd40c4a5c516ae7cae35519b4c5
Add partial output for testcases to comet
judge/bridge/judgecallback.py
judge/bridge/judgecallback.py
import logging

from .judgehandler import JudgeHandler
from judge.models import Submission, SubmissionTestCase
from judge.simple_comet_client import send_message

logger = logging.getLogger('judge.bridge')


class DjangoJudgeHandler(JudgeHandler):
    def finish(self):
        JudgeHandler.finish(self)
        for id in self._load:
            submission = Submission.objects.get(id=id)
            submission.status = 'IE'
            submission.save()

    def on_grading_begin(self, packet):
        JudgeHandler.on_grading_begin(self, packet)
        submission = Submission.objects.get(id=packet['submission-id'])
        submission.status = 'G'
        submission.save()
        send_message('sub_%d' % submission.id, 'grading-begin')

    def on_grading_end(self, packet):
        JudgeHandler.on_grading_end(self, packet)
        submission = Submission.objects.get(id=packet['submission-id'])

        time = 0
        memory = 0
        points = 0.0
        total = 0
        status = 0
        status_codes = ['AC', 'WA', 'MLE', 'TLE', 'IR', 'RTE']
        for case in SubmissionTestCase.objects.filter(submission=submission):
            time += case.time
            total += case.total
            points += case.points
            memory = max(memory, case.memory)
            i = status_codes.index(case.status)
            if i > status:
                status = i

        total = round(total, 1)
        points = round(points / total * submission.problem.points, 1)
        if not submission.problem.partial and points != total:
            points = 0

        submission.status = 'D'
        submission.time = time
        submission.memory = memory
        submission.points = points
        submission.result = status_codes[status]
        submission.save()

        chan = 'sub_%d' % submission.id
        send_message(chan, 'grading-end %.3f %d %.1f %.1f %s' % (time, memory, points, submission.problem.points, submission.result))

    def on_compile_error(self, packet):
        JudgeHandler.on_compile_error(self, packet)
        submission = Submission.objects.get(id=packet['submission-id'])
        submission.status = submission.result = 'CE'
        submission.save()
        send_message('sub_%d' % submission.id, 'compile-error %s' % packet['log'])

    def on_bad_problem(self, packet):
        JudgeHandler.on_bad_problem(self, packet)
        submission = Submission.objects.get(id=packet['submission-id'])
        submission.status = submission.result = 'IE'
        submission.save()
        send_message('sub_%d' % submission.id, 'bad-problem %s' % packet['problem'])

    def on_test_case(self, packet):
        JudgeHandler.on_test_case(self, packet)
        submission = Submission.objects.get(id=packet['submission-id'])
        test_case = SubmissionTestCase.objects.get_or_create(submission=submission, case=packet['position'])[0]
        status = packet['status']
        if status & 2:
            test_case.status = 'RTE'
        elif status & 4:
            test_case.status = 'TLE'
        elif status & 8:
            test_case.status = 'MLE'
        elif status & 16:
            test_case.status = 'IR'
        elif status & 1:
            test_case.status = 'WA'
        else:
            test_case.status = 'AC'
        test_case.time = packet['time']
        test_case.memory = packet['memory']
        test_case.points = packet['points']
        test_case.total = packet['total-points']
        test_case.save()

        chan = 'sub_%d' % submission.id
        send_message(chan, 'test-case %d %s %.3f %d %.1f %.1f' % (packet['position'], test_case.status, packet['time'], packet['memory'], float(test_case.points), float(test_case.total)))
Python
0.000011
@@ -3629,16 +3629,21 @@
 .1f %.1f
+ (%s)
 ' % (pac
@@ -3890,11 +3890,29 @@
 e.total)
+, packet['output']
 ))
f1826b2cf4c4103efe52713a57dc2fcabda1a45d
fix migration for real
kitsune/questions/migrations/0006_ios_questionlocale.py
kitsune/questions/migrations/0006_ios_questionlocale.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


def create_questionlocale(apps, schema_editor):
    Product = apps.get_model('products', 'Product')
    QuestionLocale = apps.get_model('questions', 'QuestionLocale')

    p = Product.objects.get_or_create(slug='ios', defaults={
        'title': 'Firefox for iOS',
        'description': 'Firefox for iPhone, iPad and iPod touch devices',
        'display_order': 0,
        'visible': False})

    QuestionLocale.objects.get_or_create(locale='en-US', product=p)


class Migration(migrations.Migration):

    dependencies = [
        ('questions', '0005_change_locale_sr_Cyrl_to_sr'),
        ('products', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(create_questionlocale),
    ]
Python
0.000001
@@ -274,16 +274,25 @@
 )
 
     p
+, created
  = Produ
@@ -504,24 +504,38 @@
 False})
 
+    ql, created = 
 QuestionLoc
@@ -578,18 +578,30 @@
 -US'
-,
+)
+    ql.
 product
-=
+s.add(
 p)
 
c2e3e122560b8981079e1a89ff90fdf31c9eb8d1
Reset timer on push.
astm/protocol.py
astm/protocol.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#

import logging
from .asynclib import AsyncChat, call_later
from .records import HeaderRecord, TerminatorRecord
from .constants import STX, ENQ, ACK, NAK, EOT, ENCODING

log = logging.getLogger(__name__)

__all__ = ['ASTMProtocol']


class ASTMProtocol(AsyncChat):
    """Common ASTM protocol routines."""

    #: ASTM header record class.
    astm_header = HeaderRecord
    #: ASTM terminator record class.
    astm_terminator = TerminatorRecord
    #: Flag about chunked transfer.
    is_chunked_transfer = None
    #: IO timer
    timer = None

    encoding = ENCODING
    strip_terminator = False

    _last_recv_data = None
    _last_sent_data = None

    def __init__(self, sock=None, map=None, timeout=None):
        super(ASTMProtocol, self).__init__(sock, map)
        if timeout is not None:
            self.timer = call_later(timeout, self.on_timeout)

    def found_terminator(self):
        while self.inbox:
            data = self.inbox.popleft()
            if not data:
                continue
            self.dispatch(data)

    def dispatch(self, data):
        """Dispatcher of received data."""
        self._last_recv_data = data
        if data == ENQ:
            handler = self.on_enq
        elif data == ACK:
            handler = self.on_ack
        elif data == NAK:
            handler = self.on_nak
        elif data == EOT:
            handler = self.on_eot
        elif data.startswith(STX):  # this looks like a message
            handler = self.on_message
        else:
            handler = lambda: self.default_handler(data)
        resp = handler()
        if resp is not None:
            self.push(resp)

    def default_handler(self, data):
        raise ValueError('Unable to dispatch data: %r', data)

    def push(self, data):
        self._last_sent_data = data
        return super(ASTMProtocol, self).push(data)

    def on_enq(self):
        """Calls on <ENQ> message receiving."""

    def on_ack(self):
        """Calls on <ACK> message receiving."""

    def on_nak(self):
        """Calls on <NAK> message receiving."""

    def on_eot(self):
        """Calls on <EOT> message receiving."""

    def on_message(self):
        """Calls on ASTM message receiving."""

    def on_timeout(self):
        """Calls when timeout event occurs. Used to limit waiting time for
        response data."""
        log.warn('Communication timeout')

    def handle_read(self):
        if self.timer is not None and not self.timer.cancelled:
            self.timer.reset()
        super(ASTMProtocol, self).handle_read()

    def handle_close(self):
        if self.timer is not None and not self.timer.cancelled:
            self.timer.cancel()
        super(ASTMProtocol, self).handle_close()
Python
0
@@ -2013,32 +2013,127 @@
 ent_data = data
+        if self.timer is not None and not self.timer.cancelled:
+            self.timer.reset()
         return s
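This record, the two before it, and several below modify an existing file; their hunk headers count characters rather than lines, which matches the patch text format of Google's diff-match-patch library. Assuming that format, a minimal sketch of applying the raw (percent-encoded) diff field to old_contents to recover the post-commit file:

from diff_match_patch import diff_match_patch  # pip install diff-match-patch

def apply_commit(old_contents, raw_diff):
    dmp = diff_match_patch()
    # patch_fromText() expects the percent-encoded form and parses
    # character-offset hunks such as "@@ -2013,32 +2013,127 @@".
    patches = dmp.patch_fromText(raw_diff)
    new_contents, results = dmp.patch_apply(patches, old_contents)
    assert all(results), "some hunks failed to apply"
    return new_contents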
4178691ed3826239721f2d9a6435ef90cfb5cf82
Add color to input
flask_init/run.py
flask_init/run.py
#!/usr/bin/python
# -*- coding:utf-8 -*-
import os

import six

import templates
from .creator import Creator
from .exceptions import InvalidFolderName


def main():
    name = six.moves.input('Input project name (default is "flask_proj"): ')
    name = name or 'flask_proj'
    module = six.moves.input('Input module name (default is "common"): ')
    module = module or 'common'

    creator = Creator(os.getcwd())
    try:
        creator.create_folder(creator.root_path, name)
        proj_path = os.path.join(creator.root_path, name)
    except InvalidFolderName:
        six.print_("\nInvalid Project Name, use another name!")
    else:
        creator.create_file(proj_path, "manage.py", templates.manager)
        creator.create_folder(proj_path, "requirements")
        creator.create_file(os.path.join(proj_path, "requirements"),
                            "dev.txt", templates.requirements)

        app_init = templates.app_init.substitute(module=module)
        creator.create_module(proj_path, "app", app_init)

        app_path = os.path.join(proj_path, "app")
        creator.create_folder(app_path, "templates")

        template_path = os.path.join(app_path, "templates")
        creator.create_file(template_path, "base.html", templates.base_html)
        creator.create_folder(template_path, module)
        creator.create_file(os.path.join(template_path, module),
                            "index.html", templates.module_html)

        module_init = templates.module_init.substitute(module=module)
        creator.create_folder(app_path, "static")
        creator.create_module(app_path, module, module_init)

        module_view = templates.module_views.substitute(module=module)
        module_path = os.path.join(app_path, module)
        creator.create_file(module_path, "views.py", module_view)
        creator.create_file(module_path, "models.py", templates.blank)

        six.print_("\n".join(creator.errors))
        six.print_("You can install package "
                   "\"pip install -r requirements/dev.txt\"")
        six.print_("You can run \"python manage.py run\"")


if __name__ == '__main__':
    main()
Python
0.000002
@@ -155,26 +155,44 @@
 def 
-main(
+color_input(color, text
 ):
-    name = 
+    return six
@@ -208,41 +208,165 @@
 put(
-'Input project name (default is "
+color+text+'\033[0m')
+
+
+def color_print(color, text):
+    six.print_(color+text+'\033[0m')
+
+
+def main():
+    name = color_input("\033[35m", "> Project name [ flas
@@ -371,21 +371,20 @@
 ask_proj
-"): '
+]: "
 )
     na
@@ -426,67 +426,58 @@
 e = 
-six.moves.input('Input module name (default is "
+color_input("\033[35m", "> Module name [ 
 common
-"): '
+]: "
 )
@@ -1951,27 +1951,40 @@
 k)
 
-six.
+color_
 print
-_
 (
+"\033[31m", 
 "\n".joi
@@ -2011,63 +2011,130 @@
-six.print_("You can install package "
-                   "\
+color_print("\033[34m", "Complete!")
+    six.print_("You can install package using ", end="")
+    color_print("\033[34m", "pip
@@ -2165,18 +2165,16 @@
 /dev.txt
-\"
 ")
     s
@@ -2196,17 +2196,61 @@
 can run
-\
+using ", end="")
+    color_print("\033[34m", "
 python
@@ -2266,10 +2266,8 @@
 run
-\"
 ")
48933f27c098b05276271a62ed3c970e4d5721b0
add missing file
src/radical/repex/utils.py
src/radical/repex/utils.py
Python
0.000003
@@ -0,0 +1,1093 @@
+
+import radical.utils as ru
+
+
+# ------------------------------------------------------------------------------
+#
+def expand_ln(to_link, src_sbox, tgt_sbox, rid, cycle):
+
+    expand = {'rid'  : rid,
+              'cycle': cycle}
+
+    if not src_sbox: src_sbox = '.'
+    if not tgt_sbox: tgt_sbox = '.'
+
+    ret = list()
+    for data in ru.as_list(to_link):
+        src, tgt = data.split('>')
+        try:
+            src = src.strip() % expand
+            tgt = tgt.strip() % expand
+        except:
+            raise RuntimeError('expansion error: %s : %s : %s' % (src, tgt, expand))
+        ret.append('%s/%s > %s/%s' % (src_sbox, src, tgt_sbox, tgt))
+
+    return ret
+
+
+# ------------------------------------------------------------------------------
+#
+def last_task(replica):
+
+    cs = replica.current_stage
+
+    if cs >= len(replica.stages):
+        cs -= 1
+
+    assert(cs < len(replica.stages))
+    tasks = replica.stages[cs].tasks
+
+    assert(tasks)
+    assert(len(tasks) == 1)
+
+    return list(tasks)[0]
+
+
+# ------------------------------------------------------------------------------
+
88788c215c619ab894e21243d584541f311dbfb9
Add eventlet test check to new tests __init__.py
oslo_concurrency/tests/__init__.py
oslo_concurrency/tests/__init__.py
Python
0.000009
@@ -0,0 +1,704 @@
+# Copyright 2014 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+
+if os.environ.get('TEST_EVENTLET'):
+    import eventlet
+    eventlet.monkey_patch()
112c3a5a7728aea9be59b4bab1c26932e5faceaf
replace simple_api.py, set filename via commandline param, git add files that dont exist
import_fusion.py
import_fusion.py
Python
0
@@ -0,0 +1,2758 @@
+#!/usr/bin/python
+import json
+import requests
+import sys
+import codecs
+import subprocess
+from datetime import datetime
+from optparse import OptionParser
+
+
+if __name__ == "__main__":
+
+    parser = OptionParser()
+    parser.add_option("-f", dest="output_file", help="output filename (will be stored under the data/ directory)")
+
+    (options, args) = parser.parse_args()
+
+    if not options.output_file:
+        print "must include -f <output filename>"
+        exit()
+
+    OUTPUT_PATH = 'data/' + options.output_file
+
+
+    with open("api.json") as f:
+        keys = json.loads(f.read())
+
+    server_key = keys["ServerKey"]
+    tablename = keys['fusion_table']
+    endpoint = 'https://www.googleapis.com/fusiontables/v1/query?sql=SELECT * FROM '
+    apicall = "".join([endpoint, tablename, "&key=", server_key])
+
+    raw = requests.get(apicall)
+    if not raw.ok:
+        print("something wrong with the apicall\n would print the requests object for inspection and debugging:")
+        print('dir:', dir(raw))
+        print('status code:', raw.status_code)
+        print('text:', raw.text)
+        sys.exit()
+
+    data = raw.json()
+
+    geojson = {"type": "FeatureCollection", "features": []}
+    for place in data['rows']:
+        geojson['features'].append(
+            {
+                "geometry": {
+                    "type": "Point",
+                    "coordinates": [
+                        place[6],
+                        place[5]
+                    ]
+                },
+                "type": "Feature",
+                "properties": {
+                    "city": place[1],
+                    "name": place[0],
+                    "district": place[2],
+                    "subdistrict": place[3],
+                    "address": place[4],
+                    "operator": place[16],
+                    "days": [
+                        place[8],
+                        place[9],
+                        place[10],
+                        place[11],
+                        place[12],
+                        place[13]
+                    ],
+                    "phones": place[7],
+                    "notes": place[15],
+                    "error": place[17]
+                }
+            }
+        )
+
+    with codecs.open(OUTPUT_PATH, 'wb+', 'utf-8') as f:
+        output = "stations=" + json.dumps(geojson, indent=4, ensure_ascii=False)
+        f.write(output)
+        f.close()
+
+    subprocess.call(['git', 'add', OUTPUT_PATH])
+    subprocess.call(['git', 'commit', OUTPUT_PATH, '-m', 'commiting updated geojson from Fusion table %s' % datetime.now().strftime("%d/%m/%Y %H:%M")])
43d23f19933e898254d58c4874e6f0c0ac3b1cc6
Add example config file
config-example.py
config-example.py
Python
0.000001
@@ -0,0 +1,2400 @@
+# Example configuration file for for Pyaiot
+
+# Configuration options are shared between all pyaiot components.
+
+
+# Debug
+# Enable debug logging for all components.
+#debug = False
+
+# Broker host:
+# Other component connect to this host for their broker connection. The
+# dashboard passes this hostname to the clients for their broker connection.
+#broker_host = 'localhost'
+
+# Broker port number:
+# This is the tcp port number the websocket of the broker is listening on. Other
+# component use this configuration options to determine which port number to
+# connect to.
+#broker_port = 8020
+
+# Key file
+# The key file is necessary to authenticate different components to the broker.
+# Both the broker and the other components use the path specified to find the
+# key file for authentication.
+#key_file = '~/.pyaiot/keys'
+
+# coap port
+# The coap component listens on this port for CoAP messages from nodes
+#coap_port = 5683
+
+# MQTT host
+# The hostname of the MQTT broker. The mqtt component connects to this hostname
+# for the MQTT broker connection.
+#mqtt_host = 'localhost'
+
+# MQTT port
+# The port the MQTT broker listens on. The MQTT component connects to this port
+# on the MQTT broker.
+#mqtt_port = 1886
+
+# Gateway port
+# This port is used by the websocket gateway to listen on. Websocket nodes
+# connect to this port to connect with the websocket gateway.
+#gateway_port = 8001
+
+# max time
+# Both the CoAP broker and the MQTT broker remove nodes from the broker after
+# this many seconds without any messages from a node.
+#max_time = 120
+
+# Web Port
+# The web interface listens on this port for HTTP connections.
+#web_port = 8080
+
+# Broker SSL
+# When enabled, the URI to the broker is supplied with wss to indicate to use
+# SSL to connect to the broker. Use this when you have a reverse proxy in front
+# of the dashboard to handle SSL termination.
+#broker_ssl=False
+
+# Camera URL
+# The HTTP clients get this URL for their connection to webcam images. If None
+# is configured, no webcam functionality is configured
+#camera_url = None
+
+# Title
+# The title of the web page.
+#title = 'IoT Dashboard'
+
+# Logo
+# The logo for the navbar of the dashboard. Should be an URL to the image. If
+# None is configured, no logo is shown.
+#logo = None
+
+# Favicon
+# Optionally show a favicon on the dashboard. Should be an URL to an image. If
+# None is configured, no favicon is passed to the web page.
+#favicon = None
b1caa89d75aecc564d504e5baffd0dc7619cd587
Create foursq_friends.py
foursq_friends.py
foursq_friends.py
Python
0.000028
@@ -0,0 +1,796 @@
+import json
+from foursq_utils import *
+
+def fetch_usr_friends(user_id):
+    super_token = 'QEJ4AQPTMMNB413HGNZ5YDMJSHTOHZHMLZCAQCCLXIX41OMP'
+    url = 'https://api.foursquare.com/v2/users/' + str(user_id) + '/friends?oauth_token=' + super_token + '&v=20210115'
+    try:
+        raw = get_raw_info(url)
+        data = json.loads(raw)
+        if data['meta']['code'] != 200:
+            return -1
+        friends_info = data['response']['friends']
+        friendsUID = []
+        if 'items' in friends_info.keys():
+            for item in friends_info['items']:
+                friendsUID.append(item['id'])
+            friends_info.setdefault('friendsUID', friendsUID)
+        else:
+            friends_info.setdefault('friendsUID', [])
+        return friends_info
+
+    except:
+        return -1
+
139524072cc56d19ce887aaa95705dff8a952cc2
Add lc035_search_insert_position.py
lc035_search_insert_position.py
lc035_search_insert_position.py
Python
0.000002
@@ -0,0 +1,719 @@
+"""Leetcode 35. Search Insert Position
+Easy
+
+URL: https://leetcode.com/problems/search-insert-position/
+
+Given a sorted array and a target value, return the index if the target is found.
+If not, return the index where it would be if it were inserted in order.
+
+You may assume no duplicates in the array.
+
+Example 1:
+Input: [1,3,5,6], 5
+Output: 2
+
+Example 2:
+Input: [1,3,5,6], 2
+Output: 1
+
+Example 3:
+Input: [1,3,5,6], 7
+Output: 4
+
+Example 4:
+Input: [1,3,5,6], 0
+Output: 0
+"""
+
+class Solution(object):
+    def searchInsert(self, nums, target):
+        """
+        :type nums: List[int]
+        :type target: int
+        :rtype: int
+        """
+        pass
+
+
+def main():
+    pass
+
+
+if __name__ == '__main__':
+    main()
1669cd22d6c8ee5bcb37c6770c98ddcf8848d901
Make lots of xyz with differing natoms visualizable
pad-trajectory.py
pad-trajectory.py
Python
0.000002
@@ -0,0 +1,1583 @@
+#!/usr/bin/env python3
+#
+# Script to generate an ext-xyz trajectory from individual ext-xyz files with varying atom numbers using ASE.
+# Appens 'X' atoms at origin to obtain frames with equal lengths
+# by Patrick Melix
+# 2020/06/08
+#
+
+from ase import io, Atom
+import os
+
+def main(inList, outFile='traj.xyz', outFormat='extxyz'):
+    #if output exists mv to .bak
+    if os.path.isfile(outFile):
+        print('ATTENTION: {:} exists, moving to *.bak'.format(outFile))
+        os.rename(outFile, outFile+'.bak')
+
+    traj = []
+
+    for inFile in inList:
+        if not os.path.isfile(inFile):
+            raise ValueError('File {:} does not exist'.format(inFile))
+
+        print(inFile)
+        traj.append(io.read(inFile))
+
+    maxLen = max([len(frame) for frame in traj])
+
+    for i in range(len(traj)):
+        if len(traj[i]) < maxLen:
+            for j in range(maxLen-len(traj[i])):
+                traj[i].append(Atom('X'))
+
+    with open(outFile,'w') as f:
+        for frame in traj:
+            frame.write(f, format=outFormat)
+    return
+
+
+#########################
+# Functions
+########################
+
+
+
+
+if __name__ == "__main__":
+    import argparse
+    parser = argparse.ArgumentParser(description='Combine different lengths of XYZ')
+    parser.add_argument('--outformat', help='Output ASE Format', default='extxyz')
+    parser.add_argument('--outfile', help='Output File', default='traj.xyz')
+    parser.add_argument('-files', type=str, nargs='+', default=[], help='All the XYZ Files')
+    args = parser.parse_args()
+    main(args.files, args.outfile, args.outformat)
+
ac4d1cfc9cb6af0dea2196cdd4f0ca356e392062
Fix cross-device renames in PythonInterpreterCache.
src/python/twitter/pants/python/interpreter_cache.py
src/python/twitter/pants/python/interpreter_cache.py
# ==================================================================================================
# Copyright 2013 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================

from __future__ import print_function

import os

from twitter.common.dirutil import safe_mkdir
from twitter.common.python.http.link import SourceLink
from twitter.common.python.http.link import EggLink
from twitter.common.python.installer import EggInstaller
from twitter.common.python.interpreter import (
    PythonCapability,
    PythonIdentity,
    PythonInterpreter,
)
from twitter.common.python.obtainer import Obtainer

from .python_setup import PythonSetup
from .resolver import crawler_from_config, fetchers_from_config

from pkg_resources import Requirement


# TODO(wickman) Create a safer version of this and add to twitter.common.dirutil
def safe_link(src, dst):
  try:
    os.unlink(dst)
  except OSError:
    pass
  os.symlink(src, dst)


def resolve_interpreter(config, interpreter, requirement, logger=print):
  """Given a :class:`PythonInterpreter` and :class:`Config`, and a requirement,
     return an interpreter with the capability of resolving that requirement or None
     if it's not possible to install a suitable requirement."""
  interpreter_cache = PythonInterpreterCache.cache_dir(config)
  interpreter_dir = os.path.join(interpreter_cache, str(interpreter.identity))
  if interpreter.satisfies(PythonCapability([requirement])):
    return interpreter

  def installer_provider(sdist):
    return EggInstaller(sdist, strict=requirement.key != 'setuptools', interpreter=interpreter)

  egg = resolve_and_link(
      config,
      requirement,
      os.path.join(interpreter_dir, requirement.key),
      installer_provider,
      logger=logger)
  if egg:
    return interpreter.with_extra(egg.name, egg.raw_version, egg.url)
  else:
    logger('Failed to resolve requirement %s for %s' % (requirement, interpreter))


def resolve_and_link(config, requirement, target_link, installer_provider, logger=print):
  if os.path.exists(target_link) and os.path.exists(os.path.realpath(target_link)):
    egg = EggLink(os.path.realpath(target_link))
    if egg.satisfies(requirement):
      return egg

  fetchers = fetchers_from_config(config)
  crawler = crawler_from_config(config)
  obtainer = Obtainer(crawler, fetchers, [])
  obtainer_iterator = obtainer.iter(requirement)
  links = [link for link in obtainer_iterator if isinstance(link, SourceLink)]
  for link in links:
    logger('  fetching %s' % link.url)
    sdist = link.fetch()
    logger('    installing %s' % sdist)
    installer = installer_provider(sdist)
    dist_location = installer.bdist()
    target_location = os.path.join(os.path.dirname(target_link), os.path.basename(dist_location))
    os.rename(dist_location, target_location)
    safe_link(target_location, target_link)
    logger('    installed %s' % target_location)
    return EggLink(target_location)


# This is a setuptools <1 and >1 compatible version of Requirement.parse.
# For setuptools <1, if you did Requirement.parse('setuptools'), it would
# return 'distribute' which of course is not desirable for us. So they
# added a replacement=False keyword arg. Sadly, they removed this keyword
# arg in setuptools >= 1 so we have to simply failover using TypeError as a
# catch for 'Invalid Keyword Argument'.
def failsafe_parse(requirement):
  try:
    return Requirement.parse(requirement, replacement=False)
  except TypeError:
    return Requirement.parse(requirement)


def resolve(config, interpreter, logger=print):
  """Resolve and cache an interpreter with a setuptools and wheel capability."""
  setuptools_requirement = failsafe_parse(
      'setuptools==%s' % config.getdefault('python-setup', 'setuptools_version', '2.2'))
  wheel_requirement = failsafe_parse(
      'wheel==%s' % config.getdefault('python-setup', 'wheel_version', '0.22.0'))
  interpreter = resolve_interpreter(config, interpreter, setuptools_requirement, logger=logger)
  if interpreter:
    return resolve_interpreter(config, interpreter, wheel_requirement, logger=logger)


class PythonInterpreterCache(object):
  @staticmethod
  def cache_dir(config):
    return PythonSetup(config).scratch_dir('interpreter_cache', default_name='interpreters')

  def __init__(self, config, logger=None):
    self._path = self.cache_dir(config)
    self._config = config
    safe_mkdir(self._path)
    self._interpreters = set()
    self._logger = logger or (lambda msg: True)

  @property
  def interpreters(self):
    return self._interpreters

  def interpreter_from_path(self, path):
    interpreter_dir = os.path.basename(path)
    identity = PythonIdentity.from_path(interpreter_dir)
    try:
      executable = os.readlink(os.path.join(path, 'python'))
    except OSError:
      return None
    interpreter = PythonInterpreter(executable, identity)
    return resolve(self._config, interpreter, logger=self._logger)

  def setup_interpreter(self, interpreter):
    interpreter_dir = os.path.join(self._path, str(interpreter.identity))
    safe_mkdir(interpreter_dir)
    safe_link(interpreter.binary, os.path.join(interpreter_dir, 'python'))
    return resolve(self._config, interpreter)

  def setup_cached(self):
    for interpreter_dir in os.listdir(self._path):
      path = os.path.join(self._path, interpreter_dir)
      pi = self.interpreter_from_path(path)
      if pi:
        self._logger('Detected interpreter %s: %s' % (pi.binary, str(pi.identity)))
        self._interpreters.add(pi)

  def setup_paths(self, paths):
    for interpreter in PythonInterpreter.all(paths):
      identity_str = str(interpreter.identity)
      path = os.path.join(self._path, identity_str)
      pi = self.interpreter_from_path(path)
      if pi is None:
        self.setup_interpreter(interpreter)
        pi = self.interpreter_from_path(path)
        if pi is None:
          continue
      self._interpreters.add(pi)

  def matches(self, filters):
    for interpreter in self._interpreters:
      if any(interpreter.identity.matches(filt) for filt in filters):
        yield interpreter

  def setup(self, paths=(), force=False, filters=('',)):
    has_setup = False
    setup_paths = paths or os.getenv('PATH').split(os.pathsep)
    self.setup_cached()
    if force:
      has_setup = True
      self.setup_paths(setup_paths)
    matches = list(self.matches(filters))
    if len(matches) == 0 and not has_setup:
      self.setup_paths(setup_paths)
      matches = list(self.matches(filters))
    if len(matches) == 0:
      self._logger('Found no valid interpreters!')
    return matches

  def select_interpreter(self, compatibilities, allow_multiple=False):
    if allow_multiple:
      return compatibilities
    me = PythonInterpreter.get()
    if me in compatibilities:
      return [me]
    return [min(compatibilities)] if compatibilities else []
Python
0
@@ -942,16 +942,30 @@
 mport os
+
+import shutil
 
 
 from t
@@ -3496,16 +3496,18 @@
-os.renam
+shutil.mov
 e(di
@@ -5932,16 +5932,37 @@
 erpreter
+, logger=self._logger
 )
 
   def
900b09803f5c49b8645ba7f3d47eb17515061377
Create heads_and_legs.py
heads_and_legs.py
heads_and_legs.py
Python
0.000431
@@ -0,0 +1,284 @@
+#Kunal Gautam
+#Codewars : @Kunalpod
+#Problem name: Heads and Legs
+#Problem level: 8 kyu
+
+def animals(heads, legs):
+    if heads==0 and legs==0:
+        return (0,0)
+    y = legs//2 - heads
+    x = heads-y
+    if x<0 or y<0 or legs%2!=0:
+        return "No solutions"
+    return (x,y)
fed98c8a9723c6fe18c123015b51714dc4ccdf68
add migrations
actual_play/migrations/0006_game_thumbnail.py
actual_play/migrations/0006_game_thumbnail.py
Python
0.000001
@@ -0,0 +1,502 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.9.1 on 2016-10-20 22:45
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('actual_play', '0005_auto_20161010_1313'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='game',
+            name='thumbnail',
+            field=models.ImageField(blank=True, null=True, upload_to='actual_play/image/%Y/%m/%d'),
+        ),
+    ]
5c9ffc4a0ab9f8aed3071a0bf4ad0fc69070b628
Create inside_market.py
inside_market.py
inside_market.py
Python
0.000059
@@ -0,0 +1,2662 @@
+import pandas as pd
+import numpy as np
+
+# update current state of our bid and ask
+# iterate thru each trade and determine if a fill was generated
+
+# id
+# price
+# qty
+# side - bid/ask
+# status - live, canceled, rejected
+
+LIVE = 0
+CANCELED = 1
+REJECTED = 2
+FILLED = 3
+PARTIAL = 4
+
+BID = 5
+ASK = 6
+
+MINUS_INF = -9999999
+PLUS_INF = 9999999
+
+# represents a limit order in our inside market
+class order():
+    id = 0
+    def __init__(self, price, qty, side, status):
+        self.id = order.id
+        order.id += 1
+        self.price = price
+        self.qty = qty
+        self.side = side
+        self.status = status
+
+    def cancel(self):
+        self.status = CANCELED
+
+    def modify(self, new_price, new_qty = -1):
+        self.price = new_price
+        if new_qty > 0:
+            self.qty = new_qty
+
+    def evaluate(self, trade_price):
+        if self.side == BID:
+            if trade_price < self.price:
+                self.status = FILLED
+                return True, self.price
+            else:
+                return False, 0.0
+        else:
+            if trade_price > self.price:
+                self.status = FILLED
+                return True, self.price
+            else:
+                return False, 0.0
+
+class inside_market():
+    def __init__(self, bid_price, ask_price):
+        if bid_price < ask_price:
+            self.bid = order(bid_price, 1, BID, LIVE)
+            self.ask = order(ask_price, 1, ASK, LIVE)
+            self.status = 1
+        else:
+            self.status = -1
+
+    def update(self, side, new_price):
+        if side == BID:
+            if new_price < self.ask.price:
+                self.bid.price = new_price
+                return True, "MODIFIED ORDER ID = ", self.bid.id
+            else:
+                return False, "FAILED TO MODIFY ORDER ID = ", self.bid.id, " RESULTING BID WOULD HAVE CROSSED OUR ASK"
+        else:
+            if new_price > self.bid.price:
+                self.ask.price = new_price
+                return True, "MODIFIED ORDER ID = ", self.ask.id
+            else:
+                return False, "FAILED TO MODIFY ORDER ID = ", self.bid.id, " RESULTING ASK WOULD HAVE CROSSED OUR BID"
+
+    def evaluate(self, trade_price):
+        bid_fill, bid_fill_price = self.bid.evaluate(trade_price)
+        ask_fill, ask_fill_price = self.ask.evaluate(trade_price)
+        if bid_fill == True:
+            return BID, bid_fill_price
+        elif ask_fill == True:
+            return ASK, ask_fill_price
+        else:
+            return None, 0.0
+
+    def shift(self, increment):
+        self.bid.price += increment
+        self.ask.price += increment
+
+    def exit(self, side, increment):
+        if side == BID:
+            # shift the bid down to minus_inf to not buy anymore
+            self.bid.price = MINUS_INF
+            self.ask.price -= increment
+        else:
+            # shift the ask up to plus_inf to not sell anymore
+            self.ask.price = PLUS_INF
+            self.bid.price += increment
0dd3894fb8816f6f904e5c7d204ab2672b304588
Add earth mesh module
gravity_waves/earth_mesh.py
gravity_waves/earth_mesh.py
Python
0
@@ -0,0 +1,1202 @@
+from __future__ import absolute_import, print_function, division
+
+from firedrake import *
+
+
+__all__ = ["generate_earth_mesh"]
+
+
+def generate_earth_mesh(r_level, num_layers, thickness, hexes=False):
+    """Generates an Earth-like spherical mesh for the gravity wave
+    problem.
+
+    :arg r_level: An ``int`` denoting the number of refinement
+                  levels.
+    :arg num_layers: An ``int`` denoting the number of mesh layers.
+    :arg thickness: The thickness of the spherical shell (in meters).
+    :arg hexes: A ``bool`` indicating whether to generate a hexahedral mesh.
+
+    Returns: A Firedrake extruded spherical mesh.
+    """
+
+    earth_radius = 6.371e6
+    layer_height = thickness / num_layers
+
+    if hexes:
+        spherical_base = CubedSphereMesh(earth_radius,
+                                         refinement_level=r_level)
+    else:
+        spherical_base = IcosahedralSphereMesh(earth_radius,
+                                               refinement_level=r_level)
+
+    earth_mesh = ExtrudedMesh(spherical_base, layers=num_layers,
+                              layer_height=layer_height,
+                              extrusion_type="radial")
+    return earth_mesh
5578d11f45e9c41ab9c4311f2bed48b9c24d9bf5
Create file for Nonterminal have method
tests/grammar_term-nonterm_test/NonterminalHaveTest.py
tests/grammar_term-nonterm_test/NonterminalHaveTest.py
Python
0.000001
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+"""
+:Author Patrik Valkovic
+:Created 23.06.2017 16:39
+:Licence GNUv3
+Part of grammpy
+
+"""
b40c6ce73c439e7d74b867702fdd2c4cd7ad8b15
add testrunner to automactically create/delete a test db during python and django tests.
couchdbkit/ext/django/testrunner.py
couchdbkit/ext/django/testrunner.py
Python
0
@@ -0,0 +1,2802 @@
+from django.test.simple import DjangoTestSuiteRunner
+from django.conf import settings
+from couchdbkit.ext.django import loading as loading
+from couchdbkit.resource import ResourceNotFound
+
+class CouchDbKitTestSuiteRunner(DjangoTestSuiteRunner):
+    """
+    A test suite runner for couchdbkit. This offers the exact same functionality
+    as the default django test suite runner, except that it connects all the couchdbkit
+    django-extended models to a test database. The test database is deleted at the
+    end of the tests. To use this, just add this file to your project and the following
+    line to your settings.py file:
+
+    TEST_RUNNER = 'myproject.testrunner.CouchDbKitTestSuiteRunner'
+    """
+
+    dbs = []
+
+    def get_test_db_name(self, dbname):
+        return "%s_test" % dbname
+
+    def setup_databases(self, **kwargs):
+        print "overridding the couchdbkit database settings to use a test database!"
+
+        # first pass: just implement this as a monkey-patch to the loading module
+        # overriding all the existing couchdb settings
+        self.dbs = [(app, self.get_test_db_name(url)) for app, url in getattr(settings, "COUCHDB_DATABASES", [])]
+        old_handler = loading.couchdbkit_handler
+        couchdbkit_handler = loading.CouchdbkitHandler(self.dbs)
+        loading.couchdbkit_handler = couchdbkit_handler
+        loading.register_schema = couchdbkit_handler.register_schema
+        loading.get_schema = couchdbkit_handler.get_schema
+        loading.get_db = couchdbkit_handler.get_db
+
+        # register our dbs with the extension document classes
+        for app, value in old_handler.app_schema.items():
+            for name, cls in value.items():
+                cls.set_db(loading.get_db(app))
+
+
+        return super(CouchDbKitTestSuiteRunner, self).setup_databases(**kwargs)
+
+    def teardown_databases(self, old_config, **kwargs):
+        deleted_databases = []
+        skipcount = 0
+        for app, item in self.dbs:
+            app_label = app.split('.')[-1]
+            db = loading.get_db(app_label)
+            if db.dbname in deleted_databases:
+                skipcount += 1
+                continue
+            try:
+                db.server.delete_db(db.dbname)
+                deleted_databases.append(db.dbname)
+                print "deleted database %s for %s" % (db.dbname, app_label)
+            except ResourceNotFound:
+                print "database %s not found for %s! it was probably already deleted." % (db.dbname, app_label)
+        if skipcount:
+            print "skipped deleting %s app databases that were already deleted" % skipcount
+        return super(CouchDbKitTestSuiteRunner, self).teardown_databases(old_config, **kwargs)
913c9a10b2eb3b3d9de108a82a3251b2c0de0e10
Add test for Hostname object
cybox/test/objects/hostname_test.py
cybox/test/objects/hostname_test.py
Python
0
@@ -0,0 +1,565 @@
+# Copyright (c) 2014, The MITRE Corporation. All rights reserved.
+# See LICENSE.txt for complete terms.
+
+import unittest
+
+from cybox.objects.hostname_object import Hostname
+from cybox.test.objects import ObjectTestCase
+
+
+class TestHostname(ObjectTestCase, unittest.TestCase):
+    object_type = "HostnameObjectType"
+    klass = Hostname
+
+    _full_dict = {
+        'is_domain_name': True,
+        'hostname_value': "www.example.com",
+        'naming_system': ["DNS", "NETBIOS"],
+        'xsi:type': object_type,
+    }
+
+if __name__ == "__main__":
+    unittest.main()
514aca20c6f076a86819d7180f36c3b2e8bcc33b
Add integration test checking compatibility of Keras models with TF optimizers.
tests/integration_tests/test_tensorflow_integration.py
tests/integration_tests/test_tensorflow_integration.py
Python
0
@@ -0,0 +1,1521 @@
+from __future__ import print_function
+
+import os
+import tempfile
+import pytest
+import keras
+from keras import layers
+from keras.utils.test_utils import get_test_data
+from keras.utils.test_utils import keras_test
+
+
[email protected](keras.backend.backend() != 'tensorflow', reason='Requires TF backend')
+@keras_test
+def test_tf_optimizer():
+    import tensorflow as tf
+
+    num_hidden = 10
+    output_dim = 2
+    input_dim = 10
+    target = 0.8
+    optimizer = tf.train.AdadeltaOptimizer(
+        learning_rate=1., rho=0.95, epsilon=1e-08)
+
+    (x_train, y_train), (x_test, y_test) = get_test_data(
+        num_train=1000, num_test=200,
+        input_shape=(input_dim,),
+        classification=True, num_classes=output_dim)
+
+    model = keras.Sequential()
+    model.add(layers.Dense(num_hidden,
+                           activation='relu',
+                           input_shape=(input_dim,)))
+    model.add(layers.Dense(output_dim, activation='softmax'))
+
+    model.compile(loss='sparse_categorical_crossentropy',
+                  optimizer=optimizer,
+                  metrics=['accuracy'])
+    history = model.fit(x_train, y_train, epochs=8, batch_size=16,
+                        validation_data=(x_test, y_test), verbose=2)
+    assert history.history['val_acc'][-1] >= target
+
+    # Test saving.
+    _, fname = tempfile.mkstemp('.h5')
+    model.save(fname)
+    model = keras.models.load_model(fname)
+    assert len(model.weights) == 4
+    os.remove(fname)
+
+
+if __name__ == '__main__':
+    pytest.main([__file__])
3840fbe6ca33e48b9bdbd78e85830a13606f612c
Create efi-smc.py
efi-smc.py
efi-smc.py
Python
0.000001
@@ -0,0 +1,1698 @@
+#!/usr/bin/python
+from lxml import html
+import requests
+
+# Get the EFI/SMC table from Apple's Website
+page = requests.get('http://support.apple.com/en-us/HT1237')
+tree = html.fromstring(page.text)
+
+# Count the number of rows which will be used in looping
+rows = tree.xpath('//*[@id="kbtable"]/tbody/tr')
+
+# For each row:
+for i in range(len(rows)):
+	# Get the friendly name, model, EFI version, SMC version, and the download URLs
+	friendly_name = tree.xpath('//*[@id="kbtable"]/tbody/tr[%(i)s]/td[1]/text()' % locals())
+	model = tree.xpath('//*[@id="kbtable"]/tbody/tr[%(i)s]/td[2]/p/text()' % locals())
+	efi_version = tree.xpath('//*[@id="kbtable"]/tbody/tr[%(i)s]/td[3]/p/a/text()' % locals())
+	efi_url = tree.xpath('//*[@id="kbtable"]/tbody/tr[3]/td[3]/p/a/@href' % locals())
+	smc_version = tree.xpath('//*[@id="kbtable"]/tbody/tr[%(i)s]/td[4]/p/a/text()' % locals())
+	smc_url = tree.xpath('//*[@id="kbtable"]/tbody/tr[%(i)s]/td[4]/a/@href' % locals())
+	
+	# Print everything in a human-readable format
+	if not friendly_name:
+		continue
+	else:
+		print friendly_name[0]
+		
+	if not model:
+		model = tree.xpath('//*[@id="kbtable"]/tbody/tr[%(i)s]/td[2]/text()' % locals())
+		print model[0]
+	else:
+		print model[0]
+		
+	if not efi_version:
+		efi_version = tree.xpath('//*[@id="kbtable"]/tbody/tr[%(i)s]/td[3]/a/text()' % locals())
+		if not efi_version:
+			print 'No EFI'
+		else:
+			print efi_version[0]
+			print efi_url[0]
+	else:
+		print efi_version[0]
+		
+	if not smc_version:
+		smc_version = tree.xpath('//*[@id="kbtable"]/tbody/tr[%(i)s]/td[4]/a/text()' % locals())
+		if not smc_version:
+			print 'No SMC'
+		else:
+			print smc_version[0]
+			print smc_url[0]
+	else:
+		print smc_version[0]
+	print '\n'
4c23b6b75f64698fa40a263760fea8d7648ff6d6
Add self as argument
calibrate_sense_hat/calibrate_sense_hat.py
calibrate_sense_hat/calibrate_sense_hat.py
#!/usr/bin/python
import os
from PIL import Image  # pillow
from sense_hat import SenseHat


class BlxSenseHat(object):
    def __init__(
            self,
            text_assets='calibrate_sense_hat_text'
            ):
        self._text_dict = {}

        # Load text assets
        dir_path = os.path.dirname(__file__)
        self._load_text_assets(
            os.path.join(dir_path, '%s.png' % text_assets),
            os.path.join(dir_path, '%s.txt' % text_assets)
        )
        self._sense_hat = SenseHat()

    ####
    # Text assets
    ####

    # Text asset files are rotated right through 90 degrees to allow blocks of
    # 32 contiguous pixels to represent one 4 x 8 character. These are stored
    # in a 8 x 380 pixel png image with characters arranged adjacently
    # Consequently we must rotate the pixel map left through 90 degrees to
    # compensate when drawing text

    def _load_text_assets(self, text_image_file, text_file):
        """
        Internal. Builds a character indexed dictionary of pixels used
        by the show_message function below
        """
        text_pixels = self.load_image(text_image_file, False)
        with open(text_file, 'r') as f:
            loaded_text = f.read()
        self._text_dict = {}
        for index, s in enumerate(loaded_text):
            start = index * 32
            end = start + 32
            char = text_pixels[start:end]
            self._text_dict[s] = char

    def load_image(self, file_path, redraw=True):
        """
        Accepts a path to an 4 x 8 image file and updates the LED matrix with
        the image
        """
        if not os.path.exists(file_path):
            raise IOError('%s not found' % file_path)
        img = Image.open(file_path).convert('RGB')
        pixel_list = list(map(list, img.getdata()))
        if redraw:
            self.set_pixels(pixel_list)
        return pixel_list

    def _get_char_pixels(self, s):
        """
        Internal. Safeguards the character indexed dictionary for the
        show_message function below
        """
        if len(s) == 1 and s in self._text_dict.keys():
            return list(self._text_dict[s])
        else:
            return list(self._text_dict['?'])

    # def display_message( self,
    #                      text_string,
    #                      x_pos=0,
    #                      y_pos=0,
    #                      text_colour=[255, 255, 255],
    #                      back_colour=[0, 0, 0]
    #                      ):
    #     """
    #     Sets a string of text on the LED matrix at the specified
    #     location and colours
    #     """
    #     display_pixels = []
    #     for s in text_string:
    #         display_pixels.extend(self._get_char_pixels(s))
    #     # Recolour pixels as necessary
    #     coloured_pixels = [
    #         text_colour if pixel == [255, 255, 255] else back_colour
    #         for pixel in display_pixels
    #     ]

    def show_message(text_string, scroll_speed=.1, text_colour=[255, 255, 255],
                     back_colour=[0, 0, 0]):
        """
        Scrolls a string of text across the LED matrix using the specified
        speed and colours
        """
        scroll_pixels = []
        string_padding = [[0, 0, 0]] * 64
        scroll_pixels.extend(string_padding)
        for s in text_string:
            scroll_pixels.extend(get_char_pixels(s))
        scroll_pixels.extend(string_padding)
        # Shift right by 8 pixels per frame to scroll
        scroll_length = len(scroll_pixels) // 8
        for i in range(scroll_length - 8):
            start = i * 8
            end = start + 64
            self._sense_hat.set_pixels(scroll_pixels[start:end])
            time.sleep(scroll_speed)

    # def set_pixels(self, pixel_list):
    #     """
    #     Accepts a list containing 64 smaller lists of [R,G,B] pixels and
    #     updates the LED matrix. R,G,B elements must intergers between 0
    #     and 255
    #     """
    #     if len(pixel_list) != 64:
    #         raise ValueError('Pixel lists must have 64 elements')
    #     for index, pix in enumerate(pixel_list):
    #         if len(pix) != 3:
    #             raise ValueError('Pixel at index %d is invalid. Pixels must contain 3 elements: Red, Green and Blue' % index)
    #         for element in pix:
    #             if element > 255 or element < 0:
    #                 raise ValueError('Pixel at index %d is invalid. Pixel elements must be between 0 and 255' % index)
    #     with open(self._fb_device, 'wb') as f:
    #         map = self._pix_map[self._rotation]
    #         for index, pix in enumerate(pixel_list):
    #             # Two bytes per pixel in fb memory, 16 bit RGB565
    #             f.seek(map[index // 8][index % 8] * 2)  # row, column
    #             f.write(self._pack_bin(pix))
Python
0.999699
@@ -2902,24 +2902,30 @@
 how_message(
+self, 
 text_string,
cdc6b62400f66d1b2747b5668a6618c961deb962
create game class
powerball/game.py
powerball/game.py
Python
0
@@ -0,0 +1,534 @@
+#!/usr/bin/env python
+
+from collections import Counter
+from .player import Player
+
+class Game:
+
+    def __init__(self, players=None):
+        """
+        Initialize the game instance.
+        players may be initialized by argument or by calling the begin method.
+        winning_numbers is initialized with an empty list. It should be populated
+        by the generate_winning_numbers method when called.
+
+        :param players (list): list of players
+        """
+
+        self.players = list()
+        self.winning_numbers = list()
b5d1be9069507feaeb41cfcf9cd774a244ffe49c
Add Activity model
sqlalchemy_continuum/ext/activity_stream.py
sqlalchemy_continuum/ext/activity_stream.py
Python
0.000001
@@ -0,0 +1,973 @@ +import sqlalchemy as sa%0Afrom sqlalchemy_utils import generic_relationship, JSONType%0A%0A%0Aclass Activity(object):%0A @declared_attr%0A def actor_id(self):%0A return sa.Column(%0A sa.Integer,%0A sa.ForeignKey('user.id'),%0A index=True%0A )%0A%0A @declared_attr%0A def actor(self):%0A return sa.orm.relationship('User')%0A%0A verb = sa.Column(sa.Unicode(255))%0A%0A data = sa.Column(JSONType)%0A%0A # This is used to discriminate between the linked tables.%0A object_type = sa.Column(sa.Unicode(255))%0A%0A # This is used to point to the primary key of the linked row.%0A object_id = sa.Column(sa.Integer)%0A%0A object = generic_relationship(object_type, object_id)%0A%0A # This is used to discriminate between the linked tables.%0A target_type = sa.Column(sa.Unicode(255))%0A%0A # This is used to point to the primary key of the linked row.%0A target_id = sa.Column(sa.Integer)%0A%0A target = generic_relationship(target_type, target_id)%0A
98b738e21918d1b6c4f2193cf229c518c9913974
add standalone affordance server script
src/python/scripts/affordanceServer.py
src/python/scripts/affordanceServer.py
Python
0
@@ -0,0 +1,1045 @@ +from ddapp import consoleapp%0Afrom ddapp import lcmobjectcollection%0Afrom ddapp.timercallback import TimerCallback%0Aimport datetime%0A%0Adef main():%0A%0A app = consoleapp.ConsoleApp()%0A%0A meshCollection = lcmobjectcollection.LCMObjectCollection('MESH_COLLECTION_COMMAND')%0A affordanceCollection = lcmobjectcollection.LCMObjectCollection('AFFORDANCE_COLLECTION_COMMAND')%0A%0A meshCollection.sendEchoRequest()%0A affordanceCollection.sendEchoRequest()%0A%0A def printCollection():%0A print%0A print '----------------------------------------------------'%0A print datetime.datetime.now().strftime('%25Y-%25m-%25d %25H:%25M:%25S')%0A print '%25d affordances' %25 len(affordanceCollection.collection)%0A for desc in affordanceCollection.collection.values():%0A print%0A print 'name:', desc%5B'Name'%5D%0A print 'type:', desc%5B'classname'%5D%0A%0A%0A timer = TimerCallback(targetFps=0.2)%0A timer.callback = printCollection%0A timer.start()%0A%0A #app.showPythonConsole()%0A app.start()%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
8e4d60645fb45e37c7a947b3a86219e5fd15c194
Add py-geeup package (#12367)
var/spack/repos/builtin/packages/py-geeup/package.py
var/spack/repos/builtin/packages/py-geeup/package.py
Python
0
@@ -0,0 +1,1691 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass PyGeeup(PythonPackage):%0A %22%22%22Simple Client for Earth Engine Uploads with Selenium Support.%22%22%22%0A%0A homepage = %22https://github.com/samapriya/geeup%22%0A url = %22https://pypi.io/packages/source/g/geeup/geeup-0.2.4.tar.gz%22%0A%0A version('0.2.4', sha256='20f62306ea900d7fa28a97cc92204716212dc030c50a6ac8214772a61a1a83fe')%0A%0A depends_on('[email protected]:', type='build')%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'test'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('py-pysmartdl', type=('build', 'run'))%0A depends_on('[email protected]', type=('build', 'run'), when='%5Epython@:3.3')%0A depends_on('[email protected]:', type=('build', 'run'), when='%[email protected]:')%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A
eb4bcaf1a94963bc1af697180a31a48a84333eb6
Add exclusive state for /* */ comments to the curly lexer. Seems to fix leaf node comments too.
libraries/vyconf/configfile/curly/lexer.py
libraries/vyconf/configfile/curly/lexer.py
# vyconf.configfile.curly.lexer: lexer for the curly config # # Copyright (C) 2014 VyOS Development Group <[email protected]> # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 # USA import ply.lex as lex class Lexer(object): tokens = ( 'LBRACE', 'RBRACE', 'IDENTIFIER', 'STRING', 'NODE_COMMENT', 'SEMICOLON', 'NEWLINE' ) t_LBRACE = r'\{' t_RBRACE = r'\}' t_SEMICOLON = r';' # TODO: add multiline comment support def t_NODE_COMMENT(self, t): r'/\*(.*)\*/' str = t.value[2:-2] # Strip off /* and */ str = str.strip() t.value = str return t # Define a rule so we can track line numbers def t_NEWLINE(self, t): r'\n+' t.lexer.lineno += len(t.value) return t def t_IDENTIFIER(self, t): r'[^\s;{}\"\']+' return t def t_STRING(self, t): r'\"([^\\"]|(\\.))*\"' escaped = 0 str = t.value[1:-1] new_str = "" for i in range(0, len(str)): c = str[i] if escaped: if c == "n": c = "\n" elif c == "t": c = "\t" new_str += c escaped = 0 else: if c == "\\": escaped = 1 else: new_str += c t.value = new_str return t t_ignore = ' \t\n' # Error handling rule def t_error(self, t): print("Illegal character '{0}'".format(t.value[0])) t.lexer.skip(1) # Build the lexer def build(self, **kwargs): self.lexer = lex.lex(module=self, **kwargs) def input(self, text): self.lexer.input(text) def token(self): self.last_token = self.lexer.token() return self.last_token
Python
0
@@ -941,16 +941,168 @@ ject):%0A%0A + # Multiline comment can't be extracted with regex,%0A # so we have exclusive state for it%0A states = (%0A ('COMMENT', 'exclusive'),%0A )%0A%0A toke @@ -1331,43 +1331,184 @@ # -TODO: add multiline comment support +/* */ comment. This is a bit complicated.%0A # VyConf is supposed to store node comments along with nodes%0A # and display them in the config etc., that's why all the hassle%0A %0A @@ -1514,21 +1514,16 @@ def t_ -NODE_ COMMENT( @@ -1549,67 +1549,366 @@ '/%5C* -(.*)%5C*/'%0A%0A str = t.value%5B2:-2%5D # Strip off /* and +'%0A t.lexer.code_start = t.lexer.lexpos%0A t.lexer.level = 1%0A t.lexer.begin('COMMENT')%0A%0A t_COMMENT_ignore = '%5Cn'%0A%0A def t_COMMENT_anything(self, t):%0A r'(%5Cs%7C%5Cw)+'%0A%0A def t_COMMENT_error(self, t):%0A print(%22Illegal character '%7B0%7D'%22.format(t.value%5B0%5D))%0A t.lexer.skip(1)%0A%0A def t_COMMENT_end(self, t):%0A r'%5C */ +' %0A @@ -1916,64 +1916,264 @@ +tmp_ str = -str.strip()%0A t.value = str%0A return t +t.lexer.lexdata%5Bt.lexer.code_start:t.lexer.lexpos-2%5D%0A t.value = tmp_str.strip()%0A t.type = %22NODE_COMMENT%22%0A t.lexer.lineno += t.value.count('%5Cn')%0A t.lexer.begin('INITIAL')%0A return t%0A # The comment stuff is over %0A%0A @@ -2961,17 +2961,16 @@ turn t%0A%0A -%0A t_ig
c50628d1cf984be774cdf1bc6728b9c1cb3f94fa
Create Assignment2Solution.py
Assignments/Assignment2Solution.py
Assignments/Assignment2Solution.py
Python
0
@@ -0,0 +1,1065 @@ +# Your name here%0A# Assignment 2: Process a folder of shapefiles%0A# Using the os library, find all shapefiles,and only shapefiles in a given folder and buffer them as before.%0A# Catch exceptions to handle invalid shapefiles.%0Aimport arcpy%0Aimport os%0A%0Adef main(inputfolder,prefix,outputfolder):%0A %22%22%22Buffer all shapefiles in inputfolder, appending with prefix and output to outputfolder.%22%22%22%0A filelist = os.listdir(inf)%0A for f in filelist:%0A if f.endswith('.shp'):%0A try:%0A input = inputfolder + f%0A output = outputfolder + prefix + f%0A arcpy.Buffer_analysis (input, output, u'500 Feet')%0A except Exception as e:%0A print %22Unable to buffer%22, f%0A print e%0A return outputfolder%0A%0Aif __name__ == '__main__':%0A # Arguments must be supplied in the __main__ block, not in the function called.%0A inf = u'C:%5C%5CFacilities%5C%5C'%0A p = u'Buffered_'%0A outf = u'C:%5C%5CFacilities%5C%5C'%0A # Print output location to standard output%0A print %22Output written to%22, main(inf, p, outf)%0A
4f32369efb0b2cd8540cc78132cadfbed6e68ae8
Read and write xls files
src/petlx/xls.py
src/petlx/xls.py
Python
0
@@ -0,0 +1,1090 @@ +%22%22%22%0ARead and write xls files, using xlrd.%0A%0A%22%22%22%0A%0Aimport os%0A%0Aimport petl%0A%0Afrom petlx.util import UnsatisfiedDependency%0A%0A%0Adep_message = %22%22%22%0AThe package xlrd is required. pip install xlrd.%0A%22%22%22%0A%0A%0Adef fromxls(filename, sheetname):%0A %22%22%22%0A Extract a table from a sheet in an Excel (.xls) file.%0A %0A N.B., the sheet name is case sensitive, so watch out for, e.g., 'Sheet1'.%0A%0A The package xlrd is required. Try %60%60pip install xlrd%60%60.%0A %0A %22%22%22%0A %0A return XLSView(filename, sheetname)%0A%0Aclass XLSView(petl.util.RowContainer):%0A %0A def __init__(self, filename, sheetname='Sheet1'):%0A self.filename = filename%0A self.sheetname = sheetname%0A%0A def __iter__(self):%0A try:%0A import xlrd%0A except ImportError as e:%0A raise UnsatisfiedDependency(e, dep_message)%0A%0A wb = xlrd.open_workbook(filename=self.filename)%0A ws = wb.sheet_by_name(self.sheetname)%0A return (ws.row_values(rownum) for rownum in range(0,ws.nrows))%0A %0A%0Aimport sys%0Afrom petlx.integration import integrate%0Aintegrate(sys.modules%5B__name__%5D)
24d742e444c84df99629d8a6aff7ca7e6c90f995
Add adhoc script to detect jobs with stuck ActiveInvocations list.
scheduler/misc/detect_stuck_active_invs.py
scheduler/misc/detect_stuck_active_invs.py
Python
0
@@ -0,0 +1,2353 @@ +#!/usr/bin/env python%0A# Copyright 2018 The LUCI Authors.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22Finds jobs with old entries (%3E1d) in ActiveInvocations list.%0A%0AUsage:%0A prpc login%0A ./detect_stuck_active_invs.py luci-scheduler-dev.appspot.com%0A%0ARequires the caller to be in 'administrators' group.%0A%22%22%22%0A%0Aimport json%0Aimport subprocess%0Aimport sys%0Aimport time%0A%0A%0Adef prpc(host, method, body):%0A p = subprocess.Popen(%0A %5B'prpc', 'call', host, method%5D,%0A stdin=subprocess.PIPE,%0A stdout=subprocess.PIPE)%0A out, _ = p.communicate(json.dumps(body))%0A if p.returncode:%0A raise Exception('Call to %25s failed' %25 method)%0A return json.loads(out)%0A%0A%0Adef check_job(host, job_ref):%0A print 'Checking %25s/%25s' %25 (job_ref%5B'project'%5D, job_ref%5B'job'%5D)%0A%0A state = prpc(host, 'internal.admin.Admin.GetDebugJobState', job_ref)%0A active_invs = state.get('activeInvocations', %5B%5D)%0A if not active_invs:%0A print ' No active invocations'%0A return %5B%5D%0A%0A stuck = %5B%5D%0A for inv_id in active_invs:%0A print ' ...checking %25s' %25 inv_id%0A inv = prpc(host, 'scheduler.Scheduler.GetInvocation', %7B%0A 'jobRef': job_ref,%0A 'invocationId': inv_id,%0A %7D)%0A started = time.time() - int(inv%5B'startedTs'%5D) / 1000000.0%0A if started %3E 24 * 3600:%0A print ' it is stuck!'%0A stuck.append((job_ref, inv_id))%0A return stuck%0A%0A%0Adef main():%0A if len(sys.argv) != 2:%0A print %3E%3E sys.stderr, 'Usage: %25s %3Chost%3E' %25 sys.argv%5B0%5D%0A return 1%0A host = sys.argv%5B1%5D%0A%0A stuck = %5B%5D%0A for job in prpc(host, 'scheduler.Scheduler.GetJobs', %7B%7D)%5B'jobs'%5D:%0A stuck.extend(check_job(host, job%5B'jobRef'%5D))%0A%0A if not stuck:%0A print 'No invocations are stuck'%0A return%0A%0A print%0A print 'All stuck invocations: '%0A for job_ref, inv_id in stuck:%0A print '%25s/%25s %25s' %25 (job_ref%5B'project'%5D, job_ref%5B'job'%5D, inv_id)%0A%0A return 0%0A%0A%0Aif __name__ == '__main__':%0A sys.exit(main())%0A
cf78037980a9345c12b1e2562bc4eda63cea95b3
Add a simple regression test to go with r143260. CommandInterpreter::PreprocessCommand() should not infinite loop when a target has not been specified yet.
test/functionalities/backticks/TestBackticksWithoutATarget.py
test/functionalities/backticks/TestBackticksWithoutATarget.py
Python
0.000178
@@ -0,0 +1,565 @@ +%22%22%22%0ATest that backticks without a target should work (not infinite looping).%0A%22%22%22%0A%0Aimport os, time%0Aimport unittest2%0Aimport lldb%0Afrom lldbtest import *%0A%0Aclass BackticksWithNoTargetTestCase(TestBase):%0A%0A mydir = %22functionalities/backticks%22%0A%0A def test_backticks_no_target(self):%0A %22%22%22A simple test of backticks without a target.%22%22%22%0A self.expect(%22print %601+2-3%60%22,%0A substrs = %5B' = 0'%5D)%0A%0Aif __name__ == '__main__':%0A import atexit%0A lldb.SBDebugger.Initialize()%0A atexit.register(lambda: lldb.SBDebugger.Terminate())%0A unittest2.main()%0A
28e226a47d16fb6a52c937031be19d8832e7e5c4
Bump development version
ckeditor_filebrowser_filer/__init__.py
ckeditor_filebrowser_filer/__init__.py
# -*- coding: utf-8 -*- __version__ = '0.1.1'
Python
0
@@ -38,9 +38,12 @@ '0. -1. +2.0.b 1'%0A
5e37cabc1253f573cf270883378157784ca1bf7c
Update train.py (#1733)
PaddleNLP/sequence_tagging_for_ner/train.py
PaddleNLP/sequence_tagging_for_ner/train.py
from __future__ import print_function import os import math import time import numpy as np import six import paddle import paddle.fluid as fluid import reader from network_conf import ner_net from utils import logger, load_dict from utils_extend import to_lodtensor, get_embedding def test(exe, chunk_evaluator, inference_program, test_data, test_fetch_list, place): chunk_evaluator.reset() for data in test_data(): word = to_lodtensor([x[0] for x in data], place) mark = to_lodtensor([x[1] for x in data], place) target = to_lodtensor([x[2] for x in data], place) rets = exe.run(inference_program, feed={"word": word, "mark": mark, "target": target}, fetch_list=test_fetch_list) num_infer = np.array(rets[0]) num_label = np.array(rets[1]) num_correct = np.array(rets[2]) chunk_evaluator.update(num_infer[0].astype('int64'), num_label[0].astype('int64'), num_correct[0].astype('int64')) return chunk_evaluator.eval() def main(train_data_file, test_data_file, vocab_file, target_file, emb_file, model_save_dir, num_passes, use_gpu, parallel, batch_size=200): if not os.path.exists(model_save_dir): os.mkdir(model_save_dir) word_dict = load_dict(vocab_file) label_dict = load_dict(target_file) word_vector_values = get_embedding(emb_file) word_dict_len = len(word_dict) label_dict_len = len(label_dict) if "CE_MODE_X" in os.environ: fluid.default_startup_program().random_seed = 110 avg_cost, feature_out, word, mark, target = ner_net( word_dict_len, label_dict_len, parallel) crf_decode = fluid.layers.crf_decoding( input=feature_out, param_attr=fluid.ParamAttr(name='crfw')) (precision, recall, f1_score, num_infer_chunks, num_label_chunks, num_correct_chunks) = fluid.layers.chunk_eval( input=crf_decode, label=target, chunk_scheme="IOB", num_chunk_types=int(math.ceil((label_dict_len - 1) / 2.0))) chunk_evaluator = fluid.metrics.ChunkEvaluator() inference_program = fluid.default_main_program().clone(for_test=True) test_fetch_list = [num_infer_chunks, num_label_chunks, num_correct_chunks] sgd_optimizer = fluid.optimizer.SGD(learning_rate=1e-3) sgd_optimizer.minimize(avg_cost) if "CE_MODE_X" not in os.environ: train_reader = paddle.batch( paddle.reader.shuffle( reader.data_reader(train_data_file, word_dict, label_dict), buf_size=20000), batch_size=batch_size) test_reader = paddle.batch( paddle.reader.shuffle( reader.data_reader(test_data_file, word_dict, label_dict), buf_size=20000), batch_size=batch_size) else: train_reader = paddle.batch( reader.data_reader(train_data_file, word_dict, label_dict), batch_size=batch_size) test_reader = paddle.batch( reader.data_reader(test_data_file, word_dict, label_dict), batch_size=batch_size) place = fluid.CUDAPlace(0) if use_gpu else fluid.CPUPlace() feeder = fluid.DataFeeder(feed_list=[word, mark, target], place=place) exe = fluid.Executor(place) exe.run(fluid.default_startup_program()) embedding_name = 'emb' embedding_param = fluid.global_scope().find_var(embedding_name).get_tensor() embedding_param.set(word_vector_values, place) time_begin = time.time() for pass_id in six.moves.xrange(num_passes): chunk_evaluator.reset() for batch_id, data in enumerate(train_reader()): cost_var, nums_infer, nums_label, nums_correct = exe.run( fluid.default_main_program(), feed=feeder.feed(data), fetch_list=[ avg_cost, num_infer_chunks, num_label_chunks, num_correct_chunks ]) if batch_id % 5 == 0: print("Pass " + str(pass_id) + ", Batch " + str(batch_id) + ", Cost " + str(cost_var[0])) chunk_evaluator.update(nums_infer, nums_label, nums_correct) pass_precision, pass_recall, pass_f1_score = chunk_evaluator.eval() print("[TrainSet] pass_id:" + str(pass_id) + " pass_precision:" + str( pass_precision) + " pass_recall:" + str(pass_recall) + " pass_f1_score:" + str(pass_f1_score)) test_pass_precision, test_pass_recall, test_pass_f1_score = test( exe, chunk_evaluator, inference_program, test_reader, test_fetch_list, place) print("[TestSet] pass_id:" + str(pass_id) + " pass_precision:" + str( test_pass_precision) + " pass_recall:" + str(test_pass_recall) + " pass_f1_score:" + str(test_pass_f1_score)) save_dirname = os.path.join(model_save_dir, "params_pass_%d" % pass_id) fluid.io.save_inference_model(save_dirname, ['word', 'mark'], crf_decode, exe) if "CE_MODE_X" in os.environ: print("kpis train_precision %f" % pass_precision) print("kpis test_precision %f" % test_pass_precision) print("kpis train_duration %f" % (time.time() - time_begin)) if __name__ == "__main__": main( train_data_file="data/train", test_data_file="data/test", vocab_file="data/vocab.txt", target_file="data/target.txt", emb_file="data/wordVectors.txt", model_save_dir="models", num_passes=2000, use_gpu=False, parallel=False)
Python
0
@@ -5159,16 +5159,62 @@ ass_id)%0A + if %22CE_MODE_X%22 not in os.environ:%0A @@ -5275,16 +5275,20 @@ mark'%5D,%0A +
dc200e50020637650c8a5dfe76895b0a033a8cea
Add tests for verifying that deactivating password works
akvo/rsr/tests/models/test_login_logging.py
akvo/rsr/tests/models/test_login_logging.py
Python
0
@@ -0,0 +1,3299 @@ +# -*- coding: utf-8 -*-%0A%0A# Akvo Reporting is covered by the GNU Affero General Public License.%0A# See more details in the license.txt file located at the root folder of the Akvo RSR module.%0A# For additional details on the GNU license please see %3C http://www.gnu.org/licenses/agpl.html %3E.%0A%0Afrom datetime import timedelta%0A%0Afrom django.forms import ValidationError%0Afrom django.conf import settings%0Afrom django.test import Client%0A%0Afrom akvo.rsr.models import LoginLog%0Afrom akvo.rsr.models.login_log import MAX_FAILED_LOGINS%0Afrom akvo.rsr.tests.base import BaseTestCase%0A%0A%0Aclass LoginLoggingTestCase(BaseTestCase):%0A %22%22%22Tests for the login logging model%22%22%22%0A%0A def setUp(self):%0A self.email = '[email protected]'%0A self.password = 'password'%0A self.user = self.create_user(self.email, self.password)%0A self.c = Client(HTTP_HOST=settings.RSR_DOMAIN)%0A%0A def test_successful_login_creates_log_entry(self):%0A # When%0A self.c.login(username=self.email, password=self.password)%0A%0A # Then%0A logs = LoginLog.objects.filter(email=self.email)%0A self.assertTrue(logs.exists())%0A self.assertTrue(logs.first().success)%0A%0A def test_failed_login_creates_log_entry(self):%0A # When%0A with self.assertRaises(ValidationError):%0A self.c.login(username=self.email, password='')%0A%0A # Then%0A logs = LoginLog.objects.filter(email=self.email)%0A self.assertTrue(logs.exists())%0A self.assertFalse(logs.first().success)%0A%0A def test_password_deactivates_after_max_attempts(self):%0A # Given%0A for _ in range(MAX_FAILED_LOGINS - 1):%0A with self.assertRaises(ValidationError):%0A self.c.login(username=self.email, password='')%0A%0A # When%0A with self.assertRaises(ValidationError) as assertion:%0A self.c.login(username=self.email, password='')%0A%0A # Then%0A self.assertIn('Login has been disabled', assertion.exception.message)%0A%0A def test_logins_post_password_deactivation_ignored(self):%0A # When%0A for _ in range(MAX_FAILED_LOGINS + 10):%0A with self.assertRaises(ValidationError):%0A self.c.login(username=self.email, password='')%0A%0A with self.assertRaises(ValidationError) as assertion:%0A self.c.login(username=self.email, password=self.password)%0A%0A # Then%0A self.assertIn('Login has been disabled', assertion.exception.message)%0A logs = LoginLog.objects.filter(email=self.email)%0A self.assertEqual(MAX_FAILED_LOGINS, logs.count())%0A%0A def test_login_works_after_deactivation_time(self):%0A # Given%0A for _ in range(MAX_FAILED_LOGINS + 10):%0A with self.assertRaises(ValidationError):%0A self.c.login(username=self.email, password='')%0A # HACK: Set the creation time of these login attempts to older than login_disable_time%0A time_delta = settings.LOGIN_DISABLE_TIME * 2%0A creation_time = LoginLog.objects.first().created_at - timedelta(seconds=time_delta)%0A LoginLog.objects.update(created_at=creation_time)%0A%0A # When%0A self.c.login(username=self.email, password=self.password)%0A%0A # Then%0A log_entry = LoginLog.objects.filter(email=self.email).first()%0A self.assertTrue(log_entry.success)%0A
e28a6423f63a169b46ebe46e9690d3858f953909
Add tests
apps/commons/tests/test_accepted_locales.py
apps/commons/tests/test_accepted_locales.py
Python
0.000001
@@ -0,0 +1,3332 @@ +import os%0Aimport shutil%0A%0Afrom django.conf import settings%0Aimport test_utils%0A%0Aimport manage%0A%0A%0Aclass AcceptedLocalesTest(test_utils.TestCase):%0A %22%22%22Test lazy evaluation of locale related settings.%0A%0A Verify that some localization-related settings are lazily evaluated based %0A on the current value of the DEV variable. Depending on the value, %0A DEV_LANGUAGES or PROD_LANGUAGES should be used.%0A%0A %22%22%22%0A locale = manage.path('locale')%0A locale_bkp = manage.path('locale_bkp')%0A%0A @classmethod%0A def setup_class(cls):%0A %22%22%22Create a directory structure for locale/.%0A%0A Back up the existing locale/ directory and create the following %0A hierarchy in its place:%0A%0A - locale/en-US/LC_MESSAGES%0A - locale/fr/LC_MESSAGES%0A - locale/templates/LC_MESSAGES%0A - locale/empty_file%0A%0A Also, set PROD_LANGUAGES to ('en-US',).%0A%0A %22%22%22%0A if os.path.exists(cls.locale_bkp):%0A raise Exception('A backup of locale/ exists at %25s which might '%0A 'mean that previous tests didn%5C't end cleanly. '%0A 'Skipping the test suite.' %25 cls.locale_bkp)%0A cls.DEV = settings.DEV%0A cls.PROD_LANGUAGES = settings.PROD_LANGUAGES%0A cls.DEV_LANGUAGES = settings.DEV_LANGUAGES%0A settings.PROD_LANGUAGES = ('en-US',)%0A os.rename(cls.locale, cls.locale_bkp)%0A for loc in ('en-US', 'fr', 'templates'):%0A os.makedirs(os.path.join(cls.locale, loc, 'LC_MESSAGES'))%0A open(os.path.join(cls.locale, 'empty_file'), 'w').close()%0A%0A @classmethod%0A def teardown_class(cls):%0A %22%22%22Remove the testing locale/ dir and bring back the backup.%22%22%22%0A%0A settings.DEV = cls.DEV%0A settings.PROD_LANGUAGES = cls.PROD_LANGUAGES%0A settings.DEV_LANGUAGES = cls.DEV_LANGUAGES%0A shutil.rmtree(cls.locale)%0A os.rename(cls.locale_bkp, cls.locale)%0A%0A def test_build_dev_languages(self):%0A %22%22%22Test that the list of dev locales is built properly.%0A%0A On dev instances, the list of accepted locales should correspond to %0A the per-locale directories in locale/.%0A%0A %22%22%22%0A settings.DEV = True%0A assert (settings.DEV_LANGUAGES == %5B'en-US', 'fr'%5D or%0A settings.DEV_LANGUAGES == %5B'fr', 'en-US'%5D), %5C%0A 'DEV_LANGUAGES do not correspond to the contents of locale/.'%0A%0A def test_dev_languages(self):%0A %22%22%22Test the accepted locales on dev instances.%0A%0A On dev instances, allow locales defined in DEV_LANGUAGES.%0A%0A %22%22%22%0A settings.DEV = True%0A # simulate the successful result of the DEV_LANGUAGES list %0A # comprehension defined in settings.%0A settings.DEV_LANGUAGES = %5B'en-US', 'fr'%5D%0A assert settings.LANGUAGE_URL_MAP == %7B'en-us': 'en-US', 'fr': 'fr'%7D, %5C%0A ('DEV is True, but DEV_LANGUAGES are not used to define the '%0A 'allowed locales.')%0A%0A def test_prod_languages(self):%0A %22%22%22Test the accepted locales on prod instances.%0A%0A On stage/prod instances, allow locales defined in PROD_LANGUAGES.%0A%0A %22%22%22%0A settings.DEV = False%0A assert settings.LANGUAGE_URL_MAP == %7B'en-us': 'en-US'%7D, %5C%0A ('DEV is False, but PROD_LANGUAGES are not used to define the '%0A 'allowed locales.')%0A
bb9fd71dc06ac39b461b4109e341fe7cd4172c76
use self.create_socket()
tests/twisted/file-transfer/test-receive-file-and-disconnect.py
tests/twisted/file-transfer/test-receive-file-and-disconnect.py
import socket from file_transfer_helper import exec_file_transfer_test, ReceiveFileTest class ReceiveFileAndDisconnectTest(ReceiveFileTest): def receive_file(self): s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) s.connect(self.address) # disconnect self.conn.Disconnect() self.q.expect('dbus-signal', signal='StatusChanged', args=[2, 1]) return True if __name__ == '__main__': exec_file_transfer_test(ReceiveFileAndDisconnectTest)
Python
0.000001
@@ -181,55 +181,26 @@ = s -ocket.socket(socket.AF_UNIX, socket.SOCK_STREAM +elf.create_socket( )%0A
5dfd7b1534e19242ab778d535e2de13b424578f7
Add examples
example.py
example.py
Python
0
@@ -0,0 +1,604 @@ +#!/usr/bin/python3%0A#%0A# Copyright (c) 2016, Fabian Affolter %[email protected]%3E%0A# Released under the MIT license. See LICENSE file for details.%0A#%0Aimport fixerio%0A%0A# Our base currency is the Czech Koruna instead of the default (EUR).%0ABASE = 'CZK'%0A%0Aexchange = fixerio.Fixer(base=BASE)%0A%0Aprint('Current exchange rates:')%0Afor currency, rate in exchange.convert().get('rates').items():%0A print('%7B%7D : %7B%7D'.format(currency, rate))%0A%0Aprint('Current exchange rates for CHF:')%0A# Check if the target currency exists%0Aif exchange.currency_available('CHF'):%0A print(exchange.convert().get('rates')%5B'CHF'%5D)%0A
5afdd7775dd1aa232d3ca8fa2852f4a36918f224
add management command to fix forms whose non-ascii chars are corrupted
corehq/apps/cleanup/management/commands/fix_corrupted_forms.py
corehq/apps/cleanup/management/commands/fix_corrupted_forms.py
Python
0
@@ -0,0 +1,1584 @@ +# encoding: utf-8%0Afrom __future__ import absolute_import%0Afrom __future__ import unicode_literals%0A%0Afrom django.core.management import BaseCommand%0A%0Afrom six.moves import input%0A%0Afrom corehq.apps.app_manager.dbaccessors import get_apps_by_id%0A%0ASUSPICIOUS_STRINGS = %5B%0A international_character.encode('utf-8').decode('latin1')%0A for international_character in %5B%0A '%C3%A1', '%C3%A9', '%C3%AD', '%C3%B3', '%C3%BA',%0A '%C3%81', '%C3%89', '%C3%8D', '%C3%93', '%C3%9A',%0A '%E2%80%99',%0A %5D # TODO - add more common non-ascii characters%0A%5D%0A%0A%0Aclass Command(BaseCommand):%0A%0A def add_arguments(self, parser):%0A parser.add_argument('domain')%0A parser.add_argument('app_id')%0A parser.add_argument('form_id')%0A parser.add_argument(%0A '--cleanup',%0A action='store_true',%0A dest='cleanup',%0A default=False,%0A )%0A%0A # https://dimagi-dev.atlassian.net/browse/HI-747%0A def handle(self, domain, app_id, form_id, cleanup=False, **options):%0A app = get_apps_by_id(domain, app_id)%5B0%5D%0A form = app.get_form(form_id)%0A source = form.source%0A if any(suspicious_string in source for suspicious_string in SUSPICIOUS_STRINGS):%0A print('FORM CONTAINS SUSPICIOUS STRING')%0A if cleanup:%0A if 'y' == input('Did you confirm that there are no app updates to publish? %5By/N%5D'):%0A print('Cleaning form...')%0A form.source = source.encode('latin1').decode('utf-8')%0A app.save()%0A print('Done.')%0A else:%0A print('Aborting...')%0A
3cf56093b9d132a5089a70a12feb73c4be987da8
Add mtnpatch.py, a script to parse and import a full monotone diff
contrib/mtnpatch.py
contrib/mtnpatch.py
Python
0
@@ -0,0 +1,2202 @@ +#!/usr/bin/env python%0Aimport sys, os, string, getopt%0A%0Amtncmd = %22monotone%22%0A%0Adef main(argv = None):%0A if argv is None:%0A argv = sys.argv%0A opts, list = getopt.getopt(sys.argv%5B1:%5D, ':R')%0A if len(list) %3C 1:%0A print %22You must specify a file%22%0A return 2%0A reverse = False%0A for o, a in opts:%0A if o == %22-R%22:%0A reverse = True%0A if os.path.exists(list%5B0%5D):%0A input = open(list%5B0%5D, 'r')%0A renameFrom = %22%22%0A cmd = %22%22%0A if reverse:%0A print %22patch -R -p0 %3C %25s%22 %25 list%5B0%5D%0A else:%0A print %22patch -p0 %3C %25s%22 %25 list%5B0%5D%0A for line in input:%0A if len(line) %3E 0:%0A if line%5B0%5D == '#':%0A parts = line.split()%0A if len(parts) %3E 2:%0A cmd = parts%5B1%5D%0A # deal with whilespace in filenames (badly)%0A fileName = parts%5B2%5D%0A i = 3%0A while i %3C len(parts) and fileName.count('%22') %25 2:%0A fileName += %22 %25s%22 %25 parts%5Bi%5D%0A if cmd == %22delete_file%22:%0A if reverse:%0A print %22%25s add %25s%22 %25 (mtncmd, fileName)%0A else:%0A print %22%25s drop -e %25s%22 %25 (mtncmd, fileName)%0A elif cmd == %22add_file%22:%0A if reverse:%0A print %22%25s drop -e %25s%22 %25 (mtncmd, fileName)%0A else:%0A print %22%25s add %25s%22 %25 (mtncmd, fileName)%0A elif cmd == %22rename_file%22:%0A renameFrom = fileName%0A elif cmd == %22to%22 and renameFrom != %22%22:%0A if reverse:%0A print %22%25s rename -e %25s %25s%22 %25 (mtncmd, fileName, renameFrom)%0A else:%0A print %22%25s rename -e %25s %25s%22 %25 (mtncmd, renameFrom, fileName)%0A renameFrom = %22%22%0A else:%0A cmd = %22%22%0A%0Aif __name__ == %22__main__%22:%0A sys.exit(main())%0A
4430b1957b642e87cd263455e371bf1d634101b0
Add buildone command
cerbero/commands/buildone.py
cerbero/commands/buildone.py
Python
0.000005
@@ -0,0 +1,1799 @@ +# cerbero - a multi-platform build system for Open Source software%0A# Copyright (C) 2012 Andoni Morales Alastruey %[email protected]%3E%0A#%0A# This library is free software; you can redistribute it and/or%0A# modify it under the terms of the GNU Library General Public%0A# License as published by the Free Software Foundation; either%0A# version 2 of the License, or (at your option) any later version.%0A#%0A# This library is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU%0A# Library General Public License for more details.%0A#%0A# You should have received a copy of the GNU Library General Public%0A# License along with this library; if not, write to the%0A# Free Software Foundation, Inc., 59 Temple Place - Suite 330,%0A# Boston, MA 02111-1307, USA.%0A%0A%0A#from cerbero.oven import Oven%0Afrom cerbero.commands import Command, register_command%0Afrom cerbero.cookbook import CookBook%0Afrom cerbero.errors import FatalError%0Afrom cerbero.oven import Oven%0Afrom cerbero.utils import _, N_, ArgparseArgument%0A%0A%0Aclass BuildOne(Command):%0A doc = N_('Build or rebuild a single recipe without its dependencies')%0A name = 'buildone'%0A%0A def __init__(self):%0A Command.__init__(self,%0A %5BArgparseArgument('recipe', nargs=1,%0A help=_('name of the recipe to build')),%0A %5D)%0A%0A def run(self, config, args):%0A cookbook = CookBook.load(config)%0A recipe_name = args.recipe%5B0%5D%0A%0A recipe = cookbook.get_recipe(recipe_name)%0A if recipe is None:%0A raise FatalError(_(%22Recipe %25s not found%22 %25 recipe_name))%0A%0A oven = Oven(recipe, cookbook, force=True, no_deps=True)%0A oven.start_cooking()%0A%0Aregister_command(BuildOne)%0A
6244e0b40d847687b7ff875a48fb08060efc97bf
Solve Within PyCharm
Newsolver.py
Newsolver.py
Python
0.000039
@@ -0,0 +1,1342 @@ +from __future__ import division%0D%0Afrom pyomo.environ import *%0D%0Afrom pyomo.opt import SolverFactory%0D%0A%0D%0A%0D%0Amodel = AbstractModel()%0D%0A%0D%0Amodel.M = Set()%0D%0Amodel.N = Set()%0D%0A%0D%0Amodel.n = Param()%0D%0Amodel.c = Param(model.M, model.N)%0D%0A%0D%0Amodel.x = Var(model.M, model.N, domain=Binary)%0D%0Amodel.u = Var(model.M, domain=NonNegativeIntegers)%0D%0A%0D%0Adef object(model):%0D%0A return sum(model.c%5Bi,j%5D*model.x%5Bi,j%5D for (i,j) in model.M*model.N if i!=j)%0D%0A%0D%0Amodel.obj = Objective(rule=object)%0D%0A%0D%0Adef const1(model,j):%0D%0A return sum(model.x%5Bi,j%5D for i in model.M if i!=j) == 1%0D%0A%0D%0Amodel.cons = Constraint(model.N, rule= const1)%0D%0A%0D%0Adef const2(model,i):%0D%0A return sum(model.x%5Bi,j%5D for j in model.N if j!=i) ==1%0D%0A%0D%0Amodel.cons2 = Constraint(model.M, rule=const2)%0D%0A%0D%0Adef const3(model,i,j):%0D%0A if i==j or i %3C2 or j%3C2:%0D%0A return Constraint.Skip%0D%0A return model.u%5Bi%5D-model.u%5Bj%5D+model.n*model.x%5Bi,j%5D %3C= model.n-1%0D%0A%0D%0Amodel.cons3 = Constraint(model.M, model.N, rule=const3)%0D%0A%0D%0Ainstance = model.create(%22salesman.dat%22)%0D%0Ainstance.pprint()%0D%0A%0D%0Aopt = SolverFactory('glpk')%0D%0A%0D%0Aresults = opt.solve(instance, tee=True)%0D%0Aresults.write()%0D%0Ainstance.solutions.load_from(results)%0D%0A%0D%0Afor v in instance.component_objects(Var, active=True):%0D%0A print (%22Variable%22,v)%0D%0A varobject = getattr(instance, str(v))%0D%0A for index in varobject:%0D%0A print (%22 %22,index, varobject%5Bindex%5D.value)
00f766b24865e8010411105794f20bc0ef39a6dc
Add py-sphinxcontrib-devhelp package (#13278)
var/spack/repos/builtin/packages/py-sphinxcontrib-devhelp/package.py
var/spack/repos/builtin/packages/py-sphinxcontrib-devhelp/package.py
Python
0
@@ -0,0 +1,793 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass PySphinxcontribDevhelp(PythonPackage):%0A %22%22%22sphinxcontrib-devhelp is a sphinx extension which outputs%0A Devhelp document.%22%22%22%0A%0A homepage = %22http://sphinx-doc.org/%22%0A url = %22https://pypi.io/packages/source/s/sphinxcontrib-devhelp/sphinxcontrib-devhelp-1.0.1.tar.gz%22%0A%0A version('1.0.1', sha256='6c64b077937330a9128a4da74586e8c2130262f014689b4b89e2d08ee7294a34')%0A%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('py-setuptools', type='build')%0A%0A def test(self):%0A # Requires sphinx, creating a circular dependency%0A pass%0A
295afe540c24ded86353402d87c42e072f7a64fa
Initialize makePublicPrivateKeys
books/CrackingCodesWithPython/Chapter23/makePublicPrivateKeys.py
books/CrackingCodesWithPython/Chapter23/makePublicPrivateKeys.py
Python
0.000004
@@ -0,0 +1,2682 @@ +# Public Key Generator%0A# https://www.nostarch.com/crackingcodes/ (BSD Licensed)%0A%0Aimport random, sys, os, primeNum, cryptomath%0A%0A%0Adef main():%0A # Create a public/private keypair with 1024-bit keys:%0A print('Making key files...')%0A makeKeyFiles('al_sweigart', 1024)%0A print('Key files made.')%0A%0Adef generateKey(keySize):%0A # Creates public/private keys keySize bits in size.%0A p = 0%0A q = 0%0A # Step 1: Create two prime numbers, p and q. Calculate n = p * q:%0A print('Generating p prime...')%0A while p == q:%0A p = primeNum.generateLargePrime(keySize)%0A q = primeNum.generateLargePrime(keySize)%0A n = p * q%0A%0A # Step 2: Create a number e that is relatively prime to (p-1)*(q-1):%0A print('Generating e that is relatively prime to (p-1)*(q-1)...')%0A while True:%0A # Keep trying random numbers for e until one is valid:%0A e = random.randrange(2 ** (keySize - 1), 2 ** (keySize))%0A if cryptomath.gcd(e, (p - 1) * (q - 1)) == 1:%0A break%0A%0A # Step 3: Calculate d, the mod inverse of e:%0A print('Calculating d that is mod inverse of e...')%0A d = cryptomath.findModInverse(e, (p - 1) * (q - 1))%0A%0A publicKey = (n, e)%0A privateKey = (n, d)%0A%0A print('Public key:', publicKey)%0A print('Private key:', privateKey)%0A%0A return (publicKey, privateKey)%0A%0A%0Adef makeKeyFiles(name, keySize):%0A # Creates two files 'x_pubkey.txt' and 'x_privkey.txt' (where x%0A # is the value in name) with the n,e and d,e integers written in%0A # them, delimited by a comma.%0A%0A # Our safety check will prevent us from overwriting our old key files:%0A if os.path.exists('%25s_pubkey.txt' %25 (name)) or os.path.exists('%25s_privkey.txt' %25 (name)):%0A sys.exit('WARNING: The file %25s_pubkey.txt or %25s_privkey.txt already exists! Use a different name or delete these files and rerun this program.' %25 (name, name))%0A%0A publicKey, privateKey = generateKey(keySize)%0A%0A print()%0A print('The public key is a %25s and a %25s digit number.' %25 (len(str(publicKey%5B0%5D)), len(str(publicKey%5B1%5D))))%0A print('Writing public key to file %25s_pubkey.txt...' %25 (name))%0A fo = open('%25s_pubkey.txt' %25 (name), 'w')%0A fo.write('%25s,%25s,%25s' %25 (keySize, publicKey%5B0%5D, publicKey%5B1%5D))%0A fo.close()%0A%0A print()%0A print('The private key is a %25s and a %25s digit number.' %25 (len(str(publicKey%5B0%5D)), len(str(publicKey%5B1%5D))))%0A print('Writing private key to file %25s_privkey.txt...' %25 (name))%0A fo = open('%25s_privkey.txt' %25 (name), 'w')%0A fo.write('%25s,%25s,%25s' %25 (keySize, privateKey%5B0%5D, privateKey%5B1%5D))%0A fo.close()%0A%0A%0A# If makePublicPrivateKeys.py is run (instead of imported as a module),%0A# call the main() function:%0Aif __name__ == '__main__':%0A main()
3e97731449027e5ac0d3a047e1b872956feac528
Create cracking-the-safe.py
Python/cracking-the-safe.py
Python/cracking-the-safe.py
Python
0.000004
@@ -0,0 +1,1937 @@ +# Time: O(k%5En)%0A# Space: O(k%5En)%0A%0A# There is a box protected by a password.%0A# The password is n digits, where each letter can be one of the first k digits 0, 1, ..., k-1.%0A#%0A# You can keep inputting the password,%0A# the password will automatically be matched against the last n digits entered.%0A#%0A# For example, assuming the password is %22345%22,%0A# I can open it when I type %22012345%22, but I enter a total of 6 digits.%0A#%0A# Please return any string of minimum length that is guaranteed to open the box after the entire string is inputted.%0A#%0A# Example 1:%0A# Input: n = 1, k = 2%0A# Output: %2201%22%0A# Note: %2210%22 will be accepted too.%0A# %0A# Example 2:%0A# Input: n = 2, k = 2%0A# Output: %2200110%22%0A# Note: %2201100%22, %2210011%22, %2211001%22 will be accepted too.%0A# %0A# Note:%0A# - n will be in the range %5B1, 4%5D.%0A# - k will be in the range %5B1, 10%5D.%0A# - k%5En will be at most 4096.%0A%0A# https://en.wikipedia.org/wiki/De_Bruijn_sequence%0Aclass Solution(object):%0A def crackSafe(self, n, k):%0A %22%22%22%0A :type n: int%0A :type k: int%0A :rtype: str%0A %22%22%22%0A M = k**(n-1)%0A P = %5Bq*k+i for i in xrange(k) for q in xrange(M)%5D%0A result = %5B%5D%0A for i in xrange(k**n):%0A j = i%0A while P%5Bj%5D %3E= 0:%0A result.append(str(j//M))%0A P%5Bj%5D, j = -1, P%5Bj%5D%0A return %22%22.join(result) + %220%22*(n-1)%0A%0A%0A# Time: O(n *k%5En)%0A# Space: O(n *k%5En)%0Aclass Solution2(object):%0A def crackSafe(self, n, k):%0A %22%22%22%0A :type n: int%0A :type k: int%0A :rtype: str%0A %22%22%22%0A def dfs(k, node, lookup, result):%0A for i in xrange(k):%0A neigbor = node + str(i)%0A if neigbor not in lookup:%0A lookup.add(neigbor)%0A dfs(k, neigbor%5B1:%5D, lookup, result)%0A result.append(str(i))%0A lookup = set()%0A result = %5B%5D%0A dfs(k, %220%22*(n-1), lookup, result)%0A return %22%22.join(result) + %220%22*(n-1)%0A
9c2487ab2c3b8d12e5a5f0f179b2a1fd79496b17
add tests
doajtest/unit/event_consumers/test_application_publisher_in_progress_notify.py
doajtest/unit/event_consumers/test_application_publisher_in_progress_notify.py
Python
0
@@ -0,0 +1,3096 @@ +from portality import models%0Afrom portality import constants%0Afrom portality.bll import exceptions%0Afrom doajtest.helpers import DoajTestCase%0Afrom doajtest.fixtures import ApplicationFixtureFactory%0Aimport time%0A%0Afrom portality.events.consumers.application_publisher_inprogress_notify import ApplicationPublisherInprogresNotify%0A%0A%0Aclass TestApplicationPublisherInProgressNotify(DoajTestCase):%0A def setUp(self):%0A super(TestApplicationPublisherInProgressNotify, self).setUp()%0A%0A def tearDown(self):%0A super(TestApplicationPublisherInProgressNotify, self).tearDown()%0A%0A def test_consumes(self):%0A source = ApplicationFixtureFactory.make_application_source()%0A%0A event = models.Event(constants.EVENT_APPLICATION_STATUS, context=%7B%22application%22: %7B%7D, %22old_status%22: %22pending%22, %22new_status%22: %22in progress%22%7D)%0A assert ApplicationPublisherInprogresNotify.consumes(event)%0A%0A event = models.Event(constants.EVENT_APPLICATION_STATUS,%0A context=%7B%22application%22: %7B%7D, %22old_status%22: %22in progress%22, %22new_status%22: %22in progress%22%7D)%0A assert not ApplicationPublisherInprogresNotify.consumes(event)%0A%0A event = models.Event(%22test:event%22, context=%7B%22application%22 : %7B%7D%7D)%0A assert not ApplicationPublisherInprogresNotify.consumes(event)%0A%0A event = models.Event(constants.EVENT_APPLICATION_STATUS)%0A assert not ApplicationPublisherInprogresNotify.consumes(event)%0A%0A def test_consume_success(self):%0A self._make_and_push_test_context(%22/%22)%0A%0A acc = models.Account()%0A acc.set_id(%22publisher%22)%0A acc.set_email(%[email protected]%22)%0A acc.save()%0A%0A source = ApplicationFixtureFactory.make_application_source()%0A event = models.Event(constants.EVENT_APPLICATION_STATUS,%0A context=%7B%22application%22: source, %22old_status%22: %22pending%22,%0A %22new_status%22: %22in progress%22%7D)%0A%0A # event = models.Event(constants.EVENT_APPLICATION_STATUS, context=%7B%22application%22: %22abcdefghijk%22, %22old_status%22: %22in progress%22, %22new_status%22: %22revisions_required%22%7D)%0A ApplicationPublisherInprogresNotify.consume(event)%0A%0A time.sleep(2)%0A ns = models.Notification.all()%0A assert len(ns) == 1%0A%0A n = ns%5B0%5D%0A assert n.who == %22publisher%22, %22Expected: %7B%7D, Received: %7B%7D%22.format(%22publisher%22, n.who)%0A assert n.created_by == ApplicationPublisherInprogresNotify.ID, %22Expected: %7B%7D, Received: %7B%7D%22.format(ApplicationPublisherInprogresNotify.ID, n.created_by)%0A assert n.classification == constants.NOTIFICATION_CLASSIFICATION_STATUS_CHANGE, %22Expected: %7B%7D, Received: %7B%7D%22.format(constants.NOTIFICATION_CLASSIFICATION_STATUS_CHANGE, n.classification)%0A assert n.message is not None%0A assert n.action is None%0A assert not n.is_seen()%0A%0A def test_consume_fail(self):%0A event = models.Event(constants.EVENT_APPLICATION_ASSED_ASSIGNED, context=%7B%22application%22: %7B%22dummy%22 : %22data%22%7D%7D)%0A with self.assertRaises(exceptions.NoSuchObjectException):%0A ApplicationPublisherInprogresNotify.consume(event)%0A%0A
73ae4839941b802870eaba29b67c8b8a89e43c71
add backend_service_migration script to call the migration handler
backend_service_migration.py
backend_service_migration.py
Python
0.000001
@@ -0,0 +1,3722 @@ +# Copyright 2020 Google LLC%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# https://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22 The script takes the arguments and run the backend service migration handler.%0A%0ABefore running:%0A 1. If not already done, enable the Compute Engine API%0A and check the quota for your project at%0A https://console.developers.google.com/apis/api/compute%0A 2. This sample uses Application Default Credentials for authentication.%0A If not already done, install the gcloud CLI from%0A https://cloud.google.com/sdk and run%0A %60gcloud beta auth application-default login%60.%0A For more information, see%0A https://developers.google.com/identity/protocols/application-default-credentials%0A 3. Install the Python client library for Google APIs by running%0A %60pip install --upgrade google-api-python-client%60%0A%0ARun the script by terminal, for example:%0A python3 instance_group_migration.py --project_id=test-project%0A --zone=us-central1-a --instance_group_name=test-group --network=test-network%0A --subnetwork=test-network --preserve_external_ip=False%0A%0A%22%22%22%0Aimport warnings%0A%0Aimport argparse%0Afrom vm_network_migration.handlers.backend_service_migration import BackendServiceMigration%0A%0Aif __name__ == '__main__':%0A parser = argparse.ArgumentParser(%0A description=__doc__,%0A formatter_class=argparse.RawDescriptionHelpFormatter)%0A parser.add_argument('--project_id',%0A help='The project ID of the backend service.')%0A parser.add_argument('--region', default=None,%0A help='The region of the the backend service.')%0A parser.add_argument('--backend_service_name',%0A help='The name of the the backend service')%0A parser.add_argument('--network', help='The name of the new network')%0A parser.add_argument(%0A '--subnetwork',%0A default=None,%0A help='The name of the subnetwork. For auto mode networks,'%0A ' this field is optional')%0A parser.add_argument(%0A '--preserve_external_ip',%0A default=False,%0A help='Preserve the external IP address')%0A%0A args = parser.parse_args()%0A%0A if args.preserve_external_ip == 'True':%0A args.preserve_external_ip = True%0A else:%0A args.preserve_external_ip = False%0A%0A if args.preserve_external_ip:%0A%0A warnings.warn(%0A 'You choose to preserve the external IP. If the original instance '%0A 'has an ephemeral IP, it will be reserved as a static external IP after the '%0A 'execution.',%0A Warning)%0A continue_execution = input(%0A 'Do you still want to preserve the external IP? y/n: ')%0A if continue_execution == 'n':%0A args.preserve_external_ip = False%0A%0A backend_service_migration = BackendServiceMigration(args.project_id,%0A args.backend_service_name,%0A args.network,%0A args.subnetwork,%0A args.preserve_external_ip,%0A args.region)%0A backend_service_migration.network_migration()%0A
a1b88f50edf9f30f3840c50067545f2d315596aa
create compare.py
part-1/compare.py
part-1/compare.py
Python
0.000001
@@ -0,0 +1,438 @@ +# coding: utf8%0A%0Aprint '''%0ACPython implementation detail: Objects of different types except numbers are ordered by their type names; objects of the same types that don%E2%80%99t support proper comparison are ordered by their address.%0A%0A%3E%3E%3E 5 %3C 'foo' # %3Ctype 'int'%3E %3C %3Ctype 'str'%3E%0ATrue%0A%3E%3E%3E 5 %3C (1, 2) %0ATrue%0A%3E%3E%3E 5 %3C %7B%7D %0ATrue%0A%3E%3E%3E 5 %3C %5B1, 2%5D %0ATrue%0A%0A%0A%3E%3E%3E %5B1, 2%5D %3E 'foo' # 'list' %3C 'str' %0AFalse%0A%3E%3E%3E (1, 2) %3E 'foo' # 'tuple' %3E 'str'%0ATrue%0A%0A'''%0A%0A
696960eba9da48a8eb4830f464ccacb792e0c435
Get the list of entities found by the name given in input
sara_flexbe_states/src/sara_flexbe_states/List_Entities_By_Name.py
sara_flexbe_states/src/sara_flexbe_states/List_Entities_By_Name.py
Python
0.999836
@@ -0,0 +1,2984 @@ +#!/usr/bin/env python%0A%0Afrom flexbe_core.proxy import ProxySubscriberCached%0Afrom flexbe_core import EventState, Logger%0Afrom sara_msgs.msg import Entities%0Afrom geometry_msgs.msg import Pose%0Afrom tf.transformations import euler_from_quaternion%0A%0Aimport math%0A%0A%0Aclass list_found_entities(EventState):%0A '''%0A will list people seen by the camera%0A%0A -- frontality_level float%0A #%3E found_entities object%0A #%3C name string%0A%0A %3C= found people are found%0A %3C= not_found no one is found%0A%0A '''%0A%0A def __init__(self, frontality_level):%0A '''%0A Constructor%0A '''%0A super(list_found_entities, self).__init__(outcomes=%5B'found', 'not_found'%5D, output_keys=%5B'list_found_entities', 'number'%5D, input_keys=%5B'name'%5D)%0A self._sub = ProxySubscriberCached(%7B'/entities': Entities%7D)%0A%0A self._topic = %22/robot_pose%22%0A self._subpos = ProxySubscriberCached(%7Bself._topic: Pose%7D)%0A self.frontality_level = frontality_level%0A self.mypose = None%0A self.message = None%0A%0A%0A def execute(self, userdata):%0A%0A if self._subpos.has_msg(self._topic):%0A self.mypose = userdata.pose = self._subpos.get_last_msg(self._topic)%0A%0A if self._sub.has_msg('/entities'):%0A Logger.loginfo('getting message')%0A self.message = self._sub.get_last_msg('/entities')%0A self._sub.remove_last_msg('/entities')%0A%0A if self.message is not None and self.mypose is not None:%0A found_entities = self.list()%0A userdata.list_found_entities = found_entities%0A userdata.number = len(found_entities)%0A%0A if len(found_entities) != 0:%0A return 'found'%0A else:%0A return 'not_found'%0A%0A def list(self):%0A found_entities = %5B%5D%0A wraps = %5B%5D%0A for entity in self.message.entities:%0A if entity.name == 'name':%0A wrap = wrapper()%0A wrap.init(self.mypose, entity, self.frontality_level)%0A%0A wraps.append(wrap)%0A%0A wraps.sort(key=wrapper.key)%0A%0A for wrap in wraps:%0A found_entities.append(wrap.entity)%0A%0A return found_entities%0A%0A%0Aclass wrapper():%0A def init(self, mypose, entity, frontality_level):%0A%0A self.entity = entity%0A%0A x = entity.position.x - mypose.position.x%0A y = entity.position.y - mypose.position.y%0A%0A quat = %5Bmypose.orientation.x, mypose.orientation.y, mypose.orientation.z, mypose.orientation.w%5D%0A euler = euler_from_quaternion(quat)%0A A = euler%5B2%5D%0A%0A a = math.tan(A)%0A b = y - x * a%0A%0A self.dist = (abs(y - a * x - b) / (1 + b ** 2) ** 0.5) * frontality_level%0A self.dist += (((entity.position.x - mypose.position.x) ** 2 + (%0A entity.position.y - mypose.position.y) ** 2) ** 0.5) * (1 - frontality_level)%0A self.dist /= entity.probability**2%0A%0A def key(self):%0A%0A return self.dist%0A
3f24e7b51281031fa9713b737a9647b305105a89
Write unittest for parse_file() in ConfigReader.py
src/unittests.py
src/unittests.py
Python
0.000001
@@ -0,0 +1,3199 @@ +from ConfigReader import ConfigReader as cr%0Aimport unittest%0Aimport os%0A%0Aclass testConfigReader(unittest.TestCase):%0A %22%22%22Test cases for configReader%22%22%22%0A%0A def setUp(self):%0A %22%22%22Set up some important variables%22%22%22%0A self.example_config_filename = 'testConfig.config'%0A %0A # Set some values%0A oauth_string = 'xxxxxxxxxxx'%0A nick_string = 'justinfan4242'%0A channels_string = 'channel1 channel2'%0A channels_list = %5B'channel1', 'channel2'%5D%0A log_string = 'default.log'%0A time_format_string = %22'%5B%25Y-%25m-%25d %25H:%25M:%25S%5D'%22%0A time_format_value = '%5B%25Y-%25m-%25d %25H:%25M:%25S%5D'%0A host_string = 'irc.twitch.tv'%0A port_string = '6667'%0A port_int = 6667%0A block_size_string = '4096'%0A block_size_int = 4096%0A reconnect_timer_string = '600'%0A reconnect_timer_int = 600%0A stayalive_timer_string = '0'%0A stayalive_timer_int = 0%0A connect_timeout_string = '10'%0A connect_timeout_float = 10%0A receive_timeout_string = '0.1'%0A receive_timeout_float = 0.1%0A%0A # Write a config file%0A config_file_string = 'oauth: ' + oauth_string + '%5Cn'%0A config_file_string += 'nick: ' + nick_string + '%5Cn'%0A config_file_string += 'channels: ' + channels_string + '%5Cn'%0A config_file_string += 'log: ' + log_string + '%5Cn'%0A config_file_string += 'time_format: ' + time_format_string + '%5Cn'%0A config_file_string += 'host: ' + host_string + '%5Cn'%0A config_file_string += 'port: ' + port_string + '%5Cn'%0A config_file_string += 'block_size: ' + block_size_string + '%5Cn'%0A config_file_string += 'reconnect_timer: ' + reconnect_timer_string + '%5Cn'%0A config_file_string += 'stayalive_timer: ' + stayalive_timer_string + '%5Cn'%0A config_file_string += 'connect_timeout: ' + connect_timeout_string + '%5Cn'%0A config_file_string += 'receive_timeout: ' + receive_timeout_string + '%5Cn'%0A %0A config_example = open(self.example_config_filename,'w')%0A config_example.write(config_file_string)%0A config_example.close()%0A%0A self.exemplar_config = %7B%0A 'oauth': oauth_string,%0A 'nick': nick_string,%0A 'channels': channels_list,%0A 'log': log_string,%0A 'time_format': time_format_value,%0A 'host': host_string,%0A 'port': port_int,%0A 'block_size': block_size_int,%0A 'reconnect_timer': reconnect_timer_int,%0A 'stayalive_timer': stayalive_timer_int,%0A 'connect_timeout': connect_timeout_float,%0A 'receive_timeout': receive_timeout_float%0A %7D%0A%0A def test_parse_file(self):%0A %22%22%22Test parse_file()%22%22%22%0A reader = cr()%0A reader.parse_file(self.example_config_filename)%0A self.assertEqual(reader.configuration, self.exemplar_config)%0A%0A %0A%0A def tearDown(self):%0A %22%22%22Delete the example config file, etc%22%22%22%0A %0A os.remove(self.example_config_filename)%0A %0Aif __name__ == '__main__':%0A unittest.main()%0A
bbd6e538ec45c3650b7b3b7d520613fb4967236a
Print 4x4 grid
python/reddit/think_python_grid.py
python/reddit/think_python_grid.py
Python
0.000017
@@ -0,0 +1,342 @@ +def grid():%0A delimiter_row = ('%7B%7D%7B%7D'.format('+ ', '- ' * 4) * 4) + '+'%0A openspace_row = ('%7B%7D%7B%7D'.format('%7C', ' ' * 9) * 4) + '%7C'%0A for box_row in range(4 * 4):%0A if box_row %25 4 == 0:%0A print(delimiter_row)%0A print(openspace_row)%0A else:%0A print(openspace_row)%0A print(delimiter_row)%0A%0Agrid()%0A
6e535a2d597f172d9342fb8a547335890c474b49
Add a sample config file
src/config-sample.py
src/config-sample.py
Python
0.000001
@@ -0,0 +1,225 @@ +FLASK_SECRET_KEY = 'Enter a Flask Secret Key'%0A%0A# OAuth Credentials. You can find them on%0A# https://www.yelp.com/developers/v3/manage_app%0AYELP_CLIENT_ID = 'Enter Yelp Client ID'%0AYELP_CLIENT_SECRET = 'Enter Yelp Client Secret'%0A
7b545e210aa534b5d76e30769a125285cb40bfa8
Create PrintFunctionBancorFormula.py
solidity/python/constants/PrintFunctionBancorFormula.py
solidity/python/constants/PrintFunctionBancorFormula.py
Python
0.000001
@@ -0,0 +1,1428 @@ +from math import factorial%0A%0A%0AMIN_PRECISION = 32%0AMAX_PRECISION = 127%0ANUM_OF_PRECISIONS = 128%0A%0A%0ANUM_OF_COEFS = 34%0AmaxFactorial = factorial(NUM_OF_COEFS)%0Acoefficients = %5BmaxFactorial/factorial(i) for i in range(NUM_OF_COEFS)%5D%0A%0A%0Adef fixedExpUnsafe(x,precision):%0A xi = x%0A res = safeMul(coefficients%5B0%5D,1 %3C%3C precision)%0A for i in range(1,NUM_OF_COEFS-1):%0A res = safeAdd(res,safeMul(xi,coefficients%5Bi%5D))%0A xi = safeMul(xi,x) %3E%3E precision%0A res = safeAdd(res,safeMul(xi,coefficients%5B-1%5D))%0A return res / coefficients%5B0%5D%0A%0A%0Adef safeMul(x,y):%0A assert(x * y %3C (1 %3C%3C 256))%0A return x * y%0A%0A%0Adef safeAdd(x,y):%0A assert(x + y %3C (1 %3C%3C 256))%0A return x + y%0A%0A%0Adef binarySearch(func,args):%0A lo = 1%0A hi = 1 %3C%3C 256%0A while lo+1 %3C hi:%0A mid = (lo+hi)/2%0A try:%0A func(mid,args)%0A lo = mid%0A except Exception,error:%0A hi = mid%0A try:%0A func(hi,args)%0A return hi%0A except Exception,error:%0A func(lo,args)%0A return lo%0A%0A%0AmaxExpArray = %5B0%5D*NUM_OF_PRECISIONS%0Afor precision in range(NUM_OF_PRECISIONS):%0A maxExpArray%5Bprecision%5D = binarySearch(fixedExpUnsafe,precision)%0A%0A%0Aprint ' function BancorFormula() %7B'%0Afor precision in range(NUM_OF_PRECISIONS):%0A prefix = ' ' if MIN_PRECISION %3C= precision %3C= MAX_PRECISION else '//'%0A print ' %7B%7D maxExpArray%5B%7B:3d%7D%5D = 0x%7B:x%7D;'.format(prefix,precision,maxExpArray%5Bprecision%5D)%0Aprint ' %7D'%0A
8b0130ccb318f7f04daf8e8fa7532c88afb9f7c2
convert eexec doctests into eexec_test.py
Tests/misc/eexec_test.py
Tests/misc/eexec_test.py
Python
0.000319
@@ -0,0 +1,557 @@ +from __future__ import print_function, division, absolute_import%0Afrom fontTools.misc.py23 import *%0Afrom fontTools.misc.eexec import decrypt, encrypt%0A%0A%0Adef test_decrypt():%0A testStr = b%22%5C0%5C0asdadads asds%5C265%22%0A decryptedStr, R = decrypt(testStr, 12321)%0A assert decryptedStr == b'0d%5Cnh%5Cx15%5Cxe8%5Cxc4%5Cxb2%5Cx15%5Cx1d%5Cx108%5Cx1a%3C6%5Cxa1'%0A assert R == 36142%0A%0A%0Adef test_encrypt():%0A testStr = b'0d%5Cnh%5Cx15%5Cxe8%5Cxc4%5Cxb2%5Cx15%5Cx1d%5Cx108%5Cx1a%3C6%5Cxa1'%0A encryptedStr, R = encrypt(testStr, 12321)%0A assert encryptedStr == b%22%5C0%5C0asdadads asds%5C265%22%0A assert R == 36142%0A
6515e45e6d717ed2c84789a5d0941533465496b7
update test
h2o-py/tests/testdir_munging/pyunit_insert_missing.py
h2o-py/tests/testdir_munging/pyunit_insert_missing.py
from builtins import zip from builtins import range import sys sys.path.insert(1,"../../") import h2o from tests import pyunit_utils def insert_missing(): # Connect to a pre-existing cluster data = [[1, 2, 3, 1, 'a', 1, 9], [1, 6, 4, 2, 'a', 1, 9], [2, 3, 8, 6, 'b', 1, 9], [3, 4, 3, 2, 'b', 3, 8], [4, 5, 9, 5, 'c', 2, 8], [5, 7, 10,7, 'b', 8, 8]] h2o_data = h2o.H2OFrame(data) h2o_data.insert_missing_values(fraction = 0.0) num_nas = sum([h2o_data[c].isna().sum() for c in range(h2o_data.ncol)]) assert num_nas == 0, "Expected no missing values inserted, but got {0}".format(num_nas) h2o_data.insert_missing_values(fraction = 1.0) num_nas = sum([h2o_data[c].isna().sum() for c in range(h2o_data.ncol)]) assert num_nas == h2o_data.nrow*h2o_data.ncol, "Expected all missing values inserted, but got {0}".format(num_nas) if __name__ == "__main__": pyunit_utils.standalone_test(insert_missing) else: insert_missing()
Python
0.000001
@@ -502,24 +502,44 @@ tion = 0.0)%0A + print(h2o_data)%0A num_nas @@ -541,35 +541,25 @@ _nas = sum(%5B -h2o_data%5Bc%5D +v .isna().sum( @@ -564,35 +564,29 @@ m() for -c +v in -range( h2o_data .ncol)%5D) @@ -569,38 +569,32 @@ or v in h2o_data -.ncol) %5D)%0A assert nu @@ -720,24 +720,44 @@ tion = 1.0)%0A + print(h2o_data)%0A num_nas @@ -767,19 +767,9 @@ um(%5B -h2o_data%5Bc%5D +v .isn @@ -786,27 +786,21 @@ for -c +v in -range( h2o_data .nco @@ -799,14 +799,8 @@ data -.ncol) %5D)%0A
1de77375a12e26693c89f5fe824df82719bc8632
Add dummy directory
jacquard/directory/dummy.py
jacquard/directory/dummy.py
Python
0.000001
@@ -0,0 +1,270 @@ +from .base import Directory%0A%0Aclass DummyDirectory(Directory):%0A def __init__(self, users=()):%0A self.users = %7Bx.id: x for x in users%7D%0A%0A def lookup(self, user_id):%0A return self.users%5Buser_id%5D%0A%0A def all_users(self):%0A return self.users.values()%0A
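DummyDirectory above is a test double keyed by user id; any object with an id attribute satisfies it. A minimal exercise (User is a stand-in namedtuple for illustration, not part of the jacquard codebase):

    from collections import namedtuple

    User = namedtuple("User", ["id", "name"])
    directory = DummyDirectory(users=[User("u1", "Alice"), User("u2", "Bob")])
    assert directory.lookup("u1").name == "Alice"
    assert {u.id for u in directory.all_users()} == {"u1", "u2"}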
90eda86a7bbd1dc28023a6c5df1f964add3ddf55
add client test for oaipmh endpoint.
test/oaipmh_client_test.py
test/oaipmh_client_test.py
Python
0
@@ -0,0 +1,1075 @@ +import requests%0Afrom lxml import etree%0A%0ANS = %22%7Bhttp://www.openarchives.org/OAI/2.0/%7D%22%0A%0AJOURNAL_BASE_URL = %22http://localhost:5004/oai%22%0AARTICLE_BASE_URL = %22http://localhost:5004/oai.article%22%0A%0Adef harvest(base_url, resToken=None):%0A url = base_url + %22?verb=ListRecords%22%0A if resToken is not None:%0A url += %22&resumptionToken=%22 + resToken%0A else:%0A url += %22&metadataPrefix=oai_dc%22%0A%0A print %22harvesting %22 + url%0A resp = requests.get(url)%0A assert resp.status_code == 200, resp.text%0A%0A xml = etree.fromstring(resp.text%5B39:%5D)%0A rtel = xml.find(%22.//%22 + NS + %22resumptionToken%22)%0A if rtel is not None and (rtel.text is not None and rtel.text != %22%22):%0A print %22resumption token%22, rtel.text, %22cursor%22, rtel.get(%22cursor%22) + %22/%22 + rtel.get(%22completeListSize%22)%0A return rtel.text%0A%0A print %22no resumption token, complete%22%0A return None%0A%0A# journals%0Art = None%0Awhile True:%0A rt = harvest(JOURNAL_BASE_URL, rt)%0A if rt is None:%0A break%0A%0A# articles%0Art = None%0Awhile True:%0A rt = harvest(ARTICLE_BASE_URL, rt)%0A if rt is None:%0A break
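The client test above exercises the OAI-PMH ListRecords flow: the first request carries metadataPrefix=oai_dc, every follow-up carries only the resumptionToken, and harvesting stops when the token element is missing or empty. A self-contained Python 3 sketch of that loop (the endpoint URL is whatever you point it at; parsing resp.content sidesteps the resp.text[39:] declaration-stripping hack in the stored code):

    import requests
    from lxml import etree

    NS = "{http://www.openarchives.org/OAI/2.0/}"

    def harvest_all(base_url):
        token = None
        while True:
            params = {"verb": "ListRecords"}
            if token is None:
                params["metadataPrefix"] = "oai_dc"   # only on the first request
            else:
                params["resumptionToken"] = token
            resp = requests.get(base_url, params=params)
            resp.raise_for_status()
            root = etree.fromstring(resp.content)     # bytes avoid encoding issues
            yield from root.iter(NS + "record")
            rt = root.find(".//" + NS + "resumptionToken")
            if rt is None or not (rt.text or "").strip():
                return                                # no token -> list is complete
            token = rt.text

    # e.g. for rec in harvest_all("http://localhost:5004/oai"): ...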
cb18bcb02b86c185d946f9bf74d3e846fff7205c
fix error message
src/python/importer.py
src/python/importer.py
# Copyright (c) 2008 The Hewlett-Packard Development Company # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Nathan Binkert # Simple importer that allows python to import data from a dict of # code objects. The keys are the module path, and the items are the # filename and bytecode of the file. class CodeImporter(object): def __init__(self): self.modules = {} def add_module(self, filename, abspath, modpath, code): if modpath in self.modules: raise AttributeError, "%s already found in importer" self.modules[modpath] = (filename, abspath, code) def find_module(self, fullname, path): if fullname in self.modules: return self return None def load_module(self, fullname): # Because the importer is created and initialized in its own # little sandbox (in init.cc), the globals that were available # when the importer module was loaded and CodeImporter was # defined are not available when load_module is actually # called. Soooo, the imports must live here. import imp import os import sys mod = imp.new_module(fullname) sys.modules[fullname] = mod try: mod.__loader__ = self srcfile,abspath,code = self.modules[fullname] override = os.environ.get('M5_OVERRIDE_PY_SOURCE', 'false').lower() if override in ('true', 'yes') and os.path.exists(abspath): src = file(abspath, 'r').read() code = compile(src, abspath, 'exec') if os.path.basename(srcfile) == '__init__.py': mod.__path__ = fullname.split('.') mod.__file__ = srcfile exec code in mod.__dict__ except Exception: del sys.modules[fullname] raise return mod # Create an importer and add it to the meta_path so future imports can # use it. There's currently nothing in the importer, but calls to # add_module can be used to add code. import sys importer = CodeImporter() add_module = importer.add_module sys.meta_path.append(importer)
Python
0.000002
@@ -1979,16 +1979,26 @@ mporter%22 + %25 modpath %0A%0A
0ba3dff1e150d534e4eda086ebbd53ec3789d82c
Add alg_balance_symbols.py
alg_max_connected_colors.py
alg_max_connected_colors.py
Python
0.00058
@@ -0,0 +1,215 @@ +def max_connected_colors():%0A pass%0A%0A%0Adef main():%0A # A grid of connected colors: 5 (of 2's).%0A grid = %5B%5B1, 1, 2, 3%5D,%0A %5B1, 2, 3, 2%5D,%0A %5B3, 2, 2, 2%5D%5D%0A%0A%0Aif __init__ == '__main__':%0A main()%0A
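The new file above only stubs max_connected_colors, but its comment fixes the intended answer: the largest 4-connected block of equal values in that grid is the five 2's. One plausible way to fill in the stub (an illustrative sketch, not the author's implementation) is an iterative flood fill:

    def max_connected_colors(grid):
        rows, cols = len(grid), len(grid[0])
        seen = set()
        best = 0
        for r in range(rows):
            for c in range(cols):
                if (r, c) in seen:
                    continue
                color, size, stack = grid[r][c], 0, [(r, c)]
                seen.add((r, c))
                while stack:                      # flood-fill one component
                    x, y = stack.pop()
                    size += 1
                    for nx, ny in ((x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)):
                        if (0 <= nx < rows and 0 <= ny < cols
                                and (nx, ny) not in seen and grid[nx][ny] == color):
                            seen.add((nx, ny))
                            stack.append((nx, ny))
                best = max(best, size)
        return best

    grid = [[1, 1, 2, 3],
            [1, 2, 3, 2],
            [3, 2, 2, 2]]
    print(max_connected_colors(grid))  # 5, matching the comment in the record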
ce6052ee9df9ca83ac2da691eb51a8eaea0ab603
Comment model migration
comments/migrations/0001_initial.py
comments/migrations/0001_initial.py
Python
0.000001
@@ -0,0 +1,1204 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.6 on 2016-05-10 22:41%0Afrom __future__ import unicode_literals%0A%0Afrom django.conf import settings%0Afrom django.db import migrations, models%0Aimport django.db.models.deletion%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A initial = True%0A%0A dependencies = %5B%0A ('blog', '0007_post_tags'),%0A migrations.swappable_dependency(settings.AUTH_USER_MODEL),%0A %5D%0A%0A operations = %5B%0A migrations.CreateModel(%0A name='Comment',%0A fields=%5B%0A ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),%0A ('comment', models.TextField(max_length=500)),%0A ('is_removed', models.BooleanField(default=False)),%0A ('is_public', models.BooleanField(default=True)),%0A ('created', models.DateTimeField(auto_now_add=True)),%0A ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to=settings.AUTH_USER_MODEL)),%0A ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='blog.Post')),%0A %5D,%0A ),%0A %5D%0A
74260fbf266628d4f8afbbab61bbd6de0ddfe7fe
Remove unused constant
dragonflow/neutron/common/constants.py
dragonflow/neutron/common/constants.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from neutron_lib.api.definitions import portbindings DF_REMOTE_PORT_TYPE = 'remote_port' DF_BINDING_PROFILE_PORT_KEY = 'port_key' DF_BINDING_PROFILE_HOST_IP = 'host_ip' DF_PORT_BINDING_PROFILE = portbindings.PROFILE
Python
0.000247
@@ -570,62 +570,8 @@ e.%0A%0A -from neutron_lib.api.definitions import portbindings%0A%0A DF_R @@ -686,51 +686,4 @@ ip'%0A -DF_PORT_BINDING_PROFILE = portbindings.PROFILE%0A
22298d91fff788c37395cdad9245b3e7ed20cfdf
Add a snippet (Python OpenCV).
python/opencv/opencv_2/images/display_image_with_matplotlib.py
python/opencv/opencv_2/images/display_image_with_matplotlib.py
Python
0.000018
@@ -0,0 +1,1525 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0A# Copyright (c) 2015 J%C3%A9r%C3%A9mie DECOCK (http://www.jdhp.org)%0A%0A%22%22%22%0AOpenCV - Display image: display an image given in arguments%0A%0ARequired: opencv library (Debian: aptitude install python-opencv)%0A%0ASee: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#display-an-image%0A%22%22%22%0A%0Afrom __future__ import print_function%0A%0Aimport cv2 as cv%0Aimport argparse%0Afrom matplotlib import pyplot as plt%0A%0Adef main():%0A%0A # Parse the programm options (get the path of the image file to display)%0A%0A parser = argparse.ArgumentParser(description='An opencv snippet.')%0A parser.add_argument(%22--infile%22, %22-i%22, help=%22The picture file to display%22, required=True, metavar=%22FILE%22)%0A args = parser.parse_args()%0A%0A infile_str = args.infile%0A%0A # OpenCV%0A%0A # imread_flags is a flag which specifies the way image should be read:%0A # - cv.IMREAD_COLOR loads a color image. Any transparency of image will be neglected. It is the default flag.%0A # - cv.IMREAD_GRAYSCALE loads image in grayscale mode%0A # - cv.IMREAD_UNCHANGED loads image as such including alpha channel%0A imread_flags = cv.IMREAD_GRAYSCALE%0A img_np = cv.imread(infile_str, imread_flags)%0A%0A plt.imshow(img_np, cmap='gray', interpolation='none') # Display the image %22img_np%22 with matplotlib%0A plt.xticks(%5B%5D) # to hide tick values on X axis%0A plt.yticks(%5B%5D) # to hide tick values on Y axis%0A plt.show()%0A%0Aif __name__ == '__main__':%0A main()%0A
ba9e4c6b003cc002e5bc7216da960e47f9fe5424
Print information about all nitrogens.
copper_imidazole_csv_allnitrogen.py
copper_imidazole_csv_allnitrogen.py
Python
0
@@ -0,0 +1,2687 @@ +#!/usr/bin/env python2%0A%0Aimport orca_parser%0Afrom copper_imidazole_analysis import CopperImidazoleAnalysis%0Aimport argparse%0Aimport csv%0A%0Acia = CopperImidazoleAnalysis()%0A%0Aparser = argparse.ArgumentParser(description=%22Given pathnames of ORCA output files, make a dump of all nitrogen parameters to a CSV file.%22)%0A%0Aparser.add_argument(%22--csvname%22, dest=%22csvname%22, metavar=%22%3CCSV output root name%3E%22, type=str, default=%22nitrogen.csv%22, help=%22optional name for the CSV output file%22)%0Aparser.add_argument(dest=%22namelist%22, metavar=%22%3CORCA filename%3E%22, nargs=%22+%22, type=str, default=None, help=%22ORCA output files%22)%0A%0Aargs = parser.parse_args()%0Anamelist = args.namelist%0A%0Awith open(args.csvname, 'wb') as csvfile:%0A csvwriter = csv.writer(csvfile, delimiter=',')%0A%0A for name in namelist:%0A csvwriter.writerow(%5Bname%5D)%0A csvwriter.writerow(%5B%22g-tensor%22,%0A %22id_copper%22,%0A %22A_copper (MHz)%22,%0A %22euler_copper (deg.)%22,%0A %22NQCC_copper (MHz)%22,%0A %22eta_copper%22%5D)%0A%0A orcafile = orca_parser.ORCAOutputParser(name)%0A gtensor, giso = orcafile.return_gtensor()%0A id_copper = cia.copper_id(orcafile)%0A atensor_copper = cia.hyperfine(orcafile, id_copper)%0A euler_copper = cia.euler(orcafile, id_copper)%0A nqi_copper, nqcc_copper, eta_copper = cia.nqi(orcafile, id_copper)%0A%0A csvwriter.writerow(%5Bgtensor,%0A id_copper,%0A atensor_copper,%0A euler_copper,%0A nqcc_copper,%0A eta_copper%5D)%0A%0A csvwriter.writerow(%5B%22%22,%0A %22id_nitrogen%22,%0A %22A_nitrogen (MHz)%22,%0A %22euler_nitrogen (deg.)%22,%0A %22NQCC_nitrogen (MHz)%22,%0A %22eta_nitrogen%22,%0A %22Cu_N_distance (Angstroms)%22%5D)%0A%0A nitrogen_list = orcafile.find_element(%22N%22)%0A%0A for id_nitrogen in nitrogen_list:%0A atensor_nitrogen = cia.hyperfine(orcafile, id_nitrogen)%0A euler_nitrogen = cia.euler(orcafile, id_nitrogen)%0A nqi_nitrogen, nqcc_nitrogen, eta_nitrogen = cia.nqi(orcafile, id_nitrogen)%0A cu_n_dist = orcafile.pair_distance(id_copper, id_nitrogen)%0A%0A csvwriter.writerow(%5B%22%22,%0A id_nitrogen,%0A atensor_nitrogen,%0A euler_nitrogen,%0A nqcc_nitrogen,%0A eta_nitrogen,%0A cu_n_dist%5D)%0A
0a93e1d5355802746a513e7e64625f18fe9b3ec7
remove custom CodeInput._get_bbcode
designer/uix/designer_code_input.py
designer/uix/designer_code_input.py
import re from kivy import Config from kivy.utils import get_color_from_hex from pygments import styles, highlight from designer.helper_functions import show_alert from kivy.uix.codeinput import CodeInput from kivy.properties import BooleanProperty, Clock, partial class DesignerCodeInput(CodeInput): '''A subclass of CodeInput to be used for KivyDesigner. It has copy, cut and paste functions, which otherwise are accessible only using Keyboard. It emits on_show_edit event whenever clicked, this is catched to show EditContView; ''' __events__ = ('on_show_edit',) clicked = BooleanProperty(False) '''If clicked is True, then it confirms that this widget has been clicked. The one checking this property, should set it to False. :data:`clicked` is a :class:`~kivy.properties.BooleanProperty` ''' def __init__(self, **kwargs): super(DesignerCodeInput, self).__init__(**kwargs) parser = Config.get_configparser('DesignerSettings') if parser: parser.add_callback(self.on_codeinput_theme, 'global', 'code_input_theme') self.style_name = parser.getdefault('global', 'code_input_theme', 'emacs') def on_codeinput_theme(self, section, key, value, *args): if not value in styles.get_all_styles(): show_alert("Error", "This theme is not available") else: self.style_name = value def on_style_name(self, *args): super(DesignerCodeInput, self).on_style_name(*args) self.background_color = get_color_from_hex(self.style.background_color) self._trigger_refresh_text() def _get_bbcode(self, ntext): # override the default method to fix bug with custom themes # get bbcoded text for python try: ntext[0] # replace brackets with special chars that aren't highlighted # by pygment. can't use &bl; ... cause & is highlighted ntext = ntext.replace(u'[', u'\x01').replace(u']', u'\x02') ntext = highlight(ntext, self.lexer, self.formatter) ntext = ntext.replace(u'\x01', u'&bl;').replace(u'\x02', u'&br;') # replace special chars with &bl; and &br; ntext = ''.join((u'[color=', str(self.text_color), u']', ntext, u'[/color]')) ntext = ntext.replace(u'\n', u'') # remove possible extra highlight options ntext = ntext.replace(u'[u]', '').replace(u'[/u]', '') return ntext except IndexError: return '' def on_show_edit(self, *args): pass def on_touch_down(self, touch): '''Override of CodeInput's on_touch_down event. Used to emit on_show_edit ''' if self.collide_point(*touch.pos): self.clicked = True self.dispatch('on_show_edit') return super(DesignerCodeInput, self).on_touch_down(touch) def _do_focus(self, *args): '''Force the focus on this widget ''' self.focus = True def do_select_all(self, *args): '''Function to select all text ''' self.select_all() def find_next(self, search, use_regex=False, case=False): '''Find the next occurrence of the string according to the cursor position ''' text = self.text if not case: text = text.upper() search = search.upper() lines = text.splitlines() col = self.cursor_col row = self.cursor_row found = -1 size = 0 # size of string before selection line = None search_size = len(search) for i, line in enumerate(lines): if i >= row: if use_regex: if i == row: line_find = line[col + 1:] else: line_find = line[:] found = re.search(search, line_find) if found: search_size = len(found.group(0)) found = found.start() else: found = -1 else: # if on current line, consider col if i == row: found = line.find(search, col + 1) else: found = line.find(search) # has found the string. found variable indicates the initial po if found != -1: self.cursor = (found, i) break size += len(line) if found != -1: pos = text.find(line) + found self.select_text(pos, pos + search_size) def find_prev(self, search, use_regex=False, case=False): '''Find the previous occurrence of the string according to the cursor position ''' text = self.text if not case: text = text.upper() search = search.upper() lines = text.splitlines() col = self.cursor_col row = self.cursor_row lines = lines[:row + 1] lines.reverse() line_number = len(lines) found = -1 line = None search_size = len(search) for i, line in enumerate(lines): i = line_number - i - 1 if use_regex: if i == row: line_find = line[:col] else: line_find = line[:] found = re.search(search, line_find) if found: search_size = len(found.group(0)) found = found.start() else: found = -1 else: # if on current line, consider col if i == row: found = line[:col].find(search) else: found = line.find(search) # has found the string. found variable indicates the initial po if found != -1: self.cursor = (found, i) break if found != -1: pos = text.find(line) + found self.select_text(pos, pos + search_size)
Python
0.00002
@@ -1732,956 +1732,8 @@ ()%0A%0A - def _get_bbcode(self, ntext):%0A # override the default method to fix bug with custom themes%0A # get bbcoded text for python%0A try:%0A ntext%5B0%5D%0A # replace brackets with special chars that aren't highlighted%0A # by pygment. can't use &bl; ... cause & is highlighted%0A ntext = ntext.replace(u'%5B', u'%5Cx01').replace(u'%5D', u'%5Cx02')%0A ntext = highlight(ntext, self.lexer, self.formatter)%0A ntext = ntext.replace(u'%5Cx01', u'&bl;').replace(u'%5Cx02', u'&br;')%0A # replace special chars with &bl; and &br;%0A ntext = ''.join((u'%5Bcolor=', str(self.text_color), u'%5D',%0A ntext, u'%5B/color%5D'))%0A ntext = ntext.replace(u'%5Cn', u'')%0A # remove possible extra highlight options%0A ntext = ntext.replace(u'%5Bu%5D', '').replace(u'%5B/u%5D', '')%0A%0A return ntext%0A except IndexError:%0A return ''%0A%0A
eb0772fc6c30d98b83bf1c8e7d83af21066ae45b
Add peek method and implementation
data_structures/Stack/Python/Stack.py
data_structures/Stack/Python/Stack.py
# Author: AlexBanks97 # Purpose: LIFO Stack implementation using python array. # Date: October 15th 2017 class Stack(object): def __init__(self): # Initialize stack as empty array self.stack = [] # Return and remove the last element of the stack array. def pop(self): # If the stack is not empty, pop. if self.stack.length > 0: return self.stack.pop() # Add an element to the end of the stack array. def push(self, element): self.stack.append(element)
Python
0
@@ -512,16 +512,139 @@ .append(element) +%0A%0A # Return the last element of the stack array (without removing it).%0A def peek(self):%0A return self.stack%5B-1%5D
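Decoded, the diff above appends a peek method that returns self.stack[-1]. Two caveats when reading the stored class: self.stack.length is a JavaScript idiom that would raise AttributeError in Python, and peek on an empty stack raises IndexError. An idiomatic rendering of the whole class (a sketch with those points noted in comments, not the committed code) is:

    class Stack(object):
        def __init__(self):
            # Initialize stack as an empty list
            self.stack = []

        def pop(self):
            # len(), not .length -- the stored version would raise AttributeError
            if len(self.stack) > 0:
                return self.stack.pop()

        def push(self, element):
            self.stack.append(element)

        def peek(self):
            # Raises IndexError on an empty stack, like the committed version
            return self.stack[-1]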
825c4d613915d43aea2e6ee0bc5d5b49ed0a4500
Create a simple method to segment a trip into sections
emission/analysis/classification/segmentation/section_segmentation.py
emission/analysis/classification/segmentation/section_segmentation.py
Python
0
@@ -0,0 +1,2964 @@ +# Standard imports%0Aimport attrdict as ad%0Aimport numpy as np%0Aimport datetime as pydt%0A%0A# Our imports%0Aimport emission.analysis.classification.cleaning.location_smoothing as ls%0Aimport emission.analysis.point_features as pf%0Aimport emission.storage.decorations.location_queries as lq%0A%0Adef segment_into_sections(trip):%0A points_df = lq.get_activities_for_section(trip)%0A no_tilting_points_df = points_df%5Bpoints_df.activity != lq.Activities.TILTING%5D%0A%0A section_list = %5B%5D%0A curr_section = ad.AttrDict(%7B%22user_id%22: trip.user_id, %22loc_filter%22: trip.loc_filter,%0A %22start_ts%22: trip.start_ts, %22start_time%22: trip.start_time,%0A %22activity%22: no_tilting_points_df.iloc%5B0%5D.activity%7D)%0A for idx, row in enumerate(no_tilting_points_df.to_dict('records')):%0A if row%5B%22activity%22%5D != curr_section.activity:%0A # Let's add a second check here for confidence and types of activities%0A if (row%5B'agc'%5D %3E 60 and%0A row%5B'activity'%5D != lq.Activities.UNKNOWN and%0A row%5B'activity'%5D != lq.Activities.STILL):%0A # Because the first section is initialized with the first activity.%0A # So when idx == 0, the activities will be equal and this is%0A # guaranteed to not be invoked%0A assert(idx %3E 0)%0A prev_ts = no_tilting_points_df.iloc%5Bidx-1%5D%5B%22write_ts%22%5D%0A print(%22At %25s, found new activity %25s compared to current %25s - creating new section with start_time %25s%22 %25%0A (str(pydt.datetime.fromtimestamp(row%5B%22write_ts%22%5D/1000)),%0A row%5B%22activity%22%5D, curr_section.activity,%0A str(pydt.datetime.fromtimestamp(prev_ts/1000))))%0A # complete this section%0A curr_section.end_ts = prev_ts%0A curr_section.end_time = str(pydt.datetime.fromtimestamp(curr_section.end_ts/1000))%0A section_list.append(curr_section)%0A # make a new section%0A curr_section = ad.AttrDict(%7B%22user_id%22: trip.user_id, %22loc_filter%22: trip.loc_filter,%0A %22start_ts%22: prev_ts,%0A %22start_time%22: pydt.datetime.fromtimestamp(prev_ts/1000),%0A %22activity%22: row%5B%22activity%22%5D%7D)%0A else:%0A print(%22At %25s, retained existing activity %25s%22 %25 %0A (str(pydt.datetime.fromtimestamp(row%5B%22write_ts%22%5D/1000)), curr_section.activity))%0A else:%0A print(%22At %25s, retained existing activity %25s%22 %25 %0A (str(pydt.datetime.fromtimestamp(row%5B%22write_ts%22%5D/1000)), curr_section.activity))%0A%0A print(%22Detected trip end! Ending section at %25s%22 %25 trip.end_time) %0A # End the last section at the same time as the trip%0A curr_section.end_ts = trip.end_ts%0A curr_section.end_time = trip.end_time%0A section_list.append(curr_section)%0A return section_list%0A
5f2cd26054adff5a1fbf9ba5d56766b972f46670
Add a multithreaded stress tester for key generation. Hopefully provides additional confidence that that code is correct with respect to threading.
leakcheck/thread-key-gen.py
leakcheck/thread-key-gen.py
Python
0
@@ -0,0 +1,868 @@ +# Copyright (C) Jean-Paul Calderone%0A# See LICENSE for details.%0A#%0A# Stress tester for thread-related bugs in RSA and DSA key generation. 0.12 and%0A# older held the GIL during these operations. Subsequent versions release it%0A# during them.%0A%0Afrom threading import Thread%0A%0Afrom OpenSSL.crypto import TYPE_RSA, TYPE_DSA, PKey%0A%0Adef generate_rsa():%0A keys = %5B%5D%0A for i in range(100):%0A key = PKey()%0A key.generate_key(TYPE_RSA, 1024)%0A keys.append(key)%0A%0Adef generate_dsa():%0A keys = %5B%5D%0A for i in range(100):%0A key = PKey()%0A key.generate_key(TYPE_DSA, 512)%0A keys.append(key)%0A%0A%0Adef main():%0A threads = %5B%5D%0A for i in range(3):%0A t = Thread(target=generate_rsa, args=())%0A threads.append(t)%0A t = Thread(target=generate_dsa, args=())%0A threads.append(t)%0A%0A for t in threads:%0A t.start()%0A%0Amain()%0A
cdab60ed217669fc3e84854b649ccd978a0c6118
fix typos
lg_earth/scripts/add_kml.py
lg_earth/scripts/add_kml.py
#!/usr/bin/env python from std_msgs.msg import String, Empty from lg_common.srv import USCSMessage from lg_common.msg import StringArray from interactivespaces_msgs.msg import GenericMessage import SimpleHTTPServer import SocketServer import threading import tempfile import rospy import json import copy import os import re import binascii DEFAULT_VIEWPORTS = ['left_three', 'left_two', 'left_one', 'center', 'right_one', 'right_two', 'right_three'] DEFAULT_EARTH_INSTANCE = { u'activity': u'earth', u'activity_config': {}, u'assets': [], u'height': 1920, u'presentation_viewport': u'CHANGE_ME', u'slug': -1875729098, u'width': 1080, u'x_coord': 0, u'y_coord': 0 } kml_id_pattern = re.compile('<kml .* id=\"()\".*>') def get_kml_id(kml): """ if <kml> tag has id attribute returns it value othervise return unsigned crc32 of kml string """ id_match = kml_id_pattern.search(kml, re.IGNORECASE) if id_match: return id_match.group(1) return hex(binascii.crc32(kml) % (1 << 32)) def get_match_any_starts_with(prefixes): def matcher(test_string): for prefix in prefixes: if test_string.startswith(prefix): return True return False return matcher class KMLAdder(): def __init__(self, uscs_service, director_pub, added_kml_pub, port, hostname='localhost', viewports=None): self.serve_dir = tempfile.mktemp() self.uscs_service = uscs_service self.director_pub = director_pub self.added_kml_pub = added_kml_pub self.id_to_file = dict() self.hostname = hostname self.viewports = viewports if self.viewports is None: self.viewports = DEFAULT_VIEWPORTS self.port = port self.server = threading.Thread(target=self._serve) os.mkdir(self.serve_dir) self.server.start() def handle_kml(self, msg): kml = msg.data filename = tempfile.mktemp(dir=self.serve_dir) with open(filename, 'w') as f: f.write(kml) kml_id = get_kml_id(kml) if not kml_id self.id_to_file: self.id_to_file[kml_id] = list() # Keep track of files for easier remove by id self.id_to_file[kml_id].append(os.path.basename(filename)) current_scene = self.uscs_service.call().message current_scene = json.loads(current_scene) self.add_earths(current_scene) for window in current_scene['windows']: if window['activity'] != 'earth': continue window['assets'].append(self.formatURL(filename)) new_msg = GenericMessage() new_msg.type = 'json' new_msg.message = json.dumps(current_scene) self.director_pub.publish(new_msg) self.added_kml_pub.publish(StringArray(list(self.id_to_file.keys()))) def formatURL(self, filename): return 'http://{}:{}/{}'.format(self.hostname, self.port, os.path.basename(filename)) def clear_kmls(self, msg): current_scene = self.uscs_service.call().message current_scene = json.loads(current_scene) ids = msg.strings if msg.strings else None matcher = None if ids: files = [name for name in names for names in self.id_to_file] for id in ids: self.id_to_file.pop(id) urls_to_remove = [self.formatURL(filename) for filename in files] matcher = get_match_any_starts_with(urls_to_remove) else: # Remove all additional kmls self.id_to_file = dict() matcher = get_match_any_starts_with([self.formatURL(self.serve_dir)]) for window in current_scene['windows']: if window['activity'] == 'earth': window['assets'] = [a for a in window['assets'] if not matcher(a)] new_msg = GenericMessage() new_msg.type = 'json' new_msg.message = json.dumps(current_scene) self.director_pub.publish(new_msg) self.added_kml_pub.publish(StringArray(list(self.id_to_file.keys()))) def _serve(self): os.chdir(self.serve_dir) Handler = SimpleHTTPServer.SimpleHTTPRequestHandler self.httpd = SocketServer.TCPServer(("", self.port), Handler) self.httpd.serve_forever() def add_earths(self, scene): for viewport in self.viewports: flag = False for window in scene['windows']: if window['activity'] == 'earth' and window['presentation_viewport'] == viewport: flag = True # if no instance of earth w/ our current viewport is found # we add one and give it our viewport if flag is False: scene['windows'].append(copy.deepcopy(DEFAULT_EARTH_INSTANCE)) scene['windows'][-1]['presentation_viewport'] = viewport def shutdown(self): self.httpd.shutdown() self.server.join() def main(): rospy.init_node('add_kml') director_pub = rospy.Publisher('/director/scene', GenericMessage, queue_size=10) added_kml_pub = rospy.Publisher('/lg_earth/added_kml', StringArray, latch=True) uscs_service = rospy.ServiceProxy('/uscs/message', USCSMessage) hostname = rospy.get_param('~hostname', 'localhost') port = rospy.get_param('~port', 18111) k = KMLAdder(uscs_service, director_pub, added_kml_pub, port, hostname) rospy.Subscriber('/lg_earth/add_kml', String, k.handle_kml) rospy.Subscriber('/lg_earth/clear_kml', StringArray, k.clear_kmls) rospy.on_shutdown(k.shutdown) rospy.spin() if __name__ == '__main__': main()
Python
0.999974
@@ -2153,19 +2153,22 @@ if -not kml_id +not in self @@ -5266,16 +5266,30 @@ tch=True +, queue_size=1 )%0A %0A
c87be0f98295d64addc01529999996b566c80f2c
add sent notification status
migrations/versions/00xx_add_sent_notification_status.py
migrations/versions/00xx_add_sent_notification_status.py
Python
0
@@ -0,0 +1,1734 @@ +%22%22%22empty message%0A%0ARevision ID: 00xx_add_sent_notification_status%0ARevises: 0075_create_rates_table%0ACreate Date: 2017-04-24 16:55:20.731069%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '00xx_sent_notification_status'%0Adown_revision = '0075_create_rates_table'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0Aenum_name = 'notify_status_type'%0Atmp_name = 'tmp_' + enum_name%0A%0Aold_options = (%0A 'created',%0A 'sending',%0A 'delivered',%0A 'pending',%0A 'failed',%0A 'technical-failure',%0A 'temporary-failure',%0A 'permanent-failure'%0A)%0Anew_options = old_options + ('sent',)%0A%0Aold_type = sa.Enum(*old_options, name=enum_name)%0Anew_type = sa.Enum(*new_options, name=enum_name)%0A%0Aalter_str = 'ALTER TABLE %7Btable%7D ALTER COLUMN status TYPE %7Benum%7D USING status::text::notify_status_type '%0A%0Adef upgrade():%0A op.execute('ALTER TYPE %7Benum%7D RENAME TO %7Btmp_name%7D'.format(enum=enum_name, tmp_name=tmp_name))%0A%0A new_type.create(op.get_bind())%0A op.execute(alter_str.format(table='notifications', enum=enum_name))%0A op.execute(alter_str.format(table='notification_history', enum=enum_name))%0A%0A op.execute('DROP TYPE ' + tmp_name)%0A%0A%0Adef downgrade():%0A op.execute('ALTER TYPE %7Benum%7D RENAME TO %7Btmp_name%7D'.format(enum=enum_name, tmp_name=tmp_name))%0A%0A # Convert 'sent' template into 'sending'%0A update_str = %22UPDATE TABLE %7Btable%7D SET status='sending' where status='sent'%22%0A%0A op.execute(update_str.format(table='notifications'))%0A op.execute(update_str.format(table='notification_history'))%0A%0A old_type.create(op.get_bind())%0A%0A op.execute(alter_str.format(table='notifications', enum=enum_name))%0A op.execute(alter_str.format(table='notification_history', enum=enum_name))%0A%0A op.execute('DROP TYPE ' + tmp_name)%0A
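The migration above is the classic PostgreSQL recipe for widening an enum in place: rename the live type aside, create the replacement, rebind each column with a USING cast, then drop the renamed original. (Note, separately, that the stored downgrade's UPDATE TABLE ... syntax is not valid SQL; plain UPDATE notifications SET ... is the correct form.) Stripped to one table, a sketch of the upgrade half:

    import sqlalchemy as sa
    from alembic import op

    new_type = sa.Enum(
        'created', 'sending', 'delivered', 'pending', 'failed',
        'technical-failure', 'temporary-failure', 'permanent-failure', 'sent',
        name='notify_status_type')

    def upgrade():
        # 1. rename the live type out of the way
        op.execute("ALTER TYPE notify_status_type RENAME TO tmp_notify_status_type")
        # 2. create the replacement, now including 'sent'
        new_type.create(op.get_bind())
        # 3. rebind the column; USING casts existing rows across
        op.execute("ALTER TABLE notifications ALTER COLUMN status "
                   "TYPE notify_status_type USING status::text::notify_status_type")
        # 4. drop the renamed original
        op.execute("DROP TYPE tmp_notify_status_type")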
44526603b2ab388fceb99de625f3282c4a2a8d99
write the test with the proper constants
tests/modules/test_math.py
tests/modules/test_math.py
import math from ..base import BaseTopazTest class TestMath(BaseTopazTest): def assert_float_equal(self, result, expected, eps=1e-15): assert abs(result - expected) < eps def test_domain_error(self, space): space.execute("Math::DomainError") def test_pi(self, space): w_res = space.execute("return Math::PI") assert space.float_w(w_res) == math.pi def test_exp(self, space): w_res = space.execute("return [Math.exp(0.0), Math.exp(1)]") assert self.unwrap(space, w_res) == [1, math.exp(1)] def test_sqrt(self, space): w_res = space.execute("return [Math.sqrt(4), Math.sqrt(28)]") assert self.unwrap(space, w_res) == [2, math.sqrt(28)] def test_e(self, space): w_res = space.execute("return Math::E") assert space.float_w(w_res) == math.e def test_log(self, space): w_res = space.execute("return Math.log(4, 10)") self.assert_float_equal(space.float_w(w_res), math.log(4, 10)) w_res = space.execute("return Math.log(28)") self.assert_float_equal(space.float_w(w_res), math.log(28)) w_res = space.execute("return Math.log(3, 4)") self.assert_float_equal(space.float_w(w_res), math.log(3, 4)) def test_gamma(self, space): w_res = space.execute("return Math.gamma(5.0)") self.assert_float_equal(space.float_w(w_res), 24.0) w_res = space.execute("return Math.gamma(6.0)") self.assert_float_equal(space.float_w(w_res), 120.0) w_res = space.execute("return Math.gamma(0.5)") self.assert_float_equal(space.float_w(w_res), math.pi ** 0.5) w_res = space.execute("return Math.gamma(1000)") assert space.float_w(w_res) == float('inf') w_res = space.execute("return Math.gamma(0.0)") assert space.float_w(w_res) == float('inf') w_res = space.execute("return Math.gamma(-0.0)") assert space.float_w(w_res) == float('-inf') # inf w_res = space.execute("return Math.gamma(1e1000)") assert space.float_w(w_res) == float('inf') with self.raises(space, "DomainError", 'Numerical argument is out of domain - "gamma"'): space.execute("""Math.gamma(-1)""") with self.raises(space, "DomainError", 'Numerical argument is out of domain - "gamma"'): # -inf space.execute("""Math.gamma(-1e1000)""") # nan w_res = space.execute("return Math.gamma(1e1000 - 1e1000)") assert math.isnan(space.float_w(w_res))
Python
0
@@ -1979,22 +1979,8 @@ ')%0A%0A - # inf%0A @@ -2024,22 +2024,31 @@ h.gamma( -1e1000 +Float::INFINITY )%22)%0A @@ -2342,27 +2342,8 @@ '):%0A - # -inf%0A @@ -2383,35 +2383,30 @@ ma(- -1e1000 +Float::INFINITY )%22%22%22)%0A%0A - # nan%0A @@ -2454,23 +2454,18 @@ mma( -1e1000 - 1e1000 +Float::NAN )%22)%0A
7597e834288c21065703bcdc86530a0ad5414a95
backup strategy tasks
nodeconductor/backup/tasks.py
nodeconductor/backup/tasks.py
Python
0.000002
@@ -0,0 +1,351 @@ +from celery import shared_task%0A%0A%0A@shared_task%0Adef backup_task(backupable_instance):%0A backupable_instance.get_backup_strategy.backup()%0A%0A%0A@shared_task%0Adef restore_task(backupable_instance):%0A backupable_instance.get_backup_strategy.restore()%0A%0A%0A@shared_task%0Adef delete_task(backupable_instance):%0A backupable_instance.get_backup_strategy.delete()%0A
4820013e207947fe7ff94777cd8dcf1ed474eab1
Add migration for account lockout fields on User
migrations/versions/fb6a6554b21_add_account_lockout_fields_to_user.py
migrations/versions/fb6a6554b21_add_account_lockout_fields_to_user.py
Python
0
@@ -0,0 +1,929 @@ +%22%22%22Add account lockout fields to User%0A%0ARevision ID: fb6a6554b21%0ARevises: 1f9b411bf6df%0ACreate Date: 2015-10-29 01:07:27.930095%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = 'fb6a6554b21'%0Adown_revision = '1f9b411bf6df'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0Adef upgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.add_column('users', sa.Column('failed_login_attempts', sa.Integer(), nullable=True))%0A op.add_column('users', sa.Column('last_failed_login_attempt', sa.DateTime(), nullable=True))%0A op.add_column('users', sa.Column('locked_out', sa.Boolean(), nullable=True))%0A ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.drop_column('users', 'locked_out')%0A op.drop_column('users', 'last_failed_login_attempt')%0A op.drop_column('users', 'failed_login_attempts')%0A ### end Alembic commands ###%0A
cee7f23df93f4a09550348e30709aa1e6e6969fc
use net ip availability api def from neutron-lib
neutron/extensions/network_ip_availability.py
neutron/extensions/network_ip_availability.py
# Copyright 2016 GoDaddy. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from neutron_lib.api import extensions as api_extensions import neutron.api.extensions as extensions import neutron.api.v2.base as base import neutron.services.network_ip_availability.plugin as plugin RESOURCE_NAME = "network_ip_availability" RESOURCE_PLURAL = "network_ip_availabilities" COLLECTION_NAME = RESOURCE_PLURAL.replace('_', '-') EXT_ALIAS = RESOURCE_NAME.replace('_', '-') RESOURCE_ATTRIBUTE_MAP = { RESOURCE_PLURAL: { 'network_id': {'allow_post': False, 'allow_put': False, 'is_visible': True}, 'network_name': {'allow_post': False, 'allow_put': False, 'is_visible': True}, 'tenant_id': {'allow_post': False, 'allow_put': False, 'is_visible': True}, 'total_ips': {'allow_post': False, 'allow_put': False, 'is_visible': True}, 'used_ips': {'allow_post': False, 'allow_put': False, 'is_visible': True}, 'subnet_ip_availability': {'allow_post': False, 'allow_put': False, 'is_visible': True}, # TODO(wwriverrat) Make composite attribute for subnet_ip_availability } } class Network_ip_availability(api_extensions.ExtensionDescriptor): """Extension class supporting network ip availability information.""" @classmethod def get_name(cls): return "Network IP Availability" @classmethod def get_alias(cls): return EXT_ALIAS @classmethod def get_description(cls): return "Provides IP availability data for each network and subnet." @classmethod def get_updated(cls): return "2015-09-24T00:00:00-00:00" @classmethod def get_resources(cls): """Returns Extended Resource for service type management.""" resource_attributes = RESOURCE_ATTRIBUTE_MAP[RESOURCE_PLURAL] controller = base.create_resource( RESOURCE_PLURAL, RESOURCE_NAME, plugin.NetworkIPAvailabilityPlugin.get_instance(), resource_attributes) return [extensions.ResourceExtension(COLLECTION_NAME, controller, attr_map=resource_attributes)] def get_extended_resources(self, version): if version == "2.0": return RESOURCE_ATTRIBUTE_MAP else: return {}
Python
0.000003
@@ -577,16 +577,90 @@ cense.%0A%0A +from neutron_lib.api.definitions import network_ip_availability as apidef%0A from neu @@ -858,999 +858,8 @@ in%0A%0A -RESOURCE_NAME = %22network_ip_availability%22%0ARESOURCE_PLURAL = %22network_ip_availabilities%22%0ACOLLECTION_NAME = RESOURCE_PLURAL.replace('_', '-')%0AEXT_ALIAS = RESOURCE_NAME.replace('_', '-')%0A%0ARESOURCE_ATTRIBUTE_MAP = %7B%0A RESOURCE_PLURAL: %7B%0A 'network_id': %7B'allow_post': False, 'allow_put': False,%0A 'is_visible': True%7D,%0A 'network_name': %7B'allow_post': False, 'allow_put': False,%0A 'is_visible': True%7D,%0A 'tenant_id': %7B'allow_post': False, 'allow_put': False,%0A 'is_visible': True%7D,%0A 'total_ips': %7B'allow_post': False, 'allow_put': False,%0A 'is_visible': True%7D,%0A 'used_ips': %7B'allow_post': False, 'allow_put': False,%0A 'is_visible': True%7D,%0A 'subnet_ip_availability': %7B'allow_post': False, 'allow_put': False,%0A 'is_visible': True%7D,%0A # TODO(wwriverrat) Make composite attribute for subnet_ip_availability%0A %7D%0A%7D%0A%0A %0Acla @@ -900,16 +900,19 @@ ensions. +API Extensio @@ -1003,367 +1003,35 @@ %22%22%22%0A -%0A -@classmethod%0A def get_name(cls):%0A return %22Network IP Availability%22%0A%0A @classmethod%0A def get_alias(cls):%0A return EXT_ALIAS%0A%0A @classmethod%0A def get_description(cls):%0A return %22Provides IP availability data for each network and subnet.%22%0A%0A @classmethod%0A def get_updated(cls):%0A return %222015-09-24T00:00:00-00:00%22 +api_definition = apidef %0A%0A @@ -1172,16 +1172,23 @@ butes = +apidef. RESOURCE @@ -1202,16 +1202,36 @@ UTE_MAP%5B +%0A apidef. RESOURCE @@ -1286,32 +1286,39 @@ ce(%0A +apidef. RESOURCE_PLURAL, @@ -1330,16 +1330,23 @@ +apidef. RESOURCE @@ -1493,16 +1493,23 @@ tension( +apidef. COLLECTI @@ -1654,159 +1654,4 @@ s)%5D%0A -%0A def get_extended_resources(self, version):%0A if version == %222.0%22:%0A return RESOURCE_ATTRIBUTE_MAP%0A else:%0A return %7B%7D%0A
6fabbe85bb74788641897daf8b162eac3d47b0aa
Add script for downloading Indonesia price data
data_crunching/indonesia_timeseries/download_indonesia_prices.py
data_crunching/indonesia_timeseries/download_indonesia_prices.py
Python
0
@@ -0,0 +1,2158 @@ +#!/usr/bin/env python2%0A%0Aimport urllib2%0Aimport shutil%0Aimport re%0Aimport sys%0Aimport datetime%0Afrom lxml import etree%0A%0Ausage_str = %22%22%22%0AThis scripts downloads daily food prices from http://m.pip.kementan.org/index.php (Indonesia).%0A%0AProvide date in DD/MM/YYYY format.%0A%0AExample:%0A%0A ./download_indonesia_prices.py 15/03/2013%0A%22%22%22%0A%0Adef download_table(date):%0A %22%22%22Download price table for a given date%22%22%22%0A%0A main_url = 'http://m.pip.kementan.org/index.php'%0A%0A params = 'laporan=LHK-01&tanggal=%25s&bulan=%25s&tahun=%25s&pilihlaporan=View+Laporan' %25 (date.day, date.month, date.year)%0A%0A req = urllib2.Request(main_url, params)%0A response = urllib2.urlopen(req)%0A html_code = response.read()%0A%0A regex = re.compile(r'%3Cdiv id=%22content%22 align=%22center%22%3E.*(%3Ctable.+%3C/table%3E)', re.DOTALL)%0A match = regex.search(html_code)%0A if not match:%0A print %22ERROR: table not detected%22%0A sys.exit(1)%0A table_html = match.group(1)%0A%0A # Remove commas%0A table_html = re.sub(r'(?%3C=%5Cd),(?=%5Cd)', '', table_html)%0A%0A table = etree.XML(table_html)%0A rows = iter(table)%0A actual_headers = %5Bcol.text for col in next(rows)%5D%0A # TODO: translate this bullshit ;)%0A headers = %5B'Dried Grain Harvest', 'Dry unhusked', 'Rice Medium', 'Rice Premium', 'Corn', 'Local soybean', 'Local Peanuts', 'Green Beans', 'Cassava', 'Sweet potato', 'Cassava spindles'%5D%0A print %22; %22.join(headers), %22%5Cn%22%0A %0A%0A # Print table%0A for row in rows:%0A if all(v.text is None for v in row):%0A continue%0A print ('''%22%25s%22''') %25 row%5B0%5D.text,%0A for col in row%5B1:%5D:%0A print col.text,%0A print %0A%0Adef parse_date(date_string):%0A %22%22%22Check date%22%22%22%0A%0A match = re.match(r'(%5Cd%7B2%7D)/(%5Cd%7B2%7D)/(%5Cd%7B4%7D)', date_string)%0A if not match:%0A sys.exit(%22ERROR: invalid date%22)%0A day, month, year = int(match.group(1)), int(match.group(2)), int(match.group(3))%0A return datetime.date(year, month, day)%0A%0Adef usage():%0A print usage_str%0A%0Aif __name__ == %22__main__%22:%0A if len(sys.argv) == 1 or sys.argv%5B1%5D in %5B'-h', '--help'%5D:%0A usage()%0A sys.exit(0)%0A%0A date_string = sys.argv%5B1%5D%0A date = parse_date(date_string)%0A download_table(date)%0A%0A
46a842136e93ce21936cbf28950b6f0d358c3359
Fix super cleanUp for fullstack ProcessFixture
neutron/tests/fullstack/fullstack_fixtures.py
neutron/tests/fullstack/fullstack_fixtures.py
# Copyright 2015 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from distutils import spawn import functools import fixtures from neutronclient.common import exceptions as nc_exc from neutronclient.v2_0 import client from oslo_config import cfg from oslo_log import log as logging from oslo_utils import timeutils from neutron.agent.linux import async_process from neutron.agent.linux import utils from neutron.tests.common import net_helpers from neutron.tests.fullstack import config_fixtures LOG = logging.getLogger(__name__) # This should correspond the directory from which infra retrieves log files DEFAULT_LOG_DIR = '/opt/stack/logs' class ProcessFixture(fixtures.Fixture): def __init__(self, name, exec_name, config_filenames): self.name = name self.exec_name = exec_name self.config_filenames = config_filenames self.process = None def setUp(self): super(ProcessFixture, self).setUp() self.start() def start(self): fmt = self.name + "--%Y-%m-%d--%H%M%S.log" cmd = [spawn.find_executable(self.exec_name), '--log-dir', DEFAULT_LOG_DIR, '--log-file', timeutils.strtime(fmt=fmt)] for filename in self.config_filenames: cmd += ['--config-file', filename] self.process = async_process.AsyncProcess(cmd) self.process.start(block=True) def stop(self): self.process.stop(block=True) def cleanUp(self, *args, **kwargs): self.stop() super(ProcessFixture, self, *args, **kwargs) class EnvironmentFixture(fixtures.Fixture): def setUp(self): super(EnvironmentFixture, self).setUp() self.temp_dir = self.useFixture(fixtures.TempDir()).path self.neutron_server = self.useFixture( NeutronServerFixture(self.temp_dir)) def wait_until_env_is_up(self, agents_count=0): utils.wait_until_true( functools.partial(self._processes_are_ready, agents_count)) def _processes_are_ready(self, agents_count): try: running_agents = self.neutron_server.client.list_agents()['agents'] LOG.warn("There are %d agents running!", len(running_agents)) return len(running_agents) == agents_count except nc_exc.NeutronClientException: LOG.warn("neutron-server isn't up yet (cannot contact REST API).") return False class NeutronServerFixture(fixtures.Fixture): NEUTRON_SERVER = "neutron-server" def __init__(self, temp_dir): self.temp_dir = temp_dir def setUp(self): super(NeutronServerFixture, self).setUp() self.neutron_cfg_fixture = config_fixtures.NeutronConfigFixture( self.temp_dir, cfg.CONF.database.connection) self.plugin_cfg_fixture = config_fixtures.ML2ConfigFixture( self.temp_dir) self.useFixture(self.neutron_cfg_fixture) self.useFixture(self.plugin_cfg_fixture) self.neutron_config = self.neutron_cfg_fixture.config self.plugin_config = self.plugin_cfg_fixture.config config_filenames = [self.neutron_cfg_fixture.filename, self.plugin_cfg_fixture.filename] self.process_fixture = self.useFixture(ProcessFixture( name=self.NEUTRON_SERVER, exec_name=self.NEUTRON_SERVER, config_filenames=config_filenames)) utils.wait_until_true(self.server_is_live) def server_is_live(self): try: self.client.list_networks() return True except nc_exc.NeutronClientException: LOG.warn("neutron-server isn't up yet (cannot contact REST API).") return False @property def client(self): url = "http://127.0.0.1:%s" % self.neutron_config.DEFAULT.bind_port return client.Client(auth_strategy="noauth", endpoint_url=url) class OVSAgentFixture(fixtures.Fixture): NEUTRON_OVS_AGENT = "neutron-openvswitch-agent" def __init__(self, neutron_cfg_fixture, ml2_cfg_fixture): self.neutron_cfg_fixture = neutron_cfg_fixture self.plugin_cfg_fixture = ml2_cfg_fixture self.neutron_config = self.neutron_cfg_fixture.config self.plugin_config = self.plugin_cfg_fixture.config def setUp(self): super(OVSAgentFixture, self).setUp() self.useFixture(net_helpers.OVSBridgeFixture(self._get_br_int_name())) self.useFixture(net_helpers.OVSBridgeFixture(self._get_br_phys_name())) config_filenames = [self.neutron_cfg_fixture.filename, self.plugin_cfg_fixture.filename] self.process_fixture = self.useFixture(ProcessFixture( name=self.NEUTRON_OVS_AGENT, exec_name=self.NEUTRON_OVS_AGENT, config_filenames=config_filenames)) def _get_br_int_name(self): return self.plugin_config.ovs.integration_bridge def _get_br_phys_name(self): return self.plugin_config.ovs.bridge_mappings.split(':')[1] class L3AgentFixture(fixtures.Fixture): NEUTRON_L3_AGENT = "neutron-l3-agent" def __init__(self, temp_dir, neutron_cfg_fixture, integration_bridge_name): self.temp_dir = temp_dir self.neutron_cfg_fixture = neutron_cfg_fixture self.neutron_config = self.neutron_cfg_fixture.config self.integration_bridge_name = integration_bridge_name def setUp(self): super(L3AgentFixture, self).setUp() self.plugin_cfg_fixture = config_fixtures.L3ConfigFixture( self.temp_dir, self.integration_bridge_name) self.useFixture(self.plugin_cfg_fixture) self.plugin_config = self.plugin_cfg_fixture.config self.useFixture(net_helpers.OVSBridgeFixture(self._get_br_ex_name())) config_filenames = [self.neutron_cfg_fixture.filename, self.plugin_cfg_fixture.filename] self.process_fixture = self.useFixture(ProcessFixture( name=self.NEUTRON_L3_AGENT, exec_name=self.NEUTRON_L3_AGENT, config_filenames=config_filenames)) def _get_br_ex_name(self): return self.plugin_config.DEFAULT.external_network_bridge
Python
0.000001
@@ -2072,26 +2072,34 @@ ixture, self -, +).cleanUp( *args, **kwa
6c524df7f0e1240237a1cd53858e6eafed8a70a5
Define liblouis_nacl nexe for MIPS
third_party/liblouis/liblouis_nacl.gyp
third_party/liblouis/liblouis_nacl.gyp
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. { 'variables': { 'braille_test_data_dir': '<(PRODUCT_DIR)/chromevox_test_data/braille', 'braille_chromevox_dir': '<(PRODUCT_DIR)/resources/chromeos/chromevox/chromevox/background/braille', 'table_files': [ '>!@pymod_do_main(liblouis_list_tables -D overrides/tables -D src/tables tables.json)', ], }, # x86 targets build both 32 and 64 bit binaries by default. We only need # the one that matches our target architecture. 'target_defaults': { 'conditions': [ ['target_arch=="ia32"', { 'variables': { 'enable_x86_64': 0, }, }], ['target_arch=="x64"', { 'variables': { 'enable_x86_32': 0, }, }], ], }, 'includes': [ '../../build/common_untrusted.gypi', ], 'conditions': [ ['disable_nacl==0 and disable_nacl_untrusted==0', { 'targets': [ { 'target_name': 'liblouis_nacl', 'type': 'none', 'variables': { 'nacl_untrusted_build': 1, 'nlib_target': 'liblouis_nacl.a', 'build_newlib': 1, }, 'compile_flags': [ '-Wno-switch', '-Wno-unused-but-set-variable', ], 'include_dirs': [ 'overrides/liblouis', 'src/liblouis', '.', ], 'direct_dependent_settings': { 'include_dirs': [ 'overrides', ], }, 'sources': [ 'overrides/liblouis/config.h', 'overrides/liblouis/liblouis.h', 'overrides/liblouis/compileTranslationTable.c', 'src/liblouis/lou_backTranslateString.c', 'src/liblouis/lou_translateString.c', 'src/liblouis/transcommon.ci', 'src/liblouis/wrappers.c', ], 'dependencies': [ '../../native_client/tools.gyp:prep_toolchain', ], }, { 'target_name': 'liblouis_nacl_wrapper_nacl', 'type': 'none', 'variables': { 'nacl_untrusted_build': 1, 'nexe_target': 'liblouis_nacl', 'out_newlib64': '<(braille_test_data_dir)/>(nexe_target)_x86_64.nexe', 'out_newlib32': '<(braille_test_data_dir)/>(nexe_target)_x86_32.nexe', 'out_newlib_arm': '<(braille_test_data_dir)/>(nexe_target)_arm.nexe', 'build_newlib': 1, 'extra_args': [ '--strip-debug', ], 'nmf': '<(braille_test_data_dir)/>(nexe_target).nmf', 'target_conditions': [ ['enable_x86_64==1', { 'nexe_files': ['>(out_newlib64)'], }], ['enable_x86_32==1', { 'nexe_files': ['>(out_newlib32)'], }], ['enable_arm==1', { 'nexe_files': ['>(out_newlib_arm)'], }], ], }, 'sources': [ 'nacl_wrapper/liblouis_instance.h', 'nacl_wrapper/liblouis_instance.cc', 'nacl_wrapper/liblouis_module.h', 'nacl_wrapper/liblouis_module.cc', 'nacl_wrapper/liblouis_wrapper.h', 'nacl_wrapper/liblouis_wrapper.cc', 'nacl_wrapper/translation_params.h', 'nacl_wrapper/translation_result.h', ], 'link_flags': [ '-lppapi', '-lppapi_cpp', '-llouis_nacl', '-ljsoncpp_nacl', '-lpthread', '-lnacl_io', ], 'dependencies': [ '../../native_client/src/untrusted/nacl/nacl.gyp:nacl_lib', '../../native_client/tools.gyp:prep_toolchain', '../../native_client_sdk/native_client_sdk_untrusted.gyp:nacl_io_untrusted', '../../ppapi/native_client/native_client.gyp:ppapi_lib', '../../ppapi/ppapi_nacl.gyp:ppapi_cpp_lib', '../jsoncpp/jsoncpp_nacl.gyp:jsoncpp_nacl', 'liblouis_nacl', ], 'actions': [ { 'action_name': 'Generate NEWLIB NMF', 'inputs': [ '>@(nexe_files)', ], 'outputs': ['>(nmf)'], 'action': [ 'python', '<(DEPTH)/native_client_sdk/src/tools/create_nmf.py', '>@(_inputs)', '--output=>(nmf)', ], }, ], # Copy specific files into the product directory to avoid # copying over the unstripped binary file. 'copies': [ { 'destination': '<(braille_chromevox_dir)', 'files': [ '<(nmf)', '>@(nexe_files)', 'tables.json', ], }, { 'destination': '<(braille_chromevox_dir)/tables', 'files': [ '<@(table_files)', ], }, ], }, { 'target_name': 'liblouis_test_data', 'type': 'none', 'variables': { 'test_extension_dir': '<(DEPTH)/chrome/test/data/chromeos/liblouis_nacl', }, 'dependencies': [ 'liblouis_nacl_wrapper_nacl', ], 'copies': [ { 'destination': '<(braille_test_data_dir)', 'files': [ 'tables.json', '<(test_extension_dir)/manifest.json', '<(test_extension_dir)/test.js', ], }, { 'destination': '<(braille_test_data_dir)/tables', 'files': [ '<@(table_files)', ], }, ], }, ], }], ], }
Python
0.997255
@@ -2541,32 +2541,118 @@ get)_arm.nexe',%0A + 'out_newlib_mips': '%3C(braille_test_data_dir)/%3E(nexe_target)_mips32.nexe',%0A 'bui @@ -3150,32 +3150,139 @@ %7D%5D,%0A + %5B'enable_mips==1', %7B%0A 'nexe_files': %5B'%3E(out_newlib_mips)'%5D,%0A %7D%5D,%0A %5D,%0A
4fdf2c32bcd937ba2fc21dbaad8a81620c02fb17
Fix part of #5134: Add test for core.storage.config.gae_models (#5565)
core/storage/config/gae_models_test.py
core/storage/config/gae_models_test.py
Python
0.000001
@@ -0,0 +1,1662 @@ +# coding: utf-8%0A#%0A# Copyright 2018 The Oppia Authors. All Rights Reserved.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS-IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0Afrom core.platform import models%0Afrom core.tests import test_utils%0Aimport feconf%0A%0A(config_models,) = models.Registry.import_models(%5Bmodels.NAMES.config%5D)%0A%0A%0Aclass ConfigPropertyModelUnitTests(test_utils.GenericTestBase):%0A %22%22%22Test ConfigPropertyModel class.%22%22%22%0A%0A def test_create_model(self):%0A config_model = config_models.ConfigPropertyModel(%0A value='b')%0A self.assertEqual(config_model.value, 'b')%0A%0A def test_commit(self):%0A config_model1 = config_models.ConfigPropertyModel(%0A id='config_model1', value='c')%0A config_model1.commit(feconf.SYSTEM_COMMITTER_ID, %5B%5D)%0A retrieved_model1 = config_models.ConfigPropertyModel.get_version(%0A 'config_model1', 1)%0A self.assertEqual(retrieved_model1.value, 'c')%0A retrieved_model1.value = 'd'%0A retrieved_model1.commit(feconf.SYSTEM_COMMITTER_ID, %5B%5D)%0A retrieved_model2 = config_models.ConfigPropertyModel.get_version(%0A 'config_model1', 2)%0A self.assertEqual(retrieved_model2.value, 'd')%0A
65969d0251dc5031328132cf2043f1f76ee90d72
Include the demo as a separate file
demo.py
demo.py
Python
0
@@ -0,0 +1,2398 @@ +%0Aimport sys, curses%0A%0Afrom cwidgets import *%0Afrom cwidgets import _LOG%0A%0Adef demo(window):%0A # Create the root of the widget hierarchy.%0A root = WidgetRoot(window)%0A # Wrap the UI in a Viewport to avoid crashes at small resolutions.%0A vp = root.add(Viewport())%0A # Push the UI together to avoid spreading everyting over the screen.%0A cont = vp.add(AlignContainer())%0A # The user-visible %22window%22; with a border and the bottom line pushed%0A # inside by one line height.%0A win = cont.add(MarginContainer(border=True, insets=(0, 0, 1, 0)))%0A # Decoratively enclose the title%0A title_wrapper = win.add(TeeContainer(), slot=MarginContainer.POS_TOP)%0A # Add the title%0A title = title_wrapper.add(Label('cwidgets demo'))%0A # Add the content. This could also be a nested Viewport containing%0A # a more complex UI.%0A # When text is typed into the entry box, it will increase smoothly (along%0A # with the remaining UI) until it's 70 columns or 20 rows (because of the%0A # multiline setting, it can have multiple lines) large, then, it will not%0A # grow further (along the corresponding axis), and scroll instead.%0A content = win.add(EntryBox('Lorem ipsum dolor sit amet', multiline=True,%0A cmaxsize=(70, 20)))%0A # Bind a vertical scrollbar to the content%0A sbv = win.add(content.bind(Scrollbar(Scrollbar.DIR_VERTICAL)),%0A slot=MarginContainer.POS_RIGHT)%0A # The bottom contains a line of buttons stacked below a scrollbar.%0A bottom = win.add(VerticalContainer(), slot=MarginContainer.POS_BOTTOM)%0A # Add the horizontal scrollbar.%0A sbh = bottom.add(content.bind(Scrollbar(Scrollbar.DIR_HORIZONTAL)))%0A # The buttons are laid out horizontally.%0A buttons = bottom.add(HorizontalContainer())%0A # A bare Widget as %22glue%22 to fill the space. An AlignContainer would%0A # have been possible as well.%0A buttons.add(Widget(), weight=1)%0A # The first button%0A buttons.add(Button('OK', sys.exit))%0A # A little spacer between the buttons%0A buttons.add(Widget(cminsize=(1, 1)))%0A # The second button%0A buttons.add(Button('Cancel', lambda: sys.exit(1)))%0A # Another glue%0A buttons.add(Widget(), weight=1)%0A # Run it.%0A root.main()%0A%0Atry:%0A init()%0A curses.wrapper(demo)%0Afinally:%0A if _LOG:%0A _LOG.append('')%0A sys.stderr.write('%5Cn'.join(map(str, _LOG)))%0A sys.stderr.flush()%0A
3968c53c4577b2efe9ef3cd2de76b688a26517d9
Add gpio example
chapter2/gpio.py
chapter2/gpio.py
Python
0.000001
@@ -0,0 +1,124 @@ +import RPi.GPIO as GPIO%0AGPIO.setmode(GPIO.BOARD)%0AGPIO.setup(5, GPIO.OUT)%0AGPIO.output(5, GPIO.HIGH)%0AGPIO.output(5, GPIO.LOW)%0A
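A slightly fuller sketch of the GPIO pattern in the diff above — the blink loop, `time.sleep`, and the `GPIO.cleanup()` call are illustrative additions, not part of the commit; it assumes a Raspberry Pi with an LED wired to physical pin 5:

```python
import time
import RPi.GPIO as GPIO

GPIO.setmode(GPIO.BOARD)          # address pins by physical header position
GPIO.setup(5, GPIO.OUT)
try:
    for _ in range(5):            # blink five times
        GPIO.output(5, GPIO.HIGH)
        time.sleep(0.5)
        GPIO.output(5, GPIO.LOW)
        time.sleep(0.5)
finally:
    GPIO.cleanup()                # release the pin so later scripts start clean
```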
dc0bb07da52fd11a7980b9f36c38fcdb7f9c6ba5
Add `edit.py` to be able to edit a view asynchronously
edit.py
edit.py
Python
0.000001
@@ -0,0 +1,1839 @@ +# edit.py%0A# buffer editing for both ST2 and ST3 that %22just works%22%0A%0Aimport sublime%0Aimport sublime_plugin%0Afrom collections import defaultdict%0A%0Atry:%0A sublime.edit_storage%0Aexcept AttributeError:%0A sublime.edit_storage = %7B%7D%0A%0Aclass EditStep:%0A def __init__(self, cmd, *args):%0A self.cmd = cmd%0A self.args = args%0A%0A def run(self, view, edit):%0A if self.cmd == 'callback':%0A return self.args%5B0%5D(view, edit)%0A%0A funcs = %7B%0A 'insert': view.insert,%0A 'erase': view.erase,%0A 'replace': view.replace,%0A %7D%0A func = funcs.get(self.cmd)%0A if func:%0A func(edit, *self.args)%0A%0A%0Aclass Edit:%0A defer = defaultdict(dict)%0A%0A def __init__(self, view):%0A self.view = view%0A self.steps = %5B%5D%0A%0A def step(self, cmd, *args):%0A step = EditStep(cmd, *args)%0A self.steps.append(step)%0A%0A def insert(self, point, string):%0A self.step('insert', point, string)%0A%0A def erase(self, region):%0A self.step('erase', region)%0A%0A def replace(self, region, string):%0A self.step('replace', region, string)%0A%0A def callback(self, func):%0A self.step('callback', func)%0A%0A def run(self, view, edit):%0A for step in self.steps:%0A step.run(view, edit)%0A%0A def __enter__(self):%0A return self%0A%0A def __exit__(self, type, value, traceback):%0A view = self.view%0A if sublime.version().startswith('2'):%0A edit = view.begin_edit()%0A self.run(edit)%0A view.end_edit(edit)%0A else:%0A key = str(hash(tuple(self.steps)))%0A sublime.edit_storage%5Bkey%5D = self.run%0A view.run_command('apply_edit', %7B'key': key%7D)%0A%0A%0Aclass apply_edit(sublime_plugin.TextCommand):%0A def run(self, edit, key):%0A sublime.edit_storage.pop(key)(self.view, edit)%0A
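A usage sketch for the `Edit` wrapper above, as it might sit in a plugin file — `ExampleCommand` is a hypothetical name. As an aside, the committed ST2 branch calls `self.run(edit)` with one argument while `run` is declared as `run(self, view, edit)`, which looks like a latent bug on Sublime Text 2:

```python
import sublime
import sublime_plugin
from edit import Edit  # the module added in this commit

class ExampleCommand(sublime_plugin.TextCommand):
    def run(self, edit_token):
        # Edit acquires its own edit object on __exit__, so the token
        # Sublime passes in can be ignored; all steps land in one undo group.
        with Edit(self.view) as edit:
            edit.insert(0, "# header\n")
            edit.replace(sublime.Region(0, 1), "#")
```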
a795d94a9c885b97ab5bffc313524ae46626d556
Add simple function-size analysis tool.
tools/analyze_code_size.py
tools/analyze_code_size.py
Python
0
@@ -0,0 +1,2189 @@ +%0Aimport os%0Aimport re%0Aimport sys%0Aimport optparse%0A%0AMARKER_START_FUNCS = %22// EMSCRIPTEN_START_FUNCS%22%0AMARKER_END_FUNCS = %22// EMSCRIPTEN_END_FUNCS%22%0A%0AFUNCTION_CODE_RE = re.compile(%0A r%22function (?P%3Cname%3E%5Ba-zA-Z0-9_%5D+)(?P%3Cdefn%3E.*?)((?=function)%7C(?=$))%22%0A)%0A%0A%0Adef analyze_code_size(fileobj, opts):%0A funcs = %7B%7D%0A name_re = None%0A if opts.grep is not None:%0A name_re = re.compile(opts.grep, re.I)%0A # Split out and analyze the code for each individual function.%0A # XXX TODO: read incrementally to reduce memory usage.%0A data = fileobj.read()%0A pre_code, data = data.split(MARKER_START_FUNCS, 1)%0A data, post_code = data.split(MARKER_END_FUNCS, 1)%0A for match in FUNCTION_CODE_RE.finditer(data):%0A name = match.group(%22name%22)%0A defn = match.group(%22defn%22)%0A if name_re and not name_re.search(name):%0A continue%0A funcs%5Bname%5D = FunctionMetrics(name, defn)%0A # Print summary metrics.%0A total = 0%0A funcs_by_size = ((f.size, f.name) for f in funcs.itervalues())%0A for (size, name) in sorted(funcs_by_size, reverse=True):%0A print size, name, human_readable(size)%0A total += size%0A print %22Total size:%22, total, human_readable(total)%0A%0A%0Aclass FunctionMetrics(object):%0A%0A def __init__(self, name, defn):%0A self.name = name%0A self.defn = defn%0A self.size = len(defn)%0A%0A%0Adef human_readable(size):%0A units = ((1024*1024, %22M%22), (1024, %22k%22))%0A for (scale, unit) in units:%0A scale = float(scale)%0A if size / scale %3E 0.1:%0A return %22(%25.2f%25s)%22 %25 (size / scale, unit)%0A return %22%22%0A %0A%0A%0Adef main(args=None):%0A usage = %22usage: %25prog %5Boptions%5D file%22%0A descr = %22Analyze code size and complexity for emscripten-compiled output%22%0A parser = optparse.OptionParser(usage=usage, description=descr)%0A parser.add_option(%22-g%22, %22--grep%22, metavar=%22REGEXP%22,%0A help=%22only analyze functions matching this regexp%22)%0A%0A opts, args = parser.parse_args(args)%0A with open(args%5B0%5D, %22r%22) as infile:%0A analyze_code_size(infile, opts)%0A return 0%0A%0A%0Aif __name__ == %22__main__%22:%0A try:%0A exitcode = main()%0A except KeyboardInterrupt:%0A exitcode = 1%0A sys.exit(exitcode)%0A
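The analyzer above is Python 2 code (print statements, `itervalues`); besides the CLI, it can be driven programmatically. A minimal sketch — the module import name and the `Opts` stand-in are assumptions; the blob only needs to contain the two EMSCRIPTEN markers:

```python
import StringIO               # Python 2, matching the script
import analyze_code_size      # hypothetical import of the script above

blob = ("var pre;\n"
        "// EMSCRIPTEN_START_FUNCS\n"
        "function tiny(){return 1}"
        "function bigger(){var t=0;t=t+1;return t}"
        "// EMSCRIPTEN_END_FUNCS\n"
        "var post;\n")

class Opts(object):
    grep = None               # analyze every function

analyze_code_size.analyze_code_size(StringIO.StringIO(blob), Opts())
```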
2ce7bcdd6606cb1590febf6430a7635462b09d74
fix #61: prefer configuration files under script dir
lixian_config.py
lixian_config.py
import os import os.path def get_config_path(filename): if os.path.exists(filename): return filename user_home = os.getenv('USERPROFILE') or os.getenv('HOME') lixian_home = os.getenv('LIXIAN_HOME') or user_home return os.path.join(lixian_home, filename) LIXIAN_DEFAULT_CONFIG = get_config_path('.xunlei.lixian.config') LIXIAN_DEFAULT_COOKIES = get_config_path('.xunlei.lixian.cookies') def load_config(path): values = {} if os.path.exists(path): with open(path) as x: for line in x.readlines(): line = line.strip() if line: if line.startswith('--'): line = line.lstrip('-') if line.startswith('no-'): values[line[3:]] = False elif '=' in line: k, v = line.split('=', 1) values[k] = v else: values[line] = True else: raise NotImplementedError(line) return values def dump_config(path, values): with open(path, 'w') as x: for k in values: v = values[k] if v is True: x.write('--%s\n'%k) elif v is False: x.write('--no-%s\n'%k) else: x.write('--%s=%s\n'%(k, v)) class Config: def __init__(self, path=LIXIAN_DEFAULT_CONFIG): self.path = path self.values = load_config(path) def put(self, k, v=True): self.values[k] = v dump_config(self.path, self.values) def get(self, k, v=None): return self.values.get(k, v) def delete(self, k): if k in self.values: del self.values[k] dump_config(self.path, self.values) def source(self): if os.path.exists(self.path): with open(self.path) as x: return x.read() def __str__(self): return '<Config{%s}>' % self.values global_config = Config() def put_config(k, v=True): if k.startswith('no-') and v is True: k = k[3:] v = False global_config.put(k, v) def get_config(k, v=None): return global_config.get(k, v) def delete_config(k): return global_config.delete(k) def source_config(): return global_config.source()
Python
0
@@ -99,16 +99,130 @@ ilename%0A +%09import sys%0A%09local_path = os.path.join(sys.path%5B0%5D, filename)%0A%09if os.path.exists(local_path):%0A%09%09return local_path%0A %09user_ho
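For readability, here is the `get_config_path` lookup with the diff above applied — the script's own directory (`sys.path[0]`) now wins over the home-directory fallback:

```python
import os
import os.path

def get_config_path(filename):
    if os.path.exists(filename):              # 1. current working directory
        return filename
    import sys                                # 2. new in this commit:
    local_path = os.path.join(sys.path[0], filename)  # prefer the script dir
    if os.path.exists(local_path):
        return local_path
    user_home = os.getenv('USERPROFILE') or os.getenv('HOME')
    lixian_home = os.getenv('LIXIAN_HOME') or user_home
    return os.path.join(lixian_home, filename)        # 3. home fallback
```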
dfe2bd52fd2e561a79c91d4ff34fbead8a26c1c3
Create init.py
init.py
init.py
Python
0.000001
@@ -0,0 +1,2315 @@ +#!/usr/bin/env python%0Aimport sys%0Aimport os%0Aimport psycopg2%0A%0Adef dump_table(table_name, conn):%0A query = %22SELECT * FROM %22+table_name+%22 LIMIT 1%22%0A cur = conn.cursor()%0A cur.execute(query)%0A rows = cur.fetchall()%0A description = cur.description%0A columns = %22'INSERT INTO %22+table_name+%22 VALUES ('%22%0A for desc in description:%0A columns += %22%7C%7CCASE WHEN %22+desc.name+%22 IS NULL THEN 'NULL' ELSE ''''%7C%7C%22+desc.name+%22::VARCHAR%7C%7C'''' END %7C%7C','%22%0A columns = columns%5B0:len(columns)-3%5D%0A columns += %22')'%22%0A print %22SELECT %22+columns+%22 FROM %22+table_name%0A%0Adef update_flex_version(vl_flex_version, hostname, conn):%0A if (hostname == %22alpha%22):%0A hostname = %22alpha-asset.valebroker.com.br%22%0A else:%0A hostname = %22alpha-asset-%22+hostname+%22.valebroker.com.br%22%0A cur = conn.cursor()%0A cur.execute(%22UPDATE tb_contract_host SET vl_flex_version = %25s WHERE hostname = %25s%22, (vl_flex_version, hostname))%0A conn.commit()%0A print %22Host %22+hostname+%22 updated to Flex version %22+vl_flex_version%0A%0Adef show_error(conn):%0A cur = conn.cursor()%0A cur.execute(%22SELECT stack_trace, detail FROM tb_log_error WHERE id_investor = 5801 ORDER BY dt_error DESC LIMIT 1%22)%0A rows = cur.fetchall()%0A print rows%5B0%5D%5B0%5D%0A print rows%5B0%5D%5B1%5D%0A%0Adef get_connection():%0A postgres_database = os.environ%5B'postgres_database'%5D%0A postgres_user = os.environ%5B'postgres_user'%5D%0A postgres_password = os.environ%5B'postgres_password'%5D%0A postgres_host = os.environ%5B'postgres_host'%5D%0A postgres_port = os.environ%5B'postgres_port'%5D%0A return psycopg2.connect(database=postgres_database, user=postgres_user, password=postgres_password, host=postgres_host, port=postgres_port)%0A%0A# def set_enviroment_vars():%0A# f = open('/tmp/envs.conf')%0A# for line in f:%0A%0Adef init(args):%0A conn = get_connection()%0A # docker-compose up%0A if (os.environ%5B'action'%5D == %22dump_table%22):%0A # docker-compose run dump_table tb_asset_operation%0A dump_table(args%5B0%5D, conn)%0A if (os.environ%5B'action'%5D == %22update_flex_version%22):%0A # docker-compose run update_flex_version 4324 alpha/rf/support%0A update_flex_version(args%5B0%5D, args%5B1%5D, conn)%0A if (os.environ%5B'action'%5D == %22show_error%22):%0A # docker-compose run show_error%0A show_error(conn)%0A conn.close()%0A%0Aif __name__ == %22__main__%22:%0A init(sys.argv%5B1:%5D)%0A
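One detail worth flagging in the module above: `dump_table` splices `table_name` straight into the SQL string. A safer variant — a sketch assuming psycopg2 >= 2.7, where the `psycopg2.sql` composition API is available:

```python
from psycopg2 import sql

def fetch_one_row(table_name, conn):
    # sql.Identifier quotes the table name instead of concatenating raw
    # text, closing the injection hole in the string-built version.
    query = sql.SQL("SELECT * FROM {} LIMIT 1").format(sql.Identifier(table_name))
    cur = conn.cursor()
    cur.execute(query)
    return cur.fetchone()
```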
bfaeeec3f5f5582822e2918491090815a606ba44
Add test to make sure imports and __all__ match
test/test_api.py
test/test_api.py
Python
0.000037
@@ -0,0 +1,271 @@ +# -*- coding: utf-8 -*-%0A%0Aimport warthog.api%0A%0A%0Adef test_public_exports():%0A exports = set(%5Bitem for item in dir(warthog.api) if not item.startswith('_')%5D)%0A declared = set(warthog.api.__all__)%0A assert exports == declared, 'Exports and __all__ members should match'%0A
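The exports-versus-`__all__` check above generalizes to any module; a reusable sketch (note that `dir()` also reports imported submodules, so real modules sometimes need an ignore list):

```python
def assert_all_matches_exports(module):
    """Fail when __all__ and the public attributes of `module` disagree."""
    exports = {name for name in dir(module) if not name.startswith('_')}
    declared = set(module.__all__)
    missing = declared - exports       # promised in __all__ but not defined
    undeclared = exports - declared    # defined but not promised
    assert not missing and not undeclared, (missing, undeclared)
```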
48857638694ceca08c64d7b9c6825e2178c53279
Add function decorator to improve functools.wraps
pylearn2/utils/doc.py
pylearn2/utils/doc.py
Python
0.000001
@@ -0,0 +1,2763 @@ +%22%22%22%0ADocumentation-related helper classes/functions%0A%22%22%22%0A%0A%0Aclass soft_wraps:%0A %22%22%22%0A A Python decorator which concatenates two functions' docstrings: one%0A function is defined at initialization and the other one is defined when%0A soft_wraps is called.%0A%0A This helps reduce the ammount of documentation to write: one can use%0A this decorator on child classes' functions when their implementation is%0A similar to the one of the parent class. Conversely, if a function defined%0A in a child class departs from its parent's implementation, one can simply%0A explain the differences in a 'Notes' section without re-writing the whole%0A docstring.%0A%0A Examples%0A --------%0A %3E%3E%3E class Parent(object):%0A ... def f(x):%0A ... '''%0A ... Adds 1 to x%0A ... %0A ... Parameters%0A ... ----------%0A ... x : int%0A ... Variable to increment by 1%0A ...%0A ... Returns%0A ... -------%0A ... rval : int%0A ... x incremented by 1%0A ... '''%0A ... rval = x + 1%0A ... return rval%0A ...%0A %3E%3E%3E class Child(Parent):%0A ... @soft_wraps(Parent.f)%0A ... def f(x):%0A ... '''%0A ... Notes%0A ... -----%0A ... Also prints the incremented value%0A ... '''%0A ... rval = x + 1%0A ... print rval%0A ... return rval%0A ...%0A %3E%3E%3E c = Child()%0A %3E%3E%3E print c.f.__doc__%0A%0A Adds 1 to x%0A %0A Parameters%0A ----------%0A x : int%0A Variable to increment by 1%0A %0A Returns%0A -------%0A rval : int%0A x incremented by 1%0A %0A Notes%0A -----%0A Also prints the incremented value%0A %22%22%22%0A%0A def __init__(self, f, append=False):%0A %22%22%22%0A Parameters%0A ----------%0A f : function%0A Function whose docstring will be concatenated with the decorated%0A function's docstring%0A prepend : bool, optional%0A If True, appends f's docstring to the decorated function's%0A docstring instead of prepending it. Defaults to False.%0A %22%22%22%0A self.f = f%0A self.append = append%0A%0A def __call__(self, f):%0A %22%22%22%0A Prepend self.f's docstring to f's docstring (or append it if%0A %60self.append == True%60).%0A%0A Parameters%0A ----------%0A f : function%0A Function to decorate%0A%0A Returns%0A -------%0A f : function%0A Function f passed as argument with self.f's docstring%0A %7Bpre,ap%7Dpended to it%0A %22%22%22%0A if self.append:%0A f.__doc__ += + self.f.__doc__%0A else:%0A f.__doc__ = self.f.__doc__ + f.__doc__%0A%0A return f%0A
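One caveat on the committed code: the append branch reads `f.__doc__ += + self.f.__doc__`, and unary `+` on a string raises TypeError, so `append=True` cannot work as written; the code also assumes both docstrings are non-None. A corrected minimal sketch of the same decorator:

```python
class soft_wraps(object):
    """Concatenate the decorated function's docstring with another's."""

    def __init__(self, f, append=False):
        self.f = f
        self.append = append

    def __call__(self, f):
        mine, theirs = (f.__doc__ or ''), (self.f.__doc__ or '')
        # prepend by default; append when requested -- no stray unary '+'
        f.__doc__ = mine + theirs if self.append else theirs + mine
        return f
```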
dfca9c3d7dbbe97516a24bea89b917f7282c7dc7
Add problem rotate image
python/rotateImage.py
python/rotateImage.py
Python
0.000003
@@ -0,0 +1,902 @@ +# https://leetcode.com/problems/rotate-image/%0A%0Aclass Solution(object):%0A def rotate(self, matrix):%0A %22%22%22%0A :type matrix: List%5BList%5Bint%5D%5D%0A :rtype: void Do not return anything, modify matrix in-place instead.%0A %22%22%22%0A size = len(matrix)%0A for i in xrange(0, size/2):%0A for j in xrange(i, size-1-i):%0A t = matrix%5Bi%5D%5Bj%5D%0A matrix%5Bi%5D%5Bj%5D = matrix%5Bsize-j-1%5D%5Bi%5D%0A matrix%5Bsize-j-1%5D%5Bi%5D = matrix%5Bsize-i-1%5D%5Bsize-j-1%5D%0A matrix%5Bsize-i-1%5D%5Bsize-j-1%5D = matrix%5Bj%5D%5Bsize-i-1%5D%0A matrix%5Bj%5D%5Bsize-i-1%5D= t%0A%0A%0Amatrix = %5B%0A %5B1, 2, 3, 4, 5%5D,%0A %5B6, 7, 8, 9, 10%5D,%0A %5B11, 12, 13, 14, 15%5D,%0A %5B16, 17, 18, 19, 20%5D,%0A %5B21, 22, 23, 24, 25%5D,%0A%5D%0As = Solution()%0As.rotate(matrix)%0Aprint matrix%0A%0Amatrix = %5B%0A %5B1, 2, 3, 4%5D,%0A %5B5, 6, 7, 8%5D,%0A %5B9, 10, 11, 12%5D,%0A %5B13, 14, 15, 16%5D,%0A%5D%0As.rotate(matrix)%0Aprint matrix%0A
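The solution above rotates each square ring with a four-way cyclic swap, in place. An equivalent, easier-to-verify formulation — transpose, then reverse each row — shown here for comparison; this is an alternative, not the committed code:

```python
def rotate(matrix):
    """Rotate an n x n matrix 90 degrees clockwise, in place."""
    n = len(matrix)
    for i in range(n):
        for j in range(i + 1, n):
            matrix[i][j], matrix[j][i] = matrix[j][i], matrix[i][j]  # transpose
    for row in matrix:
        row.reverse()                    # mirror each row horizontally

m = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
rotate(m)
assert m == [[7, 4, 1], [8, 5, 2], [9, 6, 3]]
```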
cce6a4c2efe62c267b04f6ce75019d577428e2c9
add sensu_check_dict module
library/sensu_check_dict.py
library/sensu_check_dict.py
Python
0.000001
@@ -0,0 +1,2951 @@ +#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0A%0A# Copyright 2014, Blue Box Group, Inc.%0A# Copyright 2014, Craig Tracey %[email protected]%3E%0A# Copyright 2016, Paul Czarkowski %[email protected]%3E%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A#%0A%0Aimport os%0Aimport traceback%0A%0Afrom hashlib import md5%0Afrom jinja2 import Environment%0A%0Adef main():%0A%0A module = AnsibleModule(%0A argument_spec=dict(%0A name=dict(default=None, required=True),%0A check_dir=dict(default='/etc/sensu/conf.d/checks', required=False),%0A state=dict(default='present', required=False, choices=%5B'present','absent'%5D),%0A check=dict(type='dict', required=True)%0A )%0A )%0A%0A if module.params%5B'state'%5D == 'present':%0A try:%0A changed = False%0A check_path = '%25s/%25s.json' %25 (module.params%5B'check_dir'%5D, module.params%5B'name'%5D)%0A check=dict(%7B%0A 'checks': %7B%0A module.params%5B'name'%5D: module.params%5B'check'%5D%0A %7D%0A %7D)%0A%0A%0A if os.path.isfile(check_path):%0A with open(check_path) as fh:%0A if json.load(fh) == check:%0A module.exit_json(changed=False, result=%22ok%22)%0A else:%0A with open(check_path, %22w%22) as fh:%0A fh.write(json.dumps(check, indent=4))%0A module.exit_json(changed=True, result=%22changed%22)%0A else:%0A with open(check_path, %22w%22) as fh:%0A fh.write(json.dumps(check, indent=4))%0A module.exit_json(changed=True, result=%22created%22)%0A except Exception as e:%0A formatted_lines = traceback.format_exc()%0A module.fail_json(msg=%22creating the check failed: %25s %25s%22 %25 (e,formatted_lines))%0A%0A else:%0A try:%0A changed = False%0A check_path = '%25s/%25s.json' %25 (module.params%5B'check_dir'%5D, module.params%5B'name'%5D)%0A if os.path.isfile(check_path):%0A os.remove(check_path)%0A module.exit_json(changed=True, result=%22changed%22)%0A else:%0A module.exit_json(changed=False, result=%22ok%22)%0A except Exception as e:%0A formatted_lines = traceback.format_exc()%0A module.fail_json(msg=%22removing the check failed: %25s %25s%22 %25 (e,formatted_lines))%0A%0A# this is magic, see lib/ansible/module_common.py%0Afrom ansible.module_utils.basic import *%0Amain()%0A
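The idempotency trick in the module above — parse the on-disk JSON and compare it to the desired dict before writing — is useful outside Ansible too; a distilled sketch:

```python
import json
import os

def write_json_if_changed(path, desired):
    """Write `desired` to `path` as JSON; return True only if the file changed."""
    if os.path.isfile(path):
        with open(path) as fh:
            if json.load(fh) == desired:   # dict equality ignores key order
                return False               # already in the desired state
    with open(path, "w") as fh:
        fh.write(json.dumps(desired, indent=4))
    return True
```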
97e87a73d4161f7bde4fc9843bc1010ee6e064d7
Add doc-string to default init
learning/model.py
learning/model.py
#!/usr/bin/env python from __future__ import division import logging from six import iteritems from inspect import isfunction from collections import OrderedDict from recordtype import recordtype import numpy as np import theano import theano.tensor as T _logger = logging.getLogger(__name__) floatX = theano.config.floatX #------------------------------------------------------------------------------ def default_weights(n_in, n_out): scale = np.sqrt(6) / np.sqrt(n_in+n_out) return scale*(2*np.random.uniform(size=(n_in, n_out))-1) / n_in #------------------------------------------------------------------------------ HyperParam = recordtype('HyperParam', 'value name default help') ModelParam = recordtype('ModelParam', 'value name default help') #------------------------------------------------------------------------------ class Model(object): initialized = False def __init__(self, **hyper_params): self._model_params = OrderedDict() self._hyper_params = OrderedDict() self.initialized = True def _ensure_init(self): if not self.initialized: raise ValueError("Model base class not initialized yet!" "Call Model.__init__() before doing anything else!") def register_hyper_param(self, key, default=None, help=None): self._ensure_init() if self._hyper_params.has_key(key): raise ValueError('A hyper parameter named "%s" already exists' % key) if self._model_params.has_key(key): raise ValueError('A model parameter named "%s" already exists' % key) self._hyper_params[key] = HyperParam(name=key, value=None, default=default, help=help) def register_model_param(self, key, default=None, help=None): self._ensure_init() if self._hyper_params.has_key(key): raise ValueError('A hyper parameter named "%s" already exists' % key) if self._model_params.has_key(key): raise ValueError('A model parameter named "%s" already exists' % key) self._model_params[key] = ModelParam(name=key, value=None, default=default, help=help) #-------------------------------------------------------------------------- def get_hyper_param(self, key): """ Return the value of a predefined hyper parameter. """ param = self._hyper_params.get(key, None) if param is None: raise ValueError('Trying to access unknown hyper parameter "%s"' % key) if param.value is None: if isfunction(param.default): self.set_hyper_param(key, param.default()) else: self.set_hyper_param(key, param.default) return param.value def get_hyper_params(self, keys=None): """ """ if keys is None: keys = self._hyper_params.keys() return {k: self.get_hyper_param(k) for k in keys} else: return [self.get_hyper_param(k) for k in keys] def set_hyper_param(self, key, val=None): param = self._hyper_params.get(key, None) if param is None: raise ValueError('Trying to set unknown hyper parameter "%s"' % key) param.value = val def set_hyper_params(self, d): for key, val in iteritems(d): self.set_hyper_param(key, val) #------------------------------------------------------------------------ def get_model_param(self, key): """ Return the value of a predefined model parameter. """ param = self._model_params.get(key, None) if param is None: raise ValueError('Trying to access unknown model parameter "%s"' % key) if param.value is None: if isfunction(param.default): self.set_model_param(key, param.default()) else: self.set_model_param(key, param.default) return param.value def get_model_params(self, keys=None): """ """ if keys is None: keys = self._model_params.keys() return OrderedDict( [(k, self.get_model_param(k)) for k in keys] ) else: return [self.get_model_param(k) for k in keys] def set_model_param(self, key, val=None): param = self._model_params.get(key, None) if param is None: raise ValueError('Trying to set unknown model parameter "%s"' % key) if not isinstance(val, T.sharedvar.SharedVariable): if not isinstance(val, np.ndarray): val = np.asarray(val) if val.dtype == np.float: val = np.asarray(val, dtype=floatX) val = theano.shared(val, key) val.tag.test_value = val param.value = val def set_model_params(self, d): for key, val in iteritems(d): self.set_model_param(key, val) #------------------------------------------------------------------------ def __getattr__(self, name): if not self.initialized: raise AttributeError("'%s' object has no attribute '%s'" % (repr(self), name)) if name in self._model_params: return self.get_model_param(name) if name in self._hyper_params: return self.get_hyper_param(name) raise AttributeError("'%s' object has no attribute '%s'" % (repr(self), name)) def __setattr__(self, name, value): if not self.initialized: return object.__setattr__(self, name, value) if name in self._model_params: return self.set_model_param(name, value) if name in self._hyper_params: return self.set_hyper_param(name, value) return object.__setattr__(self, name, value) #------------------------------------------------------------------------ def model_params_from_dlog(self, dlog, row=-1): """ Load the model params form an open H5 file """ for key, param in iteritems(self._model_params): assert isinstance(param, ModelParam) value = dlog.load(key, row) shvar = para.value shvar.set_value(value) def model_params_to_dlog(self, dlog): """ Append all model params to dlog """ for key, param in iteritems(self._model_params): assert isinstance(param, HyperParam) shvar = param.value value = shvar.get_value() dlog.append(key, value) def hyper_params_from_dlog(self, dlog, row=-1): """ Load the hyper params form an open H5 file """ for key, param in iteritems(self._hyper_params): assert isinstance(param, HyperParam) value = dlog.load(key, row) self.set_hyper_param(key, value) def hyper_params_to_dlog(self, dlog): """ Append all hyper params to dlog """ for key, param in iteritems(self._hyper_params): assert isinstance(param, ModelParam) shvar = param.value value = shvar.get_value() dlog.append(key, value) #------------------------------------------------------------------------------
Python
0.000001
@@ -436,24 +436,159 @@ in, n_out):%0A + %22%22%22 Return a n_in * n_out shaped matrix with uniformly sampled elements %0A between - and + sqrt(6)/sqrt(n_in+n_out).%0A %22%22%22%0A scale =
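A side note on the docstring this commit adds: the code divides by an extra `n_in`, so the actual sampling range is ±sqrt(6)/(sqrt(n_in+n_out)·n_in) — narrower than the Glorot-style bound the docstring states. A standalone version with the factor spelled out:

```python
import numpy as np

def default_weights(n_in, n_out):
    # Glorot/Xavier-style uniform bound ...
    scale = np.sqrt(6.0) / np.sqrt(n_in + n_out)
    # ... but the committed code divides by n_in again, shrinking the
    # effective range to +/- scale / n_in.
    return scale * (2 * np.random.uniform(size=(n_in, n_out)) - 1) / n_in
```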
577b84cf124a35b49311e39ab4d40ef0f8af59ed
introduce proso.analysis module
proso/analysis.py
proso/analysis.py
Python
0
@@ -0,0 +1,1132 @@ +import json%0Aimport hashlib%0Aimport os%0A%0A%0Adef get_experiment_data(name, compute_fun, cache_dir, cached=True, **kwargs):%0A kwargs_hash = hashlib.sha1(json.dumps(kwargs, sort_keys=True)).hexdigest()%0A filename = '%7B%7D/%7B%7D.json'.format(cache_dir, name);%0A if cached and os.path.exists(filename):%0A with open(filename, 'r') as f:%0A return _convert_json_keys(json.loads(f.read()))%0A result = compute_fun(**kwargs)%0A if cached:%0A if not os.path.exists(cache_dir):%0A os.makedirs(cache_dir)%0A with open(filename, 'w') as f:%0A f.write(json.dumps(result, sort_keys=True))%0A return result%0A%0A%0Adef _convert_json_keys(json_struct):%0A if isinstance(json_struct, list):%0A return map(_convert_json_keys, json_struct)%0A elif isinstance(json_struct, dict):%0A return %7B_maybe_convert_str(key): val for (key, val) in json_struct.iteritems()%7D%0A else:%0A return json_struct%0A%0A%0Adef _maybe_convert_str(x):%0A if x.isdigit():%0A try:%0A return int(x)%0A except ValueError:%0A pass%0A try:%0A return float(x)%0A except ValueError:%0A return x%0A%0A
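A usage sketch for the caching helper above — `compute_accuracy` and the cache path are made-up names, and kwargs must be JSON-serializable. Note also that `kwargs_hash` is computed but never used in the filename, so two calls with the same `name` but different kwargs share one cache file, which looks like an oversight:

```python
def compute_accuracy(threshold):
    # stand-in for an expensive analysis step
    return {'threshold': threshold, 'accuracy': 0.87}

result = get_experiment_data(
    'accuracy_experiment',    # also becomes the cache file name
    compute_accuracy,
    cache_dir='cache',
    threshold=0.5,            # forwarded to compute_fun via **kwargs
)
```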
45cb940db74d99b0dac31a2aace3d8505e4a9046
Add empty file to contain main part of module
datac/main.py
datac/main.py
Python
0.000001
@@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*-%0Aimport copy%0A
323fb80744e63a322fe5ed70d86130aa61aa3c19
Remove unused imports
examples/manifold/plot_swissroll.py
examples/manifold/plot_swissroll.py
""" =================================== Swiss Roll reduction with LLE =================================== An illustration of Swiss Roll reduction with locally linear embedding """ # Author: Fabian Pedregosa -- <[email protected]> # License: BSD, (C) INRIA 2011 print __doc__ import numpy as np import pylab as pl from mpl_toolkits.mplot3d import Axes3D #---------------------------------------------------------------------- # Locally linear embedding of the swiss roll from scikits.learn import manifold, datasets X, color = datasets.samples_generator.swiss_roll(1500) print "Computing LLE embedding" X_r, err = manifold.locally_linear_embedding(X, 12, 2) print "Done. Reconstruction error: %g" % err #---------------------------------------------------------------------- # Plot result fig = pl.figure() ax = fig.add_subplot(211, projection='3d') ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=color) ax.set_title("Original data") ax = fig.add_subplot(212) ax.scatter(X_r[:,0], X_r[:,1], c=color) pl.xticks([]), pl.yticks([]) pl.show()
Python
0
@@ -291,78 +291,19 @@ ort -numpy as np%0Aimport pylab as pl%0Afrom mpl_toolkits.mplot3d import Axes3D +pylab as pl %0A%0A%0A#
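A caveat on this cleanup: on matplotlib versions before (roughly) 3.2, the '3d' projection is only registered as a side effect of importing `Axes3D`, so removing that import can break `fig.add_subplot(211, projection='3d')`. The common defensive idiom keeps it with a lint suppression:

```python
# Kept for its side effect: registers the '3d' projection on old matplotlib.
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401
```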
e8c75e84a158876e71a926bec244af43ad93cbc4
add imu class
imu.py
imu.py
Python
0.000001
@@ -0,0 +1,1809 @@ +import serial%0Aimport math%0Aimport struct%0A%0Aclass IMU:%0A %22%22%22Class for working with a Microstrain IMU%22%22%22%0A def __init__(self):%0A self.IMU_PORT = '/dev/ttyS0'%0A self.IMU_BAUD = 115200%0A self.CMD_ACCEL_ANG_ORIENT = '%5CxC8'%0A self.CMD_ACCEL_ANG_ORIENT_SIZE = 67%0A%0A self.IMU_COMMAND = self.CMD_ACCEL_ANG_ORIENT%0A self.IMU_MESSAGE_SIZE = self.CMD_ACCEL_ANG_ORIENT_SIZE%0A%0A def open_imu(self):%0A self.imu = serial.Serial(self.IMU_PORT, self.IMU_BAUD)%0A%0A def close_imu(self):%0A self.imu.close()%0A%0A def read_imu(self):%0A self.imu.write(self.IMU_COMMAND)%0A%0A #TODO check IMU write%0A%0A data = %5B%5D%0A data = self.imu.read(self.IMU_MESSAGE_SIZE) %0A %0A #TODO check read status, check first char, checksum%0A%0A #conversion to numbers%0A accel_x = struct.unpack('%3Ef', data%5B1:5%5D)%5B0%5D%0A accel_y = struct.unpack('%3Ef', data%5B5:9%5D)%5B0%5D%0A accel_z = struct.unpack('%3Ef', data%5B9:13%5D)%5B0%5D%0A ang_rate_x = struct.unpack('%3Ef', data%5B13:17%5D)%5B0%5D%0A ang_rate_y = struct.unpack('%3Ef', data%5B17:21%5D)%5B0%5D%0A ang_rate_z = struct.unpack('%3Ef', data%5B21:25%5D)%5B0%5D%0A%0A #orientation matrix%0A m_1 = struct.unpack('%3Ef', data%5B33:37%5D)%5B0%5D%0A m_2 = struct.unpack('%3Ef', data%5B45:49%5D)%5B0%5D%0A m_3 = struct.unpack('%3Ef', data%5B57:61%5D)%5B0%5D%0A%0A #handle clock rollover outside of function%0A t = 0%0A t = struct.unpack('%3EI', data%5B61:65%5D)%5B0%5D%0A%0A time = 0.0%0A time = t / 62500.0 # convert time to seconds%0A %0A return accel_x, accel_y, accel_z, m_1, m_2, m_3, ang_rate_x, ang_rate_y, ang_rate_z, time, data %0A%0Adef main():%0A imu = IMU()%0A%0A imu.open_imu()%0A%0A accel_x, accel_y, accel_z, m_1, m_2, m_3, ang_rate_x, ang_rate_y, ang_rate_z, time, data = imu.read_imu()%0A%0A print accel_x%0A print accel_y%0A print accel_z%0A print ang_rate_x%0A print ang_rate_y%0A print ang_rate_z%0A print time%0A%0A imu.close_imu()%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
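The byte offsets in `read_imu` imply a fixed big-endian packet layout (floats at offsets 1, 5, 9, ..., a 32-bit tick counter at 61). A self-contained sketch decoding a synthetic packet the same way, with no serial port — the layout is inferred from the offsets above, not from a datasheet:

```python
import struct

packet = bytearray(67)                       # CMD_ACCEL_ANG_ORIENT_SIZE
packet[0] = 0xC8                             # command byte echo
struct.pack_into('>fff', packet, 1, 0.01, -0.02, 0.98)  # accel x, y, z
struct.pack_into('>I', packet, 61, 62500)    # timer: 62500 ticks == 1 second

accel = struct.unpack_from('>fff', packet, 1)
seconds = struct.unpack_from('>I', packet, 61)[0] / 62500.0
print(accel, seconds)                        # ~(0.01, -0.02, 0.98) 1.0
```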