commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
---|---|---|---|---|---|---|---|
408be8a0d49b7542c74e016a572499a8c4d85351
|
Add tests to verify team index and add pages render without errors
|
app/teams/tests.py
|
app/teams/tests.py
|
Python
| 0 |
@@ -0,0 +1,349 @@
+from app.test_base import BaseTestCase%0A%0Aclass TestTeamBehavior(BaseTestCase):%0A def test_index_page_200(self):%0A self.login()%0A response = self.client.get('/teams/')%0A self.assert200(response)%0A%0A def test_add_page_200(self):%0A self.login()%0A response = self.client.get('/teams/new')%0A self.assert200(response)
|
|
5ce6283cff4a3a97911a663d777869a7c7377341
|
add http_codes
|
catalogService/http_codes.py
|
catalogService/http_codes.py
|
Python
| 0.000004 |
@@ -0,0 +1,1345 @@
+#%0A# Copyright (c) 2008 rPath, Inc.%0A#%0A%0AHTTP_CONTINUE = 100%0AHTTP_SWITCHING_PROTOCOLS = 101%0AHTTP_PROCESSING = 102%0AHTTP_OK = 200%0AHTTP_CREATED = 201%0AHTTP_ACCEPTED = 202%0AHTTP_NON_AUTHORITATIVE = 203%0AHTTP_NO_CONTENT = 204%0AHTTP_RESET_CONTENT = 205%0AHTTP_PARTIAL_CONTENT = 206%0AHTTP_MULTI_STATUS = 207%0AHTTP_MULTIPLE_CHOICES = 300%0AHTTP_MOVED_PERMANENTLY = 301%0AHTTP_MOVED_TEMPORARILY = 302%0AHTTP_SEE_OTHER = 303%0AHTTP_NOT_MODIFIED = 304%0AHTTP_USE_PROXY = 305%0AHTTP_TEMPORARY_REDIRECT = 307%0AHTTP_BAD_REQUEST = 400%0AHTTP_UNAUTHORIZED = 401%0AHTTP_PAYMENT_REQUIRED = 402%0AHTTP_FORBIDDEN = 403%0AHTTP_NOT_FOUND = 404%0AHTTP_METHOD_NOT_ALLOWED = 405%0AHTTP_NOT_ACCEPTABLE = 406%0AHTTP_PROXY_AUTHENTICATION_REQUIRED = 407%0AHTTP_REQUEST_TIME_OUT = 408%0AHTTP_CONFLICT = 409%0AHTTP_GONE = 410%0AHTTP_LENGTH_REQUIRED = 411%0AHTTP_PRECONDITION_FAILED = 412%0AHTTP_REQUEST_ENTITY_TOO_LARGE = 413%0AHTTP_REQUEST_URI_TOO_LARGE = 414%0AHTTP_UNSUPPORTED_MEDIA_TYPE = 415%0AHTTP_RANGE_NOT_SATISFIABLE = 416%0AHTTP_EXPECTATION_FAILED = 417%0AHTTP_UNPROCESSABLE_ENTITY = 422%0AHTTP_LOCKED = 423%0AHTTP_FAILED_DEPENDENCY = 424%0AHTTP_UPGRADE_REQUIRED = 426%0AHTTP_INTERNAL_SERVER_ERROR = 500%0AHTTP_NOT_IMPLEMENTED = 501%0AHTTP_BAD_GATEWAY = 502%0AHTTP_SERVICE_UNAVAILABLE = 503%0AHTTP_GATEWAY_TIME_OUT = 504%0AHTTP_VERSION_NOT_SUPPORTED = 505%0AHTTP_VARIANT_ALSO_VARIES = 506%0AHTTP_INSUFFICIENT_STORAGE = 507%0AHTTP_NOT_EXTENDED = 510%0A%0A
|
|
5c602a98098bdedeffc2b7359a4b3d8407cb1449
|
Add migration to ensure consistency on file keys.
|
scripts/migrate_inconsistent_file_keys.py
|
scripts/migrate_inconsistent_file_keys.py
|
Python
| 0 |
@@ -0,0 +1,2699 @@
+#!/usr/bin/env python%0A# encoding: utf-8%0A%22%22%22Find all nodes with different sets of keys for %60files_current%60 and%0A%60files_versions%60, and ensure that all keys present in the former are also%0Apresent in the latter.%0A%22%22%22%0A%0Afrom website.models import Node%0Afrom website.app import init_app%0A%0A%0Adef find_file_mismatch_nodes():%0A %22%22%22Find nodes with inconsistent %60files_current%60 and %60files_versions%60 field%0A keys.%0A %22%22%22%0A return %5B%0A node for node in Node.find()%0A if set(node.files_versions.keys()) != set(node.files_current.keys())%0A %5D%0A%0A%0Adef migrate_node(node):%0A %22%22%22Ensure that all keys present in %60files_current%60 are also present in%0A %60files_versions%60.%0A %22%22%22%0A for key, file_id in node.files_current.iteritems():%0A if key not in node.files_versions:%0A node.files_versions%5Bkey%5D = %5Bfile_id%5D%0A else:%0A if file_id not in node.files_versions%5Bkey%5D:%0A node.files_versions%5Bkey%5D.append(file_id)%0A node.save()%0A%0A%0Adef main(dry_run=True):%0A init_app()%0A nodes = find_file_mismatch_nodes()%0A print('Migrating %7B0%7D nodes'.format(len(nodes)))%0A if dry_run:%0A return%0A for node in nodes:%0A migrate_node(node)%0A%0A%0Aif __name__ == '__main__':%0A import sys%0A dry_run = 'dry' in sys.argv%0A main(dry_run=dry_run)%0A%0A%0Afrom nose.tools import * # noqa%0A%0Afrom tests.base import OsfTestCase%0Afrom tests.factories import ProjectFactory%0A%0Afrom framework.auth import Auth%0A%0A%0Aclass TestMigrateFiles(OsfTestCase):%0A%0A def clear(self):%0A Node.remove()%0A%0A def setUp(self):%0A super(TestMigrateFiles, self).setUp()%0A self.clear()%0A self.nodes = %5B%5D%0A for idx in range(3):%0A node = ProjectFactory()%0A node.add_file(%0A Auth(user=node.creator),%0A 'name',%0A 'contents',%0A len('contents'),%0A 'text/plain',%0A )%0A self.nodes.append(node)%0A self.nodes%5B-1%5D.files_versions = %7B%7D%0A self.nodes%5B-1%5D.save()%0A # Sanity check%0A assert_in('name', 
self.nodes%5B-1%5D.files_current)%0A assert_not_in('name', self.nodes%5B-1%5D.files_versions)%0A%0A def tearDown(self):%0A super(TestMigrateFiles, self).tearDown()%0A self.clear()%0A%0A def test_get_targets(self):%0A targets = find_file_mismatch_nodes()%0A assert_equal(len(targets), 1)%0A assert_equal(targets%5B0%5D, self.nodes%5B-1%5D)%0A%0A def test_migrate(self):%0A main(dry_run=False)%0A assert_equal(len(find_file_mismatch_nodes()), 0)%0A assert_in('name', self.nodes%5B-1%5D.files_versions)%0A assert_equal(%0A self.nodes%5B-1%5D.files_current%5B'name'%5D,%0A self.nodes%5B-1%5D.files_versions%5B'name'%5D%5B0%5D,%0A )%0A
|
|
e5e6506ab6b5191e309aa75e56e25253c0ba7763
|
Create drivers.py
|
chips/memory/file/drivers.py
|
chips/memory/file/drivers.py
|
Python
| 0.000001 |
@@ -0,0 +1,116 @@
+# This code has to be added to the corresponding __init__.py%0A%0ADRIVERS%5B%22filememory%22%5D = %5B%22PICKLEFILE%22, %22JSONFILE%22%5D%0A%0A
|
|
386baa36355b0e9378fff59fe768d1baa7e73fec
|
Add Himax motion detection example.
|
scripts/examples/Arduino/Portenta-H7/21-Sensor-Control/himax_motion_detection.py
|
scripts/examples/Arduino/Portenta-H7/21-Sensor-Control/himax_motion_detection.py
|
Python
| 0 |
@@ -0,0 +1,914 @@
+# Himax motion detection example.%0A%0Aimport sensor, image, time, pyb%0Afrom pyb import Pin, ExtInt%0A%0Asensor.reset()%0Asensor.set_pixformat(sensor.GRAYSCALE)%0Asensor.set_framesize(sensor.QVGA)%0Asensor.set_framerate(15)%0A%0Asensor.ioctl(sensor.IOCTL_HIMAX_MD_THRESHOLD, 0x01)%0Asensor.ioctl(sensor.IOCTL_HIMAX_MD_WINDOW, (0, 0, 320, 240))%0Asensor.ioctl(sensor.IOCTL_HIMAX_MD_CLEAR)%0Asensor.ioctl(sensor.IOCTL_HIMAX_MD_ENABLE, True)%0A%0Amotion_detected = False%0Adef on_motion(line):%0A global motion_detected%0A motion_detected = True%0A%0Aled = pyb.LED(3)%0Aext = ExtInt(Pin(%22PC15%22), ExtInt.IRQ_RISING, Pin.PULL_DOWN, on_motion)%0A%0Aclock = time.clock()%0Awhile(True):%0A clock.tick()%0A img = sensor.snapshot()%0A if (motion_detected):%0A led.on()%0A time.sleep_ms(500)%0A # Clear motion detection flag%0A sensor.ioctl(sensor.IOCTL_HIMAX_MD_CLEAR)%0A motion_detected = False%0A led.off()%0A print(clock.fps())%0A
|
|
b1ab4ef6fbac0ce02d05464e03599d44721fb239
|
Add an example using partial.
|
examples/partial.py
|
examples/partial.py
|
Python
| 0.000001 |
@@ -0,0 +1,905 @@
+#!/usr/bin/env python%0Afrom functools import partial%0A%0Aimport matplotlib.pyplot as plt%0Aimport numpy as np%0A%0Afrom latexipy import latexipy as lp%0A%0A%0Aif __name__ == '__main__':%0A lp.latexify()%0A%0A figure = partial(lp.figure, folder='some_images', exts=%5B'png'%5D)%0A%0A x = np.linspace(-np.pi, np.pi)%0A y1 = np.sin(x)%0A y2 = np.cos(x)%0A%0A with figure('sin'):%0A plt.plot(x, y1, label='sine')%0A plt.title('Sine')%0A plt.xlabel(r'$%5Ctheta$')%0A plt.ylabel('Value')%0A plt.legend()%0A%0A with figure('cos'):%0A plt.plot(x, y2, label='cosine', c='C1')%0A plt.title('Cosine')%0A plt.xlabel(r'$%5Ctheta$')%0A plt.ylabel('Value')%0A plt.legend()%0A%0A with figure('both'):%0A plt.plot(x, y1, label='sine')%0A plt.plot(x, y2, label='cosine')%0A plt.title('Sine and cosine')%0A plt.xlabel(r'$%5Ctheta$')%0A plt.ylabel('Value')%0A plt.legend()%0A
|
|
e781a1e89b945dad1585f82dfdb77cbffbe8fdeb
|
add unit tests
|
lib/svtplay_dl/tests/prio_streams.py
|
lib/svtplay_dl/tests/prio_streams.py
|
Python
| 0.000001 |
@@ -0,0 +1,1712 @@
+#!/usr/bin/python%0A# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil; coding: utf-8 -*-%0A# ex:ts=4:sw=4:sts=4:et:fenc=utf-8%0A%0Afrom __future__ import absolute_import%0Aimport unittest%0Afrom svtplay_dl.utils import prio_streams%0A%0Aclass Stream(object):%0A def __init__(self, proto, bitrate):%0A self.proto = proto%0A self.bitrate = bitrate%0A def name(self):%0A return self.proto%0A def __repr__(self):%0A return '%25s(%25d)' %25 (self.proto.upper(), self.bitrate)%0A%0Aclass PrioStreamsTest(unittest.TestCase):%0A def _gen_proto_case(self, ordered, unordered, default=True, expected=None):%0A streams = %5BStream(x, 100) for x in unordered%5D%0A%0A kwargs = %7B%7D%0A if not default:%0A kwargs%5B'protocol_prio'%5D = ordered%0A if expected is None:%0A expected = %5Bstr(Stream(x, 100)) for x in ordered%5D%0A%0A return self.assertEqual(%0A %5Bstr(x) for x in prio_streams(streams, **kwargs)%5D,%0A expected%0A )%0A%0A def test_default_order(self):%0A return self._gen_proto_case(%0A %5B'hls', 'hds', 'http', 'rtmp'%5D,%0A %5B'rtmp', 'hds', 'hls', 'http'%5D%0A )%0A%0A def test_custom_order(self):%0A return self._gen_proto_case(%0A %5B'http', 'rtmp', 'hds', 'hls'%5D,%0A %5B'rtmp', 'hds', 'hls', 'http'%5D,%0A default=False,%0A )%0A%0A def test_custom_order_1(self):%0A return self._gen_proto_case(%0A %5B'http'%5D,%0A %5B'rtmp', 'hds', 'hls', 'http'%5D,%0A default=False,%0A )%0A%0A def test_proto_unavail(self):%0A return self._gen_proto_case(%0A %5B'http', 'rtmp'%5D,%0A %5B'hds', 'hls', 'https'%5D,%0A default=False,%0A expected=%5B%5D,%0A )%0A
|
|
4e4a8bbb459e6158a7c2d22c04849de9b4de2693
|
Add directory.py to the directory package
|
classyfd/directory/directory.py
|
classyfd/directory/directory.py
|
Python
| 0.000001 |
@@ -0,0 +1,62 @@
+%22%22%22Contains a Directory class to represent real directories%22%22%22
|
|
0c079b7160cf635c14a016d418d2bc8d3d521f26
|
add docker start tool
|
tools/run.py
|
tools/run.py
|
Python
| 0.000001 |
@@ -0,0 +1,1880 @@
+# coding=utf-8%0Aimport os%0Aimport json%0A%0Aos.system(%22docker rm -f redis%22)%0Aos.system(%22docker rm -f mysql%22)%0Aos.system(%22docker rm -f oj_web_server%22)%0A%0Aif os.system(%22docker run --name mysql -v /root/data:/var/lib/mysql -v /root/data/my.cnf:/etc/my.cnf -e MYSQL_ROOT_PASSWORD=root -d mysql/mysql-server:latest%22):%0A print %22Error start mysql%22%0A exit()%0A%0Aif os.system(%22docker run --name redis -d redis%22):%0A print %22Error start redis%22%0A exit()%0A%0Aif os.system(%22docker run --name oj_web_server -e oj_env=server -v /root/qduoj:/code -v /root/test_case:/code/test_case -v /root/log:/code/log -v /root/upload:/code/upload -v /root/qduoj/dockerfiles/oj_web_server/supervisord.conf:/etc/supervisord.conf -v /root/qduoj/dockerfiles/oj_web_server/gunicorn.conf:/etc/gunicorn.conf -v /root/qduoj/dockerfiles/oj_web_server/mq.conf:/etc/mq.conf -d -p 127.0.0.1:8080:8080 --link mysql --link=redis oj_web_server%22):%0A print %22Erro start oj_web_server%22%0A exit()%0A%0Ainspect_redis = json.loads(os.popen(%22docker inspect redis%22).read())%0A%0Aif not inspect_redis:%0A print %22Error when inspect redis ip%22%0A exit()%0Aredis_ip = inspect_redis%5B0%5D%5B%22NetworkSettings%22%5D%5B%22IPAddress%22%5D%0Aprint %22redis ip %22, redis_ip%0A%0A%0Ainspect_mysql = json.loads(os.popen(%22docker inspect mysql%22).read())%0Aif not inspect_mysql:%0A print %22Error when inspect mysql ip%22%0A exit()%0Amysql_ip = inspect_mysql%5B0%5D%5B%22NetworkSettings%22%5D%5B%22IPAddress%22%5D%0Aprint %22mysql ip %22, mysql_ip%0A%0A%0Af = open(%22/etc/profile%22, %22r%22)%0Acontent = %22%22%0Afor line in f.readlines():%0A if line.startswith(%22export REDIS_PORT_6379_TCP_ADDR%22):%0A content += (%22%5Cnexport REDIS_PORT_6379_TCP_ADDR=%22 + redis_ip + %22%5Cn%22)%0A elif line.startswith(%22export submission_db_host%22):%0A content += (%22%5Cnexport submission_db_host=%22 + mysql_ip + %22%5Cn%22)%0A else:%0A content += line%0Af.close()%0A%0A%0Af = open(%22/etc/profile%22, 
%22w%22)%0Af.write(content)%0Af.close()%0A%0Aprint %22Please run source /etc/profile%22%0A%0A
|
|
4cf7f6c23bc9d01c6780afa4d27bf9e5e71fb72b
|
add hacky download
|
util/unidata_dl.py
|
util/unidata_dl.py
|
Python
| 0 |
@@ -0,0 +1,961 @@
+import glob%0Aimport os%0Aimport datetime%0A%0Asts = datetime.datetime(2016, 11, 21, 0, 0)%0Aets = datetime.datetime(2016, 11, 21, 3, 0)%0Ainterval = datetime.timedelta(minutes=1)%0A%0Aos.chdir('data/nexrad/NIDS')%0Afor nexrad in glob.glob('???'):%0A os.chdir(nexrad)%0A for nids in %5B'N0Q', 'NET', 'N0R', 'EET'%5D:%0A if not os.path.isdir(nids):%0A continue%0A os.chdir(nids)%0A now = sts%0A while now %3C ets:%0A fp = %22%25s_%25s%22 %25 (nids, now.strftime(%22%25Y%25m%25d_%25H%25M%22))%0A if not os.path.isfile(fp):%0A url = now.strftime((%22http://motherlode.ucar.edu/native/radar/%22%0A %22level3/%22 + nids + %22/%22 + nexrad +%0A %22/%25Y%25m%25d/Level3_%22 + nexrad + %22_%22 + nids +%0A %22_%25Y%25m%25d_%25H%25M.nids%22))%0A cmd = %22wget -q -O %25s %25s%22 %25 (fp, url)%0A os.system(cmd)%0A now += interval%0A os.chdir('..')%0A os.chdir('..')%0A
|
|
e8799d50dea038fe6e6bb94a4676d2f6eaf1ed1d
|
Update judgeapi.py
|
judge/judgeapi.py
|
judge/judgeapi.py
|
from django.conf import settings
import socket
import struct
import json
import logging
from judge.simple_comet_client import delete_channel, create_channel, send_message
logger = logging.getLogger('judge.judgeapi')
size_pack = struct.Struct('!I')
def judge_request(packet, reply=True):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((settings.BRIDGED_DJANGO_HOST, settings.BRIDGED_DJANGO_PORT))
output = json.dumps(packet, separators=(',', ':'))
output = output.encode('zlib')
writer = sock.makefile('w', 0)
writer.write(size_pack.pack(len(output)))
writer.write(output)
writer.close()
if reply:
reader = sock.makefile('r', -1)
input = reader.read(4)
if not input:
raise ValueError('Judge did not respond')
length = size_pack.unpack(input)[0]
input = reader.read(length)
if not input:
raise ValueError('Judge did not respond')
reader.close()
sock.close()
input = input.decode('zlib')
result = json.loads(input)
return result
def judge_submission(submission):
chan = 'sub_%d' % submission.id
delete_channel(chan) # Delete if exist
create_channel(chan)
create_channel('submissions') #TODO: only attempt to create once
try:
response = judge_request({
'name': 'submission-request',
'submission-id': submission.id,
'problem-id': submission.problem.code,
'language': submission.language.key,
'source': submission.source,
})
except BaseException:
logger.exception('Failed to send request to judge')
submission.status = 'IE'
submission.save()
success = False
else:
submission.status = 'QU' if (response['name'] == 'submission-received' and
response['submission-id'] == submission.id) else 'IE'
id = 1 if submission.user.is_admin() else (2 is submission.user.is_problem_setter() else 0)
send_message('submissions', 'submission-start %d %s %s %s %s %s %d %s' %
(submission.id, submission.problem.code, submission.problem.name.replace(" ", "\f"),
submission.status, submission.language.key,
submission.user.user.username, id,
str(submission.date).replace(" ", "\f")))
success = True
submission.time = None
submission.memory = None
submission.points = None
submission.result = None
submission.save()
return success
def abort_submission(submission):
judge_request({'name': 'terminate-submission', 'submission-id': submission.id}, reply=False)
|
Python
| 0.000001 |
@@ -1949,10 +1949,16 @@
E'%0A%0A
-%09%09
+
id =
@@ -1999,17 +1999,17 @@
lse (2 i
-s
+f
submiss
|
3dbf91d4d447f6dbddece040b3a9dcbeb8ebcd22
|
Add missing migrations
|
getyourdata/data_request/migrations/0023_auto_20160716_0946.py
|
getyourdata/data_request/migrations/0023_auto_20160716_0946.py
|
Python
| 0.000029 |
@@ -0,0 +1,3078 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.6 on 2016-07-16 09:46%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0Aimport django_extensions.db.fields%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('data_request', '0022_faqcontent_priority'),%0A %5D%0A%0A operations = %5B%0A migrations.CreateModel(%0A name='RequestContent',%0A fields=%5B%0A ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),%0A ('created_on', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name=b'Created on')),%0A ('updated_on', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name=b'Updated on')),%0A ('title', models.CharField(default='Default', max_length=255, unique=True)),%0A ('header', models.TextField(blank=True, default='Dear recipient,')),%0A ('header_en', models.TextField(blank=True, default='Dear recipient,', null=True)),%0A ('header_fi', models.TextField(blank=True, default='Dear recipient,', null=True)),%0A ('content1', models.TextField(blank=True, default='content first')),%0A ('content1_en', models.TextField(blank=True, default='content first', null=True)),%0A ('content1_fi', models.TextField(blank=True, default='content first', null=True)),%0A ('content2', models.TextField(blank=True, default='content second')),%0A ('content2_en', models.TextField(blank=True, default='content second', null=True)),%0A ('content2_fi', models.TextField(blank=True, default='content second', null=True)),%0A ('footer', models.TextField(blank=True, default='Regards,')),%0A ('footer_en', models.TextField(blank=True, default='Regards,', null=True)),%0A ('footer_fi', models.TextField(blank=True, default='Regards,', null=True)),%0A %5D,%0A options=%7B%0A 'abstract': False,%0A %7D,%0A ),%0A migrations.DeleteModel(%0A name='EmailContent',%0A ),%0A migrations.RemoveField(%0A model_name='pdfcontents',%0A name='content1_en',%0A ),%0A 
migrations.RemoveField(%0A model_name='pdfcontents',%0A name='content1_fi',%0A ),%0A migrations.RemoveField(%0A model_name='pdfcontents',%0A name='content2_en',%0A ),%0A migrations.RemoveField(%0A model_name='pdfcontents',%0A name='content2_fi',%0A ),%0A migrations.RemoveField(%0A model_name='pdfcontents',%0A name='footer_en',%0A ),%0A migrations.RemoveField(%0A model_name='pdfcontents',%0A name='footer_fi',%0A ),%0A migrations.RemoveField(%0A model_name='pdfcontents',%0A name='header_en',%0A ),%0A migrations.RemoveField(%0A model_name='pdfcontents',%0A name='header_fi',%0A ),%0A %5D%0A
|
|
ddb9e1c0160f40fe60330c247906b9b41f18be1b
|
Create hearthstone_way_to_legend.py
|
hearthstone_way_to_legend.py
|
hearthstone_way_to_legend.py
|
Python
| 0.00009 |
@@ -0,0 +1,793 @@
+import random,statistics%0A%0Awinrate = 0.51%0Aiterations = 100%0A%0Agames = %5B0%5D*iterations%0Apasswinstreak = 5*5 # below Rank 5%0Afor x in range(iterations):%0A %0A # 1-10 11-15 =%3E 15 rank%0A ladderPosition = 5*10 + 4*5%0A winstrek = 0%0A %0A while True:%0A games%5Bx%5D = games%5Bx%5D + 1%0A if random.random() %3C= winrate:%0A winstreak = winstrek + 1%0A ladderPosition = ladderPosition - 1%0A if winstrek %3E= 2 and ladderPosition %3E passwinstreak:%0A ladderPosition = ladderPosition - 1%0A else:%0A winstreak = 0%0A ladderPosition = ladderPosition + 1%0A if ladderPosition is 0:%0A break%0A %0A%0A%0Aprint(%22Total games (mean of %22 + str(iterations) + %22 iterations): %22+ str(statistics.mean(games)))%0Ainput()%0A
|
|
a801deeaa00e443b3c68c1fbcea1e6ff62d90082
|
Add Python script to generate users
|
python/addusers.py
|
python/addusers.py
|
Python
| 0.000003 |
@@ -0,0 +1,2019 @@
+#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0A%0A%22%22%22 %0AAdds a sequential number of users into a test database%0Awith username: newusern and password newusern%0A%0ANot for production usage%0A%22%22%22%0A%0Aimport MySQLdb%0A%0A%0Ahostname = # FILL IN %0Ausername = # FILL IN %0Apassword = # FILL IN %0A%0A%0A%0A# Simple routine to run a query on a database and print the results:%0Adef doQuery( conn, n_users ) :%0A cur = conn.cursor()%0A%0A try:%0A for i in range(0,n_users):%0A cur.execute(%22%22%22CREATE USER %5C'newuser%25i%5C'@%5C'localhost%5C' IDENTIFIED BY %5C'password%25i%5C'%22%22%22 %25 (i,i) )%0A cur.execute( %22%22%22GRANT ALL PRIVILEGES ON * . * TO %5C'newuser%25i%5C'@%5C'localhost%5C'%22%22%22 %25 i )%0A cur.execute( %22%22%22FLUSH PRIVILEGES%22%22%22 )%0A except MySQLdb.Error, e:%0A try:%0A print (%22MySQL Error %5B%25d%5D: %25s%22 %25 (e.args%5B0%5D, e.args%5B1%5D))%0A except IndexError:%0A print (%22MySQL Error: %25s%22 %25 str(e))%0A%0A%0A if __name__ == '__main__':%0A print(%22Using mysql.connector%E2%80%A6%22)%0A myConnection = MySQLdb.connect( host=hostname, user=username, passwd=password, 20)%0A doQuery( myConnection )%0A myConnection.close()%0A%0A%0A%0A%0A%0A%0A
|
|
6602bbfa47c2523bc4d058bfa1d5a28d5fc33836
|
Add tag reading
|
bw_read_xml.py
|
bw_read_xml.py
|
from lxml import etree
from copy import copy
class BattWarsObject(object):
def __init__(self, obj):
self._attributes = {}
self.type = obj.get("type")
self.id = obj.get("id")
self._xml_node = obj
# We will create a name for this object by putting the type and ID together.
self.name = "{0}[{1}]".format(self.type, self.id)
for attr in obj:
assert attr not in self._attributes
self._attributes[attr.get("name")] = attr
@property
def attributes(self):
return self._attributes
def has_attr(self, name):
return name in self._attributes
def get_attr(self, name):
return self._attributes[name]
def get_attr_type(self, name):
return self._attributes[name].get("type")
def get_attr_elements(self, name):
return [elem.text for elem in self._attributes[name]]
# Use this for attributes that have only 1 element
def get_attr_value(self, name):
return self._attributes[name][0].text
def set_attr_value(self, name, val):
self._attributes[name][0].text = val
class BattWarsLevel(object):
def __init__(self, fileobj):
self._tree = etree.parse(fileobj)
self._root = self._tree.getroot()
self.obj_map = {}
for obj in self._root:
bw_object = BattWarsObject(obj)
self.obj_map[bw_object.id] = bw_object
# The root of a BW level xml file contains the objects
# used by that level.
@property
def objects(self):
return self._root
def get_attributes(self, obj):
return []
def create_object_hierarchy(id_map):
hierarchy = {}
never_referenced = {obj_id: True for obj_id in id_map.keys()}
for obj_id, obj in id_map.items():
if obj.has_attr("mBase"):
# In the xml file mBase has the type pointer, but it's actually
# the ID of a different object in the file.
pointer = obj.get_attr_value("mBase")
assert pointer in id_map
if obj.id not in hierarchy:
del never_referenced[obj_id]
hierarchy[obj.id] = pointer
else:
raise RuntimeError("one object shouldn't have more than 1 reference")
return hierarchy, never_referenced
def create_ref(ref, hierarchy, id_map):
if ref.id not in hierarchy:
return ref.name
else:
return ref.name + " => " + create_ref(id_map[hierarchy[ref.id]], hierarchy, id_map)
if __name__ == "__main__":
with open("bw1_sandbox/C1_Gauntlet_Level.xml", "r") as f:
bw_level = BattWarsLevel(f)
types = {}
id_map = {}
for obj in bw_level.objects:
bw_object = BattWarsObject(obj)
if bw_object.type not in types:
types[bw_object.type] = 1
else:
types[bw_object.type] += 1
assert bw_object.id not in id_map
id_map[bw_object.id] = bw_object
# Never referenced actually doesn't mean that it isn't referenced at all,
# but that it isn't referenced in a mBase attribute
hierarchy, never_referenced = create_object_hierarchy(id_map)
print(never_referenced)
with open("hierarchy.txt", "w") as f:
f.write("")
with open("hierarchy.txt", "a") as f:
for obj_id in sorted(id_map.keys()):
obj = id_map[obj_id]
if obj_id in hierarchy:
f.write(create_ref(obj, hierarchy, id_map)+"\n")
print("done")
|
Python
| 0 |
@@ -901,16 +901,93 @@
name%5D%5D%0A%0A
+ def get_attr_tag(self, name):%0A return self._attributes%5Bname%5D.tag%0A%0A
# Us
@@ -2095,24 +2095,25 @@
+#
assert point
@@ -2125,16 +2125,16 @@
id_map%0A
-
%0A
@@ -2356,17 +2356,32 @@
eference
-%22
+: %25s%22 %25 obj.name
)%0A%0A r
@@ -2649,22 +2649,21 @@
-with open(%22bw1
+infile = %22bw2
_san
@@ -2671,30 +2671,46 @@
box/
-C1_Gauntlet_Level.xml%22
+SP_5.3_Level.xml%22%0A with open(infile
, %22r
@@ -2996,24 +2996,25 @@
1%0A%0A
+#
assert bw_ob
@@ -3035,16 +3035,85 @@
id_map%0A
+ if bw_object.id in id_map:%0A print(bw_object.name)%0A
|
7078197ff7f7d8acd5a16de0c1debda8515efdf7
|
remove existed catalog directory
|
myriadeploy/setup_cluster.py
|
myriadeploy/setup_cluster.py
|
#!/usr/bin/env python
import socket
import subprocess
import sys
def host_port_list(workers):
return map(lambda (x,y) : str(x)+':'+str(y), workers)
def read_workers(filename):
ret = []
for line in open(filename,'r'):
line = line.strip()
# Skip blank lines or comments
if len(line) == 0 or line[0] == '#':
continue
# Extract the host:port string
hostline = line.split(':')
if len(hostline) != 2:
raise Exception("expected host:port instead of %s" % (line))
hostname = hostline[0]
try:
socket.gethostbyname(hostname)
except socket.error:
raise Exception("unable to resolve hostname %s" % (hostname))
try:
port = int(hostline[1])
except:
raise Exception("unable to convert %s to an int" % (port))
ret.append((hostname, port))
return ret
def make_catalog(description, workers):
args = ["./run_catalog_maker.sh", \
description, \
str(len(workers))]
args += host_port_list(workers)
if subprocess.call(args):
print >> sys.stderr, "error making the Catalog"
sys.exit(1)
def remote_mkdir(hostname, dirname):
args = ["ssh", hostname, "mkdir", "-p", dirname]
return subprocess.call(args)
def copy_master_catalog(hostname, dirname, remote_root):
local_path = "%s/%s" % (dirname, "master.catalog")
remote_path = "%s:%s/%s-files/%s" % (hostname, remote_root, dirname, dirname)
args = ["scp", local_path, remote_path]
return subprocess.call(args)
def copy_worker_catalog(hostname, dirname, remote_root, i):
local_path = "%s/worker_%d" % (dirname, i)
remote_path = "%s:%s/%s-files/%s" % (hostname, remote_root, dirname, dirname)
args = ["scp", "-r", local_path, remote_path]
return subprocess.call(args)
def copy_catalogs(description, remote_root, workers):
for (i,(hostname,port)) in enumerate(workers):
if remote_mkdir(hostname, "%s/%s-files/%s" \
% (remote_root, description, description)):
raise Exception("Error making directory on master %s" \
% (hostname,))
# Master
if i == 0:
if copy_master_catalog(hostname, description, remote_root):
raise Exception("Error copying master.catalog to %s" % (hostname,))
# Workers
else:
if copy_worker_catalog(hostname, description, remote_root, i):
raise Exception("Error copying worker.catalog to %s " % (hostname,))
def copy_distribution(workers, dirname, remote_root):
for (hostname, port) in workers:
remote_path = "%s:%s/%s-files" % (hostname, remote_root, dirname)
to_copy = ["myriad-0.1.jar", "sqlite4java-282",
"start_server.py", "start_workers.py",
"conf"]
args = ["scp", "-qr"] + to_copy + [remote_path]
if subprocess.call(args):
raise Exception("Error copying distribution to %s" % (hostname,))
def main(argv):
# Usage
if len(argv) != 4:
print >> sys.stderr, "Usage: %s <description> <expt_root> <workers.txt>" % (argv[0])
print >> sys.stderr, " description: any alphanumeric plus '-_' string."
print >> sys.stderr, " expt_root: where the files should be stored locally, e.g., /scratch."
print >> sys.stderr, " workers.txt: a list of host:port strings;"
print >> sys.stderr, " the first entry is the master."
sys.exit(1)
# Command-line arguments
DESCRIPTION = argv[1]
EXPT_ROOT = argv[2]
WORKERS_FILE = argv[3]
# Figure out the master and workers
workers = read_workers(WORKERS_FILE)
# Step 1: make the Catalog
make_catalog(DESCRIPTION, workers)
# Step 2: Copy each catalog over
copy_catalogs(DESCRIPTION, EXPT_ROOT, workers)
# Step 3: Copy over java, libs, myriad
copy_distribution(workers, DESCRIPTION, EXPT_ROOT)
if __name__ == "__main__":
main(sys.argv)
|
Python
| 0.000001 |
@@ -952,24 +952,88 @@
, workers):%0A
+ args = %5B%22rm%22, %22-r%22, description%5D%0A subprocess.call(args);%0A
args = %5B
|
821f1b83c441122b28ad2dc869576ca22a4ee642
|
Create ngram_service.py
|
ngram_utils/ngram_service.py
|
ngram_utils/ngram_service.py
|
Python
| 0.000003 |
@@ -0,0 +1,1115 @@
+from thrift.transport import TSocket%0Afrom thrift.protocol import TBinaryProtocol%0Afrom thrift.transport import TTransport%0Afrom libs.hbase import Hbase%0A%0Aclass NgramService(object):%0A%0A def __init__(self, mongo_host, hbase_host):%0A mclient = settings.MONGO_CLIENT%0A unigram_db = mclient%5B'unigrams'%5D%0A bigram_db = mclient%5B'bigrams'%5D%0A trigram_db = mclient%5B'trigrams'%5D%0A unigram_col_all = unigram_db%5B'all'%5D%0A bigram_col_preps = bigram_db%5B'preps'%5D%0A trigram_col_preps = trigram_db%5B'preps'%5D%0A # No Determinatives%0A trigram_db_nodt = mclient%5B'tetragrams'%5D%0A bigram_db_nodt = mclient%5B'bigrams_nodt'%5D%0A trigram_preps_nodt1 = trigram_db_nodt%5B'preps1'%5D%0A trigram_preps_nodt2 = trigram_db_nodt%5B'preps2'%5D%0A bigram_col_preps_nodt = bigram_db_nodt%5B'preps'%5D%0A%0A # HBASE%0A h_unigrams = 'ngrams1'%0A h_bigrams = 'ngrams2'%0A h_trigrams_skips = 'ngrams3'%0A transport = TTransport.TBufferedTransport(TSocket.TSocket(*settings.HBASE_HOST))%0A protocol = TBinaryProtocol.TBinaryProtocolAccelerated(transport)%0A client = Hbase.Client(protocol)%0A transport.open()%0A rate = 0%0A start = time.time()%0A
|
|
897b56183c3b30a0bc4f439e20d42ce8da2b444c
|
add empty unit test for viewhandler module
|
supvisors/tests/test_viewhandler.py
|
supvisors/tests/test_viewhandler.py
|
Python
| 0 |
@@ -0,0 +1,1294 @@
+#!/usr/bin/python%0A#-*- coding: utf-8 -*-%0A%0A# ======================================================================%0A# Copyright 2016 Julien LE CLEACH%0A# %0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A# %0A# http://www.apache.org/licenses/LICENSE-2.0%0A# %0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A# ======================================================================%0A%0Aimport sys%0Aimport unittest%0A%0Afrom supvisors.tests.base import DummySupvisors%0A%0A%0Aclass ViewHandlerTest(unittest.TestCase):%0A %22%22%22 Test case for the viewhandler module. %22%22%22%0A%0A def test_TODO(self):%0A %22%22%22 Test the values set at construction. %22%22%22%0A from supvisors.viewhandler import ViewHandler%0A handler = ViewHandler()%0A self.assertIsNotNone(handler)%0A%0A%0Adef test_suite():%0A return unittest.findTestCases(sys.modules%5B__name__%5D)%0A%0Aif __name__ == '__main__':%0A unittest.main(defaultTest='test_suite')%0A
|
|
6050610a5cf34bc55a05fa3a8d8a38f6e8e743af
|
Add test_ko.py for "ko" locale (#9)
|
tests/localization_tests/test_ko.py
|
tests/localization_tests/test_ko.py
|
Python
| 0 |
@@ -0,0 +1,2189 @@
+# -*- coding: utf-8 -*-%0A%0Afrom pendulum import Pendulum%0A%0Afrom .. import AbstractTestCase%0Afrom . import AbstractLocalizationTestCase%0A%0A%0A%0Aclass KoTest(AbstractLocalizationTestCase, AbstractTestCase):%0A%0A locale = 'ko'%0A%0A def diff_for_humans(self):%0A with self.wrap_with_test_now():%0A d = Pendulum.now().sub_second()%0A self.assertEqual('1 %EC%B4%88 %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_seconds(2)%0A self.assertEqual('2 %EC%B4%88 %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_minute()%0A self.assertEqual('1 %EB%B6%84 %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_minutes(2)%0A self.assertEqual('2 %EB%B6%84 %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_hour()%0A self.assertEqual('1 %EC%8B%9C%EA%B0%84 %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_hours(2)%0A self.assertEqual('2 %EC%8B%9C%EA%B0%84 %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_day()%0A self.assertEqual('1 %EC%9D%BC %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_days(2)%0A self.assertEqual('2 %EC%9D%BC %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_week()%0A self.assertEqual('1 %EC%A3%BC%EC%9D%BC %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_weeks(2)%0A self.assertEqual('2 %EC%A3%BC%EC%9D%BC %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_month()%0A self.assertEqual('1 %EA%B0%9C%EC%9B%94 %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_months(2)%0A self.assertEqual('2 %EA%B0%9C%EC%9B%94 %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_year()%0A self.assertEqual('1 %EB%85%84 %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().sub_years(2)%0A self.assertEqual('2 %EB%85%84 %EC%A0%84', d.diff_for_humans())%0A%0A d = Pendulum.now().add_second()%0A self.assertEqual('1 %EC%B4%88 %ED%9B%84', d.diff_for_humans())%0A%0A d = Pendulum.now().add_second()%0A d2 = Pendulum.now()%0A self.assertEqual('1 
%EC%B4%88 %EB%92%A4', d.diff_for_humans(d2))%0A self.assertEqual('1 %EC%B4%88 %EC%95%9E', d2.diff_for_humans(d))%0A%0A self.assertEqual('1 %EC%B4%88', d.diff_for_humans(d2, True))%0A self.assertEqual('2 %EC%B4%88', d2.diff_for_humans(d.add_second(), True))%0A
|
|
013c6c57959fd8317ba8b27a2a467a37f0a1d8be
|
Create __init__.py
|
stock/__init__.py
|
stock/__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
1eb1851e4dec9c6425c3cf127e6c4ec5b0d3c987
|
Add LineNumberTable tests
|
tests/test_line_number_attribute.py
|
tests/test_line_number_attribute.py
|
Python
| 0 |
@@ -0,0 +1,750 @@
+# -*- coding: utf-8 -*-%0Aimport os.path%0A%0Aimport pytest%0A%0Afrom jawa import ClassFile%0A%0A%[email protected]%0Adef cf():%0A sample_path = os.path.join(%0A os.path.dirname(__file__),%0A 'data',%0A 'HelloWorldDebug.class'%0A )%0A%0A with open(sample_path, 'rb') as fin:%0A cf = ClassFile(fin)%0A yield cf%0A%0A%0Adef test_exceptions_read(cf):%0A m = cf.methods.find_one(name='main')%0A a = m.code.attributes.find_one(name='LineNumberTable')%0A%0A assert len(a.line_no) == 2%0A%0A assert a.line_no%5B0%5D == (0, 3)%0A assert a.line_no%5B1%5D == (8, 4)%0A%0A%0Adef test_exceptions_write(cf):%0A m = cf.methods.find_one(name='main')%0A a = m.code.attributes.find_one(name='LineNumberTable')%0A%0A assert a.info == b'%5Cx00%5Cx02%5Cx00%5Cx00%5Cx00%5Cx03%5Cx00%5Cx08%5Cx00%5Cx04'%0A
|
|
beb98425423e0278d9d4d5e39e6b5196146425a0
|
add manual tests
|
manual_tests.py
|
manual_tests.py
|
Python
| 0 |
@@ -0,0 +1,2266 @@
+import os%0Aimport sys%0Aimport copy%0A%0Afrom estnin import estnin%0Afrom estnin import _estnin%0Afrom datetime import date%0Afrom timeit import default_timer as timer%0A%0A%0Adef target(count):%0A # return %5Bp for p in estnin.create(estnin.FEMALE, date(2199, 12, 1), 0)%5D%0A for _ in range(count):%0A #estnin(89912319991, set_checksum=False)%0A estnin(estnin.MIN, set_checksum=False)%0A return count%0A%0Adef print_person(person):%0A print('='*30)%0A print('to str: %25s' %25 person)%0A print('is male: %25s' %25 person.is_male)%0A print('is female: %25s' %25 person.is_female)%0A print('date: %25s' %25 person.date)%0A print('year: %25s' %25 person.year)%0A print('month: %25s' %25 person.month)%0A print('day: %25s' %25 person.day)%0A print('sequence: %25s' %25 person.sequence)%0A print('checksum: %25s' %25 person.checksum)%0A%0Adef performance():%0A %22%22%22%0A %5B*%5D creating list of 91999 elements took: 3.30743s, 27815.870 elems/s%0A baseline%0A%0A %5B*%5D creating list of 91999 elements took: 3.01910s, 30472.310 elems/s%0A __int__ optimization%0A%0A %5B*%5D creating list of 91999 elements took: 2.83526s, 32448.128 elems/s%0A __str__ optimization%0A%0A %5B*%5D creating list of 91999 elements took: 2.77732s, 33125.086 elems/s%0A create does not cast to str%0A %22%22%22%0A times = %5B%5D%0A rounds = 20%0A for c in range(rounds):%0A print(%22%5Cr%5B*%5D round: %7B%7D/%7B%7D%22.format(c+1, rounds), end='')%0A start = timer()%0A persons = target(10000)%0A end = timer()%0A times.append(end - start)%0A print()%0A total = sum(times)/len(times)%0A print(%22%5B*%5D times (ms):%22, ' '.join(map(lambda time: '%7B:.2f%7D'.format(time*100), times)))%0A print(%22%5B*%5D creating list of %7B%7D elements took: average %7B:.3f%7Dms, %7B:.3f%7D elems/s %22.format(persons, total*100, persons/total))%0A%0Adef test():%0A e = estnin(estnin.MIN)%0A print_person(e)%0A o = copy.copy(e)%0A o.month += 1%0A print_person(o)%0A print((-e))%0A print_person(e)%0A%0Aif __name__ == '__main__':%0A 
try:%0A person = estnin.create(estnin.MALE, date(1800, 1, 1), 0)%0A print_person(person)%0A%0A performance()%0A%0A test()%0A%0A person = estnin.create(estnin.MALE, date(1800, 1, 1), 0)%0A%0A print(_estnin(3, date(1989, 8 ,28), 27, 1))%0A%0A except KeyboardInterrupt:%0A sys.exit()%0A
|
|
e785008aa948e929f7e3ecab3445c1347cb128f3
|
Add setup.py file
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,878 @@
+#!/usr/bin/env python%0A%0Afrom setuptools import find_packages%0Afrom setuptools import setup%0A%0A%0Asetup(%0A name='django-signage',%0A version='0.0.1',%0A description='A lightweight web-based digital signage application',%0A license='BSD',%0A author='Jason Bittel',%0A author_email='[email protected]',%0A url='https://github.com/jbittel/django-signage',%0A download_url='https://github.com/jbittel/django-signage',%0A packages=find_packages(),%0A include_package_data=True,%0A classifiers=%5B%0A 'Development Status :: 3 - Alpha',%0A 'Environment :: Web Environment',%0A 'Programming Language :: Python',%0A 'Framework :: Django',%0A 'Intended Audience :: Developers',%0A 'License :: OSI Approved :: BSD License',%0A 'Operating System :: OS Independent',%0A 'Topic :: Software Development :: Libraries :: Python Modules',%0A %5D,%0A)%0A
|
|
7a21009efda275372be7b801e07635bd2a9e47af
|
add setup.py
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,716 @@
+import os%0Afrom setuptools import setup, find_packages%0A%0Aimport coupons%0A%0A%0Adef read(fname):%0A return open(os.path.join(os.path.dirname(__file__), fname)).read()%0A%0Asetup(%0A name='django-coupons',%0A version=coupons.__version__,%0A description='A reuseable Django application for coupon gereration and handling.',%0A long_description=read('README.md'),%0A license=read('LICENSE'),%0A author='byteweaver',%0A author_email='[email protected]',%0A url='https://github.com/byteweaver/django-coupons',%0A packages=find_packages(),%0A install_requires=%5B%0A 'django',%0A %5D,%0A tests_require=%5B%0A 'django-nose',%0A 'coverage',%0A 'django-coverage',%0A %5D,%0A test_suite='coupons.tests',%0A)%0A
|
|
d0430066830350b3ef1621bb7c9d7ae7ae7045f4
|
Add setup.py.
|
setup.py
|
setup.py
|
Python
| 0 |
@@ -0,0 +1,852 @@
+%22%22%22%0A * Copyright (c) 2016. Mingyu Gao%0A * All rights reserved.%0A *%0A%22%22%22%0Aimport os%0Aimport re%0A# To use a consistent encoding%0Afrom codecs import open%0A# Always prefer setuptools over distutils%0Aimport setuptools%0A%0Ahere = os.path.abspath(os.path.dirname(__file__))%0A%0Apackage = 'easypyplot'%0Aversion = '0.0.0'%0Adesc = 'Python matplotlib utilities and wrappers'%0A%0A# Get version number%0Awith open(os.path.join(here, package, '__init__.py'), encoding='utf-8') as fh:%0A matches = re.findall(r'%5E%5Cs*__version__%5Cs*=%5Cs*%5B%5C'%22%5D(%5B%5E%5C'%22%5D+)%5B%5C'%22%5D',%0A fh.read(), re.M)%0A if matches:%0A version = matches%5B-1%5D%0A%0Asetuptools.setup(%0A name=package,%0A version=version,%0A%0A description=desc,%0A%0A author='Mingyu Gao',%0A author_email='[email protected]',%0A%0A #long_description='',%0A #url='',%0A #license='',%0A%0A packages=%5Bpackage%5D,%0A%0A #install_requires=%5B%5D,%0A)%0A
|
|
3f1b78f5156a6ee18020340290dde24d02d01105
|
Add basic setup.py
|
setup.py
|
setup.py
|
Python
| 0.000002 |
@@ -0,0 +1,1034 @@
+%22%22%22%0AFlask-AtlassianConnect%0A-------------%0A%0AThis is the description for that library%0A%22%22%22%0Afrom setuptools import setup%0A%0A%0Asetup(%0A name='AC-Flask-HipChat',%0A version='0.1-dev',%0A url='https://bitbucket.org/mrdon/ac-flask-hipchat',%0A license='APLv2',%0A author='Don Brown',%0A author_email='[email protected]',%0A description='Atlassian Connect library based on Flask for HipChat',%0A long_description=__doc__,%0A packages=%5B'ac_flask', 'ac_flask.hipchat'%5D,%0A zip_safe=False,%0A include_package_data=True,%0A platforms='any',%0A install_requires=%5B%0A 'Flask',%0A 'pymongo',%0A 'redis',%0A 'requests',%0A 'PyJWT'%0A %5D,%0A classifiers=%5B%0A 'Environment :: Web Environment',%0A 'Intended Audience :: Developers',%0A 'License :: OSI Approved :: BSD License',%0A 'Operating System :: OS Independent',%0A 'Programming Language :: Python',%0A 'Topic :: Internet :: WWW/HTTP :: Dynamic Content',%0A 'Topic :: Software Development :: Libraries :: Python Modules'%0A %5D%0A)
|
|
ec25f1901d60814a62790cae2becfb6cac0f5e3e
|
add argparse dep
|
setup.py
|
setup.py
|
#!/usr/bin/python
from setuptools import setup, find_packages
setup(
name='configdb',
version='0.1',
description='database framework for configuration info',
author='ale',
author_email='[email protected]',
url='http://git.autistici.org/p/configdb',
install_requires=['Flask', 'formencode', 'inflect', 'SQLAlchemy>0.7'],
setup_requires=[],
zip_safe=True,
packages=find_packages(),
entry_points={
'console_scripts': [
'configdb-api-server = configdb.server.wsgiapp:main',
'configdb-client = configdb.client.cli:main',
],
},
)
|
Python
| 0.000019 |
@@ -272,16 +272,28 @@
quires=%5B
+'argparse',
'Flask',
@@ -317,16 +317,36 @@
nflect',
+%0A
'SQLAlc
|
089b020b07fda88ba4679d161badb4423a75444e
|
add Python setup script
|
setup.py
|
setup.py
|
Python
| 0 |
@@ -0,0 +1,1196 @@
+# Based on PyPA sample project's setup script.%0A%0A%22%22%22Pymultihash installation script.%22%22%22%0A%0Aimport os.path%0Afrom setuptools import setup%0A%0A# Load readme file into long description.%0Athisdir = os.path.abspath(os.path.dirname(__file__))%0Awith open(os.path.join(thisdir, 'README.rst')) as readme:%0A long_description = readme.read()%0A%0Asetup(%0A name='pymultihash',%0A version='0.5.0a1',%0A%0A description=%22Python implementation of the multihash specification%22,%0A long_description=long_description,%0A%0A url='https://github.com/ivilata/pymultihash',%0A author=%22Ivan Vilata-i-Balaguer%22,%0A author_email='[email protected]',%0A%0A license='MIT',%0A classifiers=%5B%0A 'Development Status :: 3 - Alpha',%0A 'Intended Audience :: Developers',%0A 'License :: OSI Approved :: MIT License',%0A 'Programming Language :: Python :: 3 :: Only',%0A 'Programming Language :: Python :: 3.4',%0A 'Programming Language :: Python :: 3.5',%0A 'Topic :: Security :: Cryptography',%0A %5D,%0A keywords=%22multihash hash digest format ASCII encoding%22,%0A%0A packages=%5B'multihash'%5D,%0A install_requires=%5B%5D,%0A extras_require=%7B%0A 'sha3': %5B'sha3'%5D,%0A 'blake2': %5B'pyblake2'%5D,%0A %7D,%0A)%0A
|
|
67b5eb144dbe14c134657ccc807343f361c5e249
|
add setup.py
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,223 @@
+from distutils.core import setup%0A%0Aversion = '0.1.0.dev0'%0A%0Asetup(name='sworkflow',%0A version=version,%0A description='Simple Workflow',%0A url='https://github.com/mydeco-dev-team/sworkflow',%0A packages=%5B'sworkflow'%5D,%0A)
|
|
4161de9755b531825e83f684c964441bff9ffa7d
|
bump version to 1.0.0
|
setup.py
|
setup.py
|
"""
setup.py
"""
from setuptools import setup
version = "0.0.4"
setup(
name="nco",
version=version,
author="Joe Hamman",
author_email="[email protected]",
license="MIT",
description="""python bindings to NCO""",
packages=["nco"],
py_modules=["nco.nco", "nco.custom"],
url="https://github.com/nco/pynco",
download_url="https://raw2.github.com/nco/pynco/tarball/{0}".format(version),
keywords=["netcdf", "climate"],
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Utilities",
"Operating System :: POSIX",
"Programming Language :: Python",
],
python_requires='>=3.6',
tests_require=["dateutil", "h5py", "netcdf4", "numpy", "pytest", "scipy"],
)
|
Python
| 0 |
@@ -56,13 +56,13 @@
= %22
+1.
0.0
-.4
%22%0A%0As
|
d0b1762a098e78ee9d012628ad96d6a18e8d2565
|
Create setup.py
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,117 @@
+from distutils.core import setup%0Aimport py2exe%0A%0Asetup(console=%5B%22./server.py%22%5D, data_files=%5B('.', %5B'./config.ini'%5D)%5D)%0A
|
|
7b6610e03d4485575b18881c375f83e999d20459
|
Add setup.py #1
|
setup.py
|
setup.py
|
Python
| 0 |
@@ -0,0 +1,1446 @@
+from setuptools import setup%0Aimport io%0Aimport os%0A%0Ahere = os.path.abspath(os.path.dirname(__file__))%0A%0Adef read(*filenames, **kwargs):%0A encoding = kwargs.get('encoding', 'utf-8')%0A sep = kwargs.get('sep', '%5Cn')%0A buf = %5B%5D%0A for filename in filenames:%0A with io.open(filename, encoding=encoding) as f:%0A buf.append(f.read())%0A return sep.join(buf)%0A%0Along_description = read('README.md')%0A%0Asetup(%0A name='mongopool',%0A version='0.1',%0A url='http://github.com/ubervu/mongopool/',%0A description='Tool that manages your mongo clients to different clusters and maps databases to clients',%0A long_description=long_description,%0A license='Apache Software License',%0A author='UberVU',%0A install_requires=%5B'pymongo%3E=2.4'%5D,%0A # author_email='[email protected]',%0A packages=%5B'mongopool'%5D,%0A include_package_data=True,%0A platforms='any',%0A test_suite='nose.collector',%0A tests_require=%5B'nose', 'mock'%5D,%0A classifiers=%5B%0A 'Development Status :: 4 - Beta',%0A 'Environment :: Web Environment',%0A 'Intended Audience :: Developers',%0A 'License :: OSI Approved :: Apache Software License',%0A 'Natural Language :: English',%0A 'Operating System :: OS Independent',%0A 'Programming Language :: Python',%0A 'Topic :: Database',%0A 'Topic :: Software Development :: Libraries :: Python Modules',%0A %5D,%0A extras_require=%7B%0A 'testing': %5B'nose'%5D,%0A %7D%0A)%0A
|
|
c43802f4cc071c6baf31f8d1461ce8c96e38fa9e
|
Bump greenlet==0.4.0 to support ARM architectures.
|
setup.py
|
setup.py
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
import os
import sys
# Add /usr/local/include to the path for macs, fixes easy_install for several packages (like gevent and pyyaml)
if sys.platform == 'darwin':
os.environ['C_INCLUDE_PATH'] = '/usr/local/include'
version = '0.1.0'
setup( name = 'pyon',
version = version,
description = 'OOI ION Python Capability Container and Core Modules',
url = 'https://github.com/ooici/pyon',
download_url = 'http://ooici.net/releases',
license = 'Apache 2.0',
author = 'Adam R. Smith',
author_email = '[email protected]',
keywords = ['ooici','ioncore', 'pyon'],
packages = find_packages(),
entry_points = {
'nose.plugins.0.10': [
'pycc_plugin=pyon.util.pycc_plugin:PYCC',
'timer_plugin=pyon.util.timer_plugin:TestTimer',
'insulate=pyon.util.insulate:Insulate',
'insulateslave=pyon.util.insulate:InsulateSlave',
'gevent_profiler=pyon.util.nose_gevent_profiler:TestGeventProfiler'
],
'console_scripts' : [
'pycc=scripts.pycc:entry',
'control_cc=scripts.control_cc:main',
'generate_interfaces=scripts.generate_interfaces:main',
'store_interfaces=scripts.store_interfaces:main',
'json_report=scripts.json_report:main',
'clear_couch=pyon.datastore.clear_couch_util:main',
]
},
dependency_links = [
'http://ooici.net/releases',
'https://github.com/ooici/gevent-profiler/tarball/master#egg=python-gevent-profiler'
],
test_suite = 'pyon',
package_data = {'': ['*.xml']},
install_requires = [
# Patched greenlet to work on ARMS
'greenlet==0.3.1-p1',
'gevent==0.13.6',
'simplejson==2.1.6',
'msgpack-python==0.1.13',
'setproctitle==1.1.2',
'pyyaml==3.10',
'pika==0.9.5',
'httplib2>=0.7.2',
'pyzmq==2.1.7',
'gevent_zeromq==0.2.0',
'zope.interface',
'couchdb==0.8',
# 'lockfile==0.9.1',
'python-daemon==1.6',
'M2Crypto==0.21.1-pl1',
'coverage==3.5',
'nose==1.1.2',
'ipython==0.11',
'readline==6.2.1',
'mock==0.8',
'ndg-xacml==0.5.1',
'h5py==2.0.1', # see: http://www.hdfgroup.org/HDF5/release/obtain5.html
'python-gevent-profiler',
#'lxml==2.3.4', # Fails to compile on Linux ??!??
# DM related dependencies for 'tables'
# 'numpy==1.6.1',
# 'numexpr==1.4.2',
# 'cython==0.14.1',
# 'tables==2.3',
],
)
|
Python
| 0 |
@@ -1903,55 +1903,8 @@
= %5B%0A
- # Patched greenlet to work on ARMS%0A
@@ -1928,14 +1928,11 @@
==0.
-3.1-p1
+4.0
',%0A
|
3a6dd52e3cdfc5eca51d6dac4eb0701a1a04d550
|
make version 0.3.5
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='DukeDSClient',
version='0.3.4',
description='Command line tool(ddsclient) to upload/manage projects on the duke-data-service.',
url='https://github.com/Duke-GCB/DukeDSClient',
keywords='duke dds dukedataservice',
author='John Bradley',
license='MIT',
packages=['ddsc','ddsc.core'],
install_requires=[
'requests',
'PyYAML',
],
test_suite='nose.collector',
tests_require=['nose', 'mock'],
entry_points={
'console_scripts': [
'ddsclient = ddsc.__main__:main'
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
Python
| 0.000016 |
@@ -72,17 +72,17 @@
on='0.3.
-4
+5
',%0A
|
995c75162a00b89fe23f43ec12f5e9495deb7799
|
add optionnal dependancies
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim:set expandtab tabstop=4 shiftwidth=4:
import os
import re
import sys
from distutils.core import setup
#some install path variables
sysconfdir = os.getenv("SYSCONFDIR", "/etc")
datarootdir = os.getenv("DATAROOTDIR", sys.prefix)
data_dir = os.path.join(sys.prefix, 'share' ,'dnscherry')
small_description = 'A simple web application to manage DNS zones'
# change requirements according to python version
if sys.version_info[0] == 2:
install_requires = [
'CherryPy >= 3.0.0',
'dnspython',
'Mako'
],
elif sys.version_info[0] == 3:
install_requires = [
'CherryPy >= 3.0.0',
'dnspython3',
'Mako'
],
else:
print('unsupported version')
exit(1)
try:
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
description = f.read()
f.close()
except IOError:
description = small_description
try:
license = open('LICENSE').read()
except IOError:
license = 'MIT'
try:
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
except ImportError:
from distutils.core import setup
PyTest = lambda x: x
# just a small function to easily install a complete directory
def get_list_files(basedir, targetdir):
return_list = []
for root, dirs, files in os.walk(basedir):
subpath = re.sub(r'' + basedir + '[\/]*', '', root)
files_list = []
for f in files:
files_list.append(os.path.join(root, f))
return_list.append((os.path.join(targetdir, subpath), files_list))
return return_list
resources_files = get_list_files('resources',
os.path.join(datarootdir, 'share', 'dnscherry'))
resources_files.append((
os.path.join(sysconfdir, 'dnscherry'),
[ 'conf/dnscherry.ini']
))
setup(
name = 'dnscherry',
version = '0.0.0',
zip_safe=False,
author = 'Pierre-Francois Carpentier',
author_email = '[email protected]',
packages = ['dnscherry'],
data_files = resources_files,
scripts = ['scripts/dnscherryd'],
url = 'https://github.com/kakwa/dnscherry',
license = license,
description = small_description,
long_description = description,
install_requires = install_requires,
tests_require=['pytest', 'mechanize'],
cmdclass={'test': PyTest},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: System Administrators',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3']
)
|
Python
| 0 |
@@ -2410,16 +2410,34 @@
scherry'
+, 'dnscherry.auth'
%5D,%0A d
@@ -2706,17 +2706,19 @@
_require
-=
+ =
%5B'pytest
@@ -2722,22 +2722,162 @@
est'
-, 'mechanize'%5D
+%5D,%0A extras_require = %7B%0A 'auth_htpasswd' : %5B'passlib'%5D,%0A 'auth_ldap' : %5B'python-ldap'%5D,%0A 'fastcgi' : %5B'flup'%5D%0A %7D
,%0A
|
b25e21745ecdc5c03b3229ba77ee51b5fdd1561d
|
Move scapy to scapyproto to avoid breaking import of scapy from inside of protocols
|
ooni/protocols/scapyproto.py
|
ooni/protocols/scapyproto.py
|
Python
| 0 |
@@ -0,0 +1,1417 @@
+import random%0Afrom zope.interface import implements%0Afrom twisted.python import usage%0Afrom twisted.plugin import IPlugin%0Afrom twisted.internet import protocol, defer%0Afrom ooni.plugoo.tests import ITest, OONITest%0Afrom ooni.plugoo.assets import Asset%0Afrom ooni.utils import log%0A%0Afrom ooni.lib.txscapy import txsr, txsend%0A%0Aclass ScapyTest(OONITest):%0A %22%22%22%0A A utility class for writing scapy driven OONI tests.%0A %22%22%22%0A%0A receive = True%0A timeout = None%0A pcapfile = 'scapytest.pcap'%0A def initialize(self, reactor=None):%0A%0A if not self.reactor:%0A from twisted.internet import reactor%0A self.reactor = reactor%0A%0A self.request = %7B%7D%0A self.response = %7B%7D%0A%0A def experiment(self, args):%0A log.msg(%22Running experiment%22)%0A if self.receive:%0A log.msg(%22Sending and receiving packets.%22)%0A d = txsr(self.build_packets(), pcapfile=self.pcapfile,%0A timeout=self.timeout)%0A else:%0A log.msg(%22Sending packets.%22)%0A d = txsend(self.build_packets())%0A%0A def finished(data):%0A log.msg(%22Finished sending%22)%0A return data%0A%0A d.addCallback(finished)%0A return d%0A%0A def build_packets(self):%0A %22%22%22%0A Override this method to build scapy packets.%0A %22%22%22%0A from scapy.all import IP, TCP%0A return IP()/TCP()%0A%0A def load_assets(self):%0A return %7B%7D%0A%0A
|
|
992191d290df8d7764a272c3b45e2f7b937456ec
|
add fib
|
misc/py3/fib.py
|
misc/py3/fib.py
|
Python
| 0.999999 |
@@ -0,0 +1,179 @@
+#!/usr/bin/env python%0A%0A# Python 3: Fibonacci series up to n%0A%0Adef fib(n):%0A a, b = 0, 1%0A while a %3C n:%0A print(a, end=' ')%0A a, b = b, a + b%0A print()%0A%0Afib(1000)%0A
|
|
abf7b0ffd86656f8311da7bfde65663d35ffd543
|
fix for using stencilview
|
kivy/uix/scrollview.py
|
kivy/uix/scrollview.py
|
'''
ScrollView widget
'''
__all__ = ('ScrollView', )
from kivy.uix.stencil import StencilView
from kivy.uix.scatter import ScatterPlane
class ScrollView(StencilView):
'''
ScrollView:
A ScrollView provides a scrollable/pannable viewport
which is clipped to the ScrollView's bounding box.
'''
def __init__(self, **kwargs):
self.viewport = ScatterPlane()
super(ScrollView, self).__init__(**kwargs)
super(ScrollView, self).add_widget(self.viewport)
self.viewport.bind(size=self.size)
def add_widget(self, widget):
self.viewport.add_widget(widget)
def remove_widget(self, widget):
self.viewport.remove_widget(widget)
def clear_widgets(self):
self.viewport.clear()
def on_touch_down(self, touch):
if self.collide_point(*touch.pos):
return super(ScrollView, self).on_touch_down(touch)
def on_touch_move(self, touch):
if self.collide_point(*touch.pos):
return super(ScrollView, self).on_touch_move(touch)
def on_touch_up(self, touch):
if self.collide_point(*touch.pos):
return super(ScrollView, self).on_touch_up(touch)
|
Python
| 0.000001 |
@@ -7,19 +7,130 @@
roll
+
View
- widget
+%0A===========%0A%0AA ScrollView provides a scrollable/pannable viewport which is clipped to the%0AScrollView's bounding box.
%0A'''
@@ -180,16 +180,20 @@
.stencil
+view
import
@@ -286,21 +286,16 @@
%0A '''
-%0A
ScrollVi
@@ -300,128 +300,62 @@
View
-:%0A A ScrollView provides a scrollable/pannable viewport%0A which is clipped to the ScrollView's bounding box
+ class. See module documentation for more informations
.%0A
|
befa79ec76752f0811b49ec323813e6e1931638d
|
Create solution.py
|
hackerrank/algorithms/implementation/medium/bigger_is_greater/py/solution.py
|
hackerrank/algorithms/implementation/medium/bigger_is_greater/py/solution.py
|
Python
| 0.000018 |
@@ -0,0 +1,832 @@
+def solution(s):%0A #%0A # The next permutation algorithm. For more information, please look up:%0A # %5Bhref.%5D https://www.nayuki.io/page/next-lexicographical-permutation-algorithm%0A # %5Bhref.%5D https://en.wikipedia.org/wiki/Permutation#Generation_in_lexicographic_order%0A #%0A %0A chars = list(s)%0A i = len(chars) - 1%0A %0A while i %3E 0 and chars%5Bi - 1%5D %3E= chars%5Bi%5D:%0A i -= 1%0A%0A if i == 0:%0A return None%0A%0A j = len(chars) - 1%0A%0A while chars%5Bj%5D %3C= chars%5Bi - 1%5D:%0A j -= 1%0A%0A chars%5Bi - 1%5D, chars%5Bj%5D = chars%5Bj%5D, chars%5Bi - 1%5D%0A%0A return ''.join(chars%5B:i%5D + list(reversed(chars%5Bi:%5D)))%0A %0AtestCount = int(input())%0A%0Afor testId in range(testCount):%0A word = input().strip() %0A greater = solution(word)%0A %0A if greater:%0A print(greater)%0A else:%0A print('no answer')%0A
|
|
df8ddd56ad51f0a644696cb0ff12c2e7a17c5913
|
Create lonely-pixel-i.py
|
Python/lonely-pixel-i.py
|
Python/lonely-pixel-i.py
|
Python
| 0.00022 |
@@ -0,0 +1,960 @@
+# Time: O(m * n)%0A# Space: O(m + n)%0A%0Aclass Solution(object):%0A def findLonelyPixel(self, picture):%0A %22%22%22%0A :type picture: List%5BList%5Bstr%5D%5D%0A :rtype: int%0A %22%22%22%0A rows, cols = %5B0%5D * len(picture), %5B0%5D * len(picture%5B0%5D)%0A for i in xrange(len(picture)):%0A for j in xrange(len(picture%5B0%5D)):%0A if picture%5Bi%5D%5Bj%5D == 'B':%0A rows%5Bi%5D += 1%0A cols%5Bj%5D += 1%0A%0A result = 0%0A for i in xrange(len(picture)):%0A if rows%5Bi%5D == 1:%0A for j in xrange(len(picture%5B0%5D)):%0A result += picture%5Bi%5D%5Bj%5D == 'B' and cols%5Bj%5D == 1%0A return result%0A%0A%0Aclass Solution2(object):%0A def findLonelyPixel(self, picture):%0A %22%22%22%0A :type picture: List%5BList%5Bstr%5D%5D%0A :type N: int%0A :rtype: int%0A %22%22%22%0A return sum(col.count('B') == 1 == picture%5Bcol.index('B')%5D.count('B') %5C%0A for col in zip(*picture))%0A
|
|
c1dcb46e95d5b96ecf45db2e1f466b6f99330e1c
|
Add VimwikiTask cache-ing
|
taskwiki/cache.py
|
taskwiki/cache.py
|
import copy
import vim
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = self.tw.tasks.get(uuid=key)
self.cache[key] = task
return task
def __iter__(self):
iterated_cache = copy.copy(self.cache)
while iterated_cache.keys():
for key in list(iterated_cache.keys()):
task = iterated_cache[key]
if all([t.line_number not in iterated_cache.keys()
for t in task.add_dependencies]):
del iterated_cache[key]
yield task
def reset(self):
self.cache = dict()
def update_tasks(self):
# Select all tasks in the files that have UUIDs
uuids = [t['uuid'] for t in self.cache.values() if t.saved]
# Get them out of TaskWarrior at once
tasks = self.tw.filter(uuid=','.join(tasks))
# Update each task in the cache
for task in tasks:
self.cache[task['uuid']] = task
|
Python
| 0 |
@@ -203,32 +203,77 @@
):%0A self.
+task_cache = dict()%0A self.vimwikitask_
cache = dict()%0A
@@ -325,16 +325,108 @@
, key):%0A
+ # String keys refer to the Task objects%0A if type(key) in (str, unicode):%0A
@@ -433,24 +433,29 @@
task = self.
+task_
cache.get(ke
@@ -458,16 +458,20 @@
t(key)%0A%0A
+
@@ -491,32 +491,36 @@
ne:%0A
+
task = self.tw.t
@@ -546,29 +546,38 @@
+
+
self.
+task_
cache%5Bkey%5D =
@@ -591,16 +591,20 @@
+
+
return t
@@ -611,25 +611,498 @@
ask%0A
-%0A def __iter__
+ # Integer keys (line numbers) refer to the VimwikiTask objects%0A elif type(key) is int:%0A vimwikitask = self.vimwikitask_cache.get(key)%0A%0A if vimwikitask is None:%0A vimwikitask = VimwikiTask.from_line(self, key)%0A return vimwikitask # May return None if the line has no task%0A # Anything else is wrong%0A else:%0A raise ValueError(%22Wrong key type: %25s (%25s)%22 %25 (key, type(key)))%0A%0A def iterate_vimwiki_tasks
(sel
@@ -1141,24 +1141,29 @@
y.copy(self.
+task_
cache)%0A
@@ -1314,17 +1314,18 @@
f all(%5Bt
-.
+%5B'
line_num
@@ -1327,16 +1327,18 @@
e_number
+'%5D
not in
@@ -1523,24 +1523,69 @@
self.
+task_cache = dict()%0A self.vimwikitask_
cache = dict
@@ -1709,24 +1709,29 @@
r t in self.
+task_
cache.values
@@ -1930,16 +1930,21 @@
self.
+task_
cache%5Bta
|
67c3c0e3c165dc73f548cff57d6cb390614d5aad
|
Bring back old watcher module
|
virtool/watcher.py
|
virtool/watcher.py
|
Python
| 0 |
@@ -0,0 +1,2525 @@
+import os%0Aimport time%0Aimport logging%0A%0Afrom virtool.utils import file_stats%0Afrom setproctitle import setproctitle%0Afrom multiprocessing import Process%0Afrom inotify.adapters import Inotify%0A%0Alogger = logging.getLogger(__name__)%0A%0ATYPE_NAME_DICT = %7B%0A %22IN_CREATE%22: %22create%22,%0A %22IN_MODIFY%22: %22modify%22,%0A %22IN_DELETE%22: %22delete%22,%0A %22IN_MOVED_FROM%22: %22delete%22,%0A %22IN_CLOSE_WRITE%22: %22close%22%0A%7D%0A%0A%0Aprojector = %5B%0A %22_id%22,%0A %22_version%22,%0A %22name%22,%0A %22size_end%22,%0A %22size_now%22,%0A %22timestamp%22,%0A %22file_type%22,%0A %22created%22,%0A %22reserved%22,%0A %22ready%22%0A%5D%0A%0A%0Aclass Watcher(Process):%0A%0A def __init__(self, path, queue, interval=0.300):%0A super().__init__()%0A%0A self.path = path%0A self.queue = queue%0A self.interval = interval%0A self.notifier = Inotify()%0A%0A def run(self):%0A%0A setproctitle(%22virtool-inotify%22)%0A%0A self.notifier.add_watch(bytes(self.path, encoding=%22utf-8%22))%0A%0A last_modification = time.time()%0A%0A try:%0A for event in self.notifier.event_gen():%0A if event is not None:%0A _, type_names, _, filename = event%0A%0A if filename and type_names%5B0%5D in TYPE_NAME_DICT:%0A assert len(type_names) == 1%0A%0A action = TYPE_NAME_DICT%5Btype_names%5B0%5D%5D%0A%0A filename = filename.decode()%0A%0A now = time.time()%0A%0A if action in %5B%22create%22, %22modify%22, %22close%22%5D:%0A file_entry = file_stats(os.path.join(self.path, filename))%0A file_entry%5B%22filename%22%5D = filename%0A%0A if action == %22modify%22 and (now - last_modification) %3E self.interval:%0A self.queue.put(%7B%0A %22action%22: action,%0A %22file%22: file_entry%0A %7D)%0A%0A last_modification = now%0A%0A if action in %5B%22create%22, %22close%22%5D:%0A self.queue.put(%7B%0A %22action%22: action,%0A %22file%22: file_entry%0A %7D)%0A%0A if action == %22delete%22:%0A self.queue.put(%7B%0A %22action%22: %22delete%22,%0A %22file%22: filename%0A %7D)%0A%0A except 
KeyboardInterrupt:%0A logging.debug(%22Stopped file watcher%22)
|
|
4d08ff430eba96ebef3f0824fe83f5bc2a236675
|
add share_mem
|
multiprocessing/share_mem.py
|
multiprocessing/share_mem.py
|
Python
| 0 |
@@ -0,0 +1,357 @@
+#!/usr/bin/env python%0A%0Afrom multiprocessing import Process, Value, Array%0A%0Adef f(n, a):%0A n.value = 3.1415927%0A for i in range(len(a)):%0A a%5Bi%5D = -a%5Bi%5D%0A%0Aif __name__ == '__main__':%0A num = Value('d', 0.0)%0A arr = Array('i', range(10))%0A%0A p = Process(target=f, args=(num, arr))%0A p.start()%0A p.join()%0A%0A print(num.value)%0A print(arr%5B:%5D)%0A
|
|
119aabe89912c324d1588601c9cbc4b4a48e16ae
|
Add restarting_flup.py
|
restarting_flup.py
|
restarting_flup.py
|
Python
| 0.000007 |
@@ -0,0 +1,870 @@
+#!/usr/bin/env python%0A%22%22%22%0A%0AThis is the same as the usual .fcgi file%5B1%5D for using FastCGI with flup,%0Aexcept that this one terminates itself when the .fcgi file%E2%80%99s modification%0Adate changes. Assuming you have something%5B2%5D that restarts FastCGI processes%0Aas needed (which you should anyway), this effectively allows you to reload%0Athe application by just %60touch%60ing one file.%0A%0A%5B1%5D http://flask.pocoo.org/docs/deploying/fastcgi/%0A%5B2%5D Something like Circus, Supervisord, or Lighttpd with %60bin-path%60 configured.%0A%0A%22%22%22%0A%0Afrom os.path import getmtime%0Afrom flup.server.fcgi import WSGIServer%0A%0A%0ASTART_TIME = getmtime(__file__)%0A%0A%0Aclass RestartingServer(WSGIServer):%0A def _mainloopPeriodic(self):%0A WSGIServer._mainloopPeriodic(self)%0A if getmtime(__file__) != START_TIME:%0A self._keepGoing = False%0A%0A%0Afrom YOUR_APPLICATION import app%0ARestartingServer(app).run()%0A
|
|
7496159322a173bb6265aed2dac4e50ad64de858
|
Add base fullfill service
|
service.py
|
service.py
|
Python
| 0 |
@@ -0,0 +1,458 @@
+from flask import Flask%0Afrom flask import jsonify%0Afrom flask import request%0A%0Aapp = Flask(__name__)%0A%[email protected](%22/chainBot%22, methods=%5B'POST'%5D)%0Adef chainBot():%0A print(request.data)%0A return jsonify(%7B%0A %22speech%22: %22My Test Speech%22,%0A %22displayText%22: %22My Test Text%22,%0A %22data%22: %7B%7D,%0A %22contextOut%22: %5B%5D,%0A %22source%22: %22%22%0A %7D), 200, %7B'Content-Type': 'text/css; charset=utf-8'%7D%0A%0A%0Aif __name__ == %22__main__%22:%0A app.run(%220.0.0.0%22, 80)%0A
|
|
837fd89008a430282f663e64fc03ce3df84018c6
|
Use build/repack_action.gypi instead of doing it manually.
|
chromecast/chromecast.gyp
|
chromecast/chromecast.gyp
|
# Copyright 2014 The Chromium Authors. All Rights Reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
'chromecast_branding%': 'Chromium',
},
'target_defaults': {
'include_dirs': [
'..', # Root of Chromium checkout
],
},
'targets': [
{
'target_name': 'cast_common',
'type': '<(component)',
'dependencies': [
'../base/base.gyp:base',
],
'sources': [
'common/cast_paths.cc',
'common/cast_paths.h',
],
'conditions': [
['chromecast_branding=="Chrome"', {
'dependencies': [
'internal/chromecast_internal.gyp:cast_common_internal',
],
}],
],
},
{
'target_name': 'cast_service',
'type': '<(component)',
'dependencies': [
'../skia/skia.gyp:skia',
],
'sources': [
'service/cast_service.cc',
'service/cast_service.h',
],
'conditions': [
['chromecast_branding=="Chrome"', {
'dependencies': [
'internal/chromecast_internal.gyp:cast_service_internal',
],
}, {
'dependencies': [
'../base/base.gyp:base',
'../content/content.gyp:content',
],
'sources': [
'service/cast_service_simple.cc',
'service/cast_service_simple.h',
],
}],
],
},
{
'target_name': 'cast_shell_resources',
'type': 'none',
# Place holder for cast_shell specific resources.
},
{
'target_name': 'cast_shell_pak',
'type': 'none',
'dependencies': [
'cast_shell_resources',
'../content/browser/devtools/devtools_resources.gyp:devtools_resources',
'../content/content_resources.gyp:content_resources',
'../net/net.gyp:net_resources',
'../ui/resources/ui_resources.gyp:ui_resources',
'../ui/strings/ui_strings.gyp:ui_strings',
'../webkit/webkit_resources.gyp:webkit_resources',
'../webkit/webkit_resources.gyp:webkit_strings',
],
'variables': {
'repack_path': '../tools/grit/grit/format/repack.py',
},
'actions': [
{
'action_name': 'repack_cast_shell_pack',
'variables': {
'pak_inputs': [
'<(SHARED_INTERMEDIATE_DIR)/content/content_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/net/net_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/ui/app_locale_settings/app_locale_settings_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/ui/resources/ui_resources_100_percent.pak',
'<(SHARED_INTERMEDIATE_DIR)/ui/resources/webui_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/ui/strings/ui_strings_en-US.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/devtools_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/blink_resources.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_resources_100_percent.pak',
'<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_en-US.pak',
],
},
'inputs': [
'<(repack_path)',
'<@(pak_inputs)',
],
'action': ['python', '<(repack_path)', '<@(_outputs)',
'<@(pak_inputs)'],
'outputs': [
'<(PRODUCT_DIR)/cast_shell.pak',
],
},
],
},
{
'target_name': 'cast_shell',
'type': 'executable',
'dependencies': [
'cast_common',
'cast_service',
'cast_shell_pak',
'../ui/aura/aura.gyp:aura_test_support',
'../content/content.gyp:content',
'../content/content.gyp:content_app_browser',
'../skia/skia.gyp:skia',
],
'sources': [
'net/network_change_notifier_cast.cc',
'net/network_change_notifier_cast.h',
'net/network_change_notifier_factory_cast.cc',
'net/network_change_notifier_factory_cast.h',
'shell/app/cast_main.cc',
'shell/app/cast_main_delegate.cc',
'shell/app/cast_main_delegate.h',
'shell/browser/cast_browser_context.cc',
'shell/browser/cast_browser_context.h',
'shell/browser/cast_browser_main_parts.cc',
'shell/browser/cast_browser_main_parts.h',
'shell/browser/cast_content_browser_client.cc',
'shell/browser/cast_content_browser_client.h',
'shell/browser/cast_http_user_agent_settings.cc',
'shell/browser/cast_http_user_agent_settings.h',
'shell/browser/geolocation/cast_access_token_store.cc',
'shell/browser/geolocation/cast_access_token_store.h',
'shell/browser/url_request_context_factory.cc',
'shell/browser/url_request_context_factory.h',
'shell/common/cast_content_client.cc',
'shell/common/cast_content_client.h',
'shell/renderer/cast_content_renderer_client.cc',
'shell/renderer/cast_content_renderer_client.h',
],
'conditions': [
['chromecast_branding=="Chrome"', {
'dependencies': [
'internal/chromecast_internal.gyp:cast_gfx_internal',
],
}, {
'dependencies': [
'../ui/ozone/ozone.gyp:eglplatform_shim_x11',
],
}],
],
},
], # end of targets
}
|
Python
| 0.000002 |
@@ -2174,100 +2174,8 @@
%5D,%0A
- 'variables': %7B%0A 'repack_path': '../tools/grit/grit/format/repack.py',%0A %7D,%0A
@@ -3113,90 +3113,54 @@
-%7D,%0A 'in
+ 'pak_out
put
-s
':
-%5B%0A '%3C(repack_path)',%0A '%3C@(pak_inputs)
+'%3C(PRODUCT_DIR)/cast_shell.pak
',%0A
@@ -3160,33 +3160,33 @@
pak',%0A
-%5D
+%7D
,%0A 'act
@@ -3186,180 +3186,50 @@
'
-action': %5B'python', '%3C(repack_path)', '%3C@(_outputs)',%0A '%3C@(pak_inputs)'%5D,%0A 'outputs': %5B%0A '%3C(PRODUCT_DIR)/cast_shell.pak',%0A
+includes': %5B '../build/repack_action.gypi'
%5D,%0A
|
5b01f26d92a32964bcc97cbf9429177bce7c89be
|
add tests for progress indicator
|
tests/test_util.py
|
tests/test_util.py
|
Python
| 0 |
@@ -0,0 +1,624 @@
+# -*- coding: utf-8 -*-%0Afrom StringIO import StringIO%0Afrom biseqt.util import ProgressIndicator%0A%0A%0Adef test_progress_indicator():%0A logs = StringIO()%0A ProgressIndicator.write = lambda self, message: logs.write(message)%0A%0A indic = ProgressIndicator(num_total=1)%0A indic.start()%0A indic.progress()%0A assert logs.getvalue().strip() == '0/1 %5Cr1/1', %5C%0A 'Counting progress indicator works'%0A%0A logs = StringIO()%0A indic = ProgressIndicator(num_total=1, percentage=True)%0A indic.start()%0A indic.progress()%0A assert logs.getvalue().strip() == '0%25 %5Cr100%25', %5C%0A 'Percentage progress indicator works'%0A
|
|
de7aee058348c00d2cdf244df102010b422e941b
|
Add a place holder for the PSNR metric
|
toolbox/metrics.py
|
toolbox/metrics.py
|
Python
| 0.000009 |
@@ -0,0 +1,56 @@
+def psnr(y_true, y_pred):%0A raise NotImplementedError%0A
|
|
0da51215709f338e77acfa6e7933595d0c1df95d
|
Create SIP OPTIONS sender/receiver.
|
networks/sip.py
|
networks/sip.py
|
Python
| 0 |
@@ -0,0 +1,2246 @@
+# -*- coding: utf-8 -*-%0A%0Aimport argparse%0Aimport socket%0A%0ACRLF = '%5Cr%5Cn'%0A%0A%0Adef send_sip_options(server_host, server_port, client_host, client_port, verbose=True):%0A %22%22%22Sends SIP OPTIONS.%0A%0A :param str server_host: SIP server host (IP address).%0A :param int server_port: SIP server port.%0A :param str client_host: Local client host (IP address).%0A :param int client_port: Local client port.%0A :param bool verbose: If True prints out the request payload.%0A%0A :return: SIP server response.%0A :rtype: str%0A %22%22%22%0A with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:%0A sock.connect((server_host, server_port))%0A payload_fields = (%0A 'OPTIONS sip:127.0.0.1:5060 SIP/2.0',%0A f'Via: SIP/2.0/UDP %7Bclient_host%7D:%7Bclient_port%7D;rport;branch=BRANCH',%0A 'Max-Forwards: 70',%0A f'From: %3Csip:%7Bclient_host%7D%3E;tag=TAG',%0A 'To: %3Csip:127.0.0.1%3E',%0A 'Call-ID: 1',%0A 'CSeq: 1 OPTIONS',%0A 'Content-Length: 0',%0A )%0A payload = CRLF.join(payload_fields).encode('utf-8')%0A if verbose:%0A print('===================')%0A print('SIP server request:')%0A print('===================')%0A print(payload.decode().strip())%0A print('--------------------')%0A print()%0A sock.send(payload)%0A return sock.recv(4096).decode('utf-8')%0A%0A%0Adef main():%0A # prepare argument parser%0A parser = argparse.ArgumentParser()%0A parser.add_argument('server_host', help='SIP server hostname or IP address')%0A parser.add_argument('server_port', nargs='?', default=5060, help='SIP server port (default=5060)')%0A%0A args = parser.parse_args()%0A%0A hostname = socket.gethostname()%0A local_ip = socket.gethostbyname(hostname)%0A%0A client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)%0A client.bind((local_ip, 0)) # get random port%0A client_host, client_port = client.getsockname()%0A%0A response = send_sip_options(args.server_host, int(args.server_port), client_host, client_port)%0A print('====================')%0A print('SIP server response:')%0A 
print('====================')%0A print(response.strip())%0A print('--------------------')%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
3b9a0c0b83dda484586ea9c19091b7da1cae55d1
|
prepare a test file for python
|
test_riak3k.py
|
test_riak3k.py
|
Python
| 0.000002 |
@@ -0,0 +1,99 @@
+#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0Aimport nose%0Afrom nose.tools import *%0A%0Aimport riak3k%0A
|
|
17c9e1a16c5c16c1b49836cc376ddd6408b73de0
|
make the dispatcher a global variable and the deprecation warning more developer friendly
|
coherence/extern/louie.py
|
coherence/extern/louie.py
|
"""
Wrapper module for the louie implementation
"""
import warnings
from coherence.dispatcher import Dispatcher
class Any(object): pass
class All(object): pass
class Anonymous(object): pass
# fake the API
class Dummy(object): pass
signal = Dummy()
sender = Dummy()
#senders
sender.Anonymous = Anonymous
sender.Any = Any
#signals
signal.All = All
# a slightly less raise-y-ish implementation as louie was not so picky, too
class GlobalDispatcher(Dispatcher):
def connect(self, signal, callback, *args, **kw):
if not signal in self.receivers:
# ugly hack
self.receivers[signal] = []
return Dispatcher.connect(self, signal, callback, *args, **kw)
def _get_receivers(self, signal):
try:
return self.receivers[signal]
except KeyError:
return []
_global_dispatcher = GlobalDispatcher()
_global_receivers_pool = {}
def connect(receiver, signal=All, sender=Any, weak=True):
callback = receiver
if signal in (Any, All):
raise NotImplemented("This is not allowed. Signal HAS to be something")
if sender not in (Any, All):
warnings.warn("Seriously! Use the coherence.dispatcher. It IS object based")
receiver = _global_dispatcher.connect(signal, callback)
_global_receivers_pool[(callback, signal)] = receiver
return receiver
def disconnect(receiver, signal=All, sender=Any, weak=True):
callback = receiver
if signal in (Any, All):
raise NotImplemented("This is not allowed. Signal HAS to be something")
if sender not in (Any, All):
warnings.warn("Seriously! Use the coherence.dispatcher. It IS object based")
receiver = _global_receivers_pool.pop((callback, signal))
return _global_dispatcher.disconnect(receiver)
def send(signal=All, sender=Anonymous, *arguments, **named):
if signal in (Any, All):
raise NotImplemented("This is not allowed. Signal HAS to be something")
if sender not in (Anonymous, None):
warnings.warn("Seriously! Use the coherence.dispatcher. It IS object based")
# the first value of the callback shall always be the signal:
return _global_dispatcher.save_emit(signal, *arguments, **named)
def send_minimal(signal=All, sender=Anonymous, *arguments, **named):
return send(signal, sender, *arguments, **named)
def send_exact(signal=All, sender=Anonymous, *arguments, **named):
return send(signal, sender, *arguments, **named)
def send_robust(signal=All, sender=Anonymous, *arguments, **named):
return send(signal, sender, *arguments, **named)
|
Python
| 0 |
@@ -835,16 +835,42 @@
urn %5B%5D%0A%0A
+global _global_dispatcher%0A
_global_
@@ -930,16 +930,145 @@
l = %7B%7D%0A%0A
+def _display_deprecation_warning():%0A warnings.warn(%22extern.louie will soon be deprecated in favor of coherence.dispatcher.%22)%0A%0A
def conn
@@ -1295,83 +1295,37 @@
-warnings.warn(%22Seriously! Use the coherence.dispatcher. It IS object based%22
+_display_deprecation_warning(
)%0A
@@ -1700,83 +1700,37 @@
-warnings.warn(%22Seriously! Use the coherence.dispatcher. It IS object based%22
+_display_deprecation_warning(
)%0A
@@ -2063,83 +2063,37 @@
-warnings.warn(%22Seriously! Use the coherence.dispatcher. It IS object based%22
+_display_deprecation_warning(
)%0A
|
7cf653b7f132b35a69230fcf32dc88bd1b7e29d1
|
OrderWidget.is_hidden should evaluate False
|
mezzanine/core/forms.py
|
mezzanine/core/forms.py
|
from __future__ import unicode_literals
from future.builtins import str
from uuid import uuid4
from django import forms
from django.forms.extras.widgets import SelectDateWidget
from django.utils.safestring import mark_safe
from mezzanine.conf import settings
class Html5Mixin(object):
"""
Mixin for form classes. Adds HTML5 features to forms for client
side validation by the browser, like a "required" attribute and
"email" and "url" input types.
"""
def __init__(self, *args, **kwargs):
super(Html5Mixin, self).__init__(*args, **kwargs)
if hasattr(self, "fields"):
# Autofocus first field
first_field = next(iter(self.fields.values()))
first_field.widget.attrs["autofocus"] = ""
for name, field in self.fields.items():
if settings.FORMS_USE_HTML5:
if isinstance(field, forms.EmailField):
self.fields[name].widget.input_type = "email"
elif isinstance(field, forms.URLField):
self.fields[name].widget.input_type = "url"
if field.required:
self.fields[name].widget.attrs["required"] = ""
_tinymce_js = ()
if settings.GRAPPELLI_INSTALLED:
_tinymce_js = ("grappelli/tinymce/jscripts/tiny_mce/tiny_mce.js",
settings.TINYMCE_SETUP_JS)
class TinyMceWidget(forms.Textarea):
"""
Setup the JS files and targetting CSS class for a textarea to
use TinyMCE.
"""
class Media:
js = _tinymce_js
def __init__(self, *args, **kwargs):
super(TinyMceWidget, self).__init__(*args, **kwargs)
self.attrs["class"] = "mceEditor"
class OrderWidget(forms.HiddenInput):
"""
Add up and down arrows for ordering controls next to a hidden
form field.
"""
def render(self, *args, **kwargs):
rendered = super(OrderWidget, self).render(*args, **kwargs)
arrows = ["<img src='%sadmin/img/admin/arrow-%s.gif' />" %
(settings.STATIC_URL, arrow) for arrow in ("up", "down")]
arrows = "<span class='ordering'>%s</span>" % "".join(arrows)
return rendered + mark_safe(arrows)
class DynamicInlineAdminForm(forms.ModelForm):
"""
Form for ``DynamicInlineAdmin`` that can be collapsed and sorted
with drag and drop using ``OrderWidget``.
"""
class Media:
js = ("mezzanine/js/jquery-ui-1.9.1.custom.min.js",
"mezzanine/js/admin/dynamic_inline.js",)
class SplitSelectDateTimeWidget(forms.SplitDateTimeWidget):
"""
Combines Django's ``SelectDateTimeWidget`` and ``SelectDateWidget``.
"""
def __init__(self, attrs=None, date_format=None, time_format=None):
date_widget = SelectDateWidget(attrs=attrs)
time_widget = forms.TimeInput(attrs=attrs, format=time_format)
forms.MultiWidget.__init__(self, (date_widget, time_widget), attrs)
class CheckboxSelectMultiple(forms.CheckboxSelectMultiple):
"""
Wraps render with a CSS class for styling.
"""
def render(self, *args, **kwargs):
rendered = super(CheckboxSelectMultiple, self).render(*args, **kwargs)
return mark_safe("<span class='multicheckbox'>%s</span>" % rendered)
def get_edit_form(obj, field_names, data=None, files=None):
"""
Returns the in-line editing form for editing a single model field.
"""
# Map these form fields to their types defined in the forms app so
# we can make use of their custom widgets.
from mezzanine.forms import fields
widget_overrides = {
forms.DateField: fields.DATE,
forms.DateTimeField: fields.DATE_TIME,
forms.EmailField: fields.EMAIL,
}
class EditForm(forms.ModelForm):
"""
In-line editing form for editing a single model field.
"""
app = forms.CharField(widget=forms.HiddenInput)
model = forms.CharField(widget=forms.HiddenInput)
id = forms.CharField(widget=forms.HiddenInput)
fields = forms.CharField(widget=forms.HiddenInput)
class Meta:
model = obj.__class__
fields = field_names.split(",")
def __init__(self, *args, **kwargs):
super(EditForm, self).__init__(*args, **kwargs)
self.uuid = str(uuid4())
for f in self.fields.keys():
field_class = self.fields[f].__class__
try:
widget = fields.WIDGETS[widget_overrides[field_class]]
except KeyError:
pass
else:
self.fields[f].widget = widget()
css_class = self.fields[f].widget.attrs.get("class", "")
css_class += " " + field_class.__name__.lower()
self.fields[f].widget.attrs["class"] = css_class
self.fields[f].widget.attrs["id"] = "%s-%s" % (f, self.uuid)
if settings.FORMS_USE_HTML5 and self.fields[f].required:
self.fields[f].widget.attrs["required"] = ""
initial = {"app": obj._meta.app_label, "id": obj.id,
"fields": field_names, "model": obj._meta.object_name.lower()}
return EditForm(instance=obj, initial=initial, data=data, files=files)
|
Python
| 0.999999 |
@@ -1839,32 +1839,94 @@
field.%0A %22%22%22%0A
+%0A @property%0A def is_hidden(self):%0A return False%0A%0A
def render(s
|
0475e35bb6e0bab1d61c038ddd902e32478211d7
|
Create whois.py
|
whois.py
|
whois.py
|
Python
| 0.000003 |
@@ -0,0 +1,277 @@
+# %E0%B8%AD%E0%B9%88%E0%B8%B2%E0%B8%99%E0%B8%9A%E0%B8%97%E0%B8%84%E0%B8%A7%E0%B8%B2%E0%B8%A1%E0%B9%84%E0%B8%94%E0%B9%89%E0%B8%97%E0%B8%B5%E0%B9%88 https://python3.wannaphong.com/2016/12/%E0%B8%94%E0%B8%B6%E0%B8%87%E0%B8%82%E0%B9%89%E0%B8%AD%E0%B8%A1%E0%B8%B9%E0%B8%A5-whois-%E0%B9%82%E0%B8%94%E0%B9%80%E0%B8%A1%E0%B8%99%E0%B8%94%E0%B9%89%E0%B8%A7%E0%B8%A2-python.html%0A# %E0%B9%80%E0%B8%82%E0%B8%B5%E0%B8%A2%E0%B8%99%E0%B9%82%E0%B8%94%E0%B8%A2 %E0%B8%A7%E0%B8%A3%E0%B8%A3%E0%B8%93%E0%B8%9E%E0%B8%87%E0%B8%A9%E0%B9%8C %E0%B8%A0%E0%B8%B1%E0%B8%97%E0%B8%97%E0%B8%B4%E0%B8%A2%E0%B9%84%E0%B8%9E%E0%B8%9A%E0%B8%B9%E0%B8%A5%E0%B8%A2%E0%B9%8C%0Aimport whois%0Aw = whois.whois('abc.xyz') # %E0%B8%81%E0%B8%A3%E0%B8%AD%E0%B8%81%E0%B9%82%E0%B8%94%E0%B9%80%E0%B8%A1%E0%B8%99%E0%B8%97%E0%B8%B5%E0%B9%88%E0%B8%95%E0%B9%89%E0%B8%AD%E0%B8%87%E0%B8%81%E0%B8%B2%E0%B8%A3%E0%B8%82%E0%B9%89%E0%B8%AD%E0%B8%A1%E0%B8%B9%E0%B8%A5 Whois%0Aprint(w.expiration_date) # %E0%B8%A7%E0%B8%B1%E0%B9%89%E0%B8%99%E0%B8%AB%E0%B8%A1%E0%B8%94%E0%B8%AD%E0%B8%B2%E0%B8%A2%E0%B8%B8%0Aprint(w.text) # %E0%B8%A3%E0%B8%B2%E0%B8%A2%E0%B8%A5%E0%B8%B0%E0%B9%80%E0%B8%AD%E0%B8%B5%E0%B8%A2%E0%B8%94%E0%B9%82%E0%B8%94%E0%B9%80%E0%B8%A1%E0%B8%99%0A
|
|
47f0edcbe4dd4902e679d4f1e384be1795c3d465
|
Add str() calls around messages in tty
|
lib/spack/spack/tty.py
|
lib/spack/spack/tty.py
|
##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
import spack
from spack.color import *
indent = " "
def msg(message, *args):
cprint("@*b{==>} %s" % cescape(message))
for arg in args:
print indent + str(arg)
def info(message, *args, **kwargs):
format = kwargs.get('format', '*b')
cprint("@%s{==>} %s" % (format, cescape(message)))
for arg in args:
print indent + str(arg)
def verbose(message, *args):
if spack.verbose:
info(message, *args, format='c')
def debug(*args):
if spack.debug:
info("Debug: " + message, *args, format='*g')
def error(message, *args):
info("Error: " + message, *args, format='*r')
def warn(message, *args):
info("Warning: " + message, *args, format='*Y')
def die(message, *args):
error(message, *args)
sys.exit(1)
def pkg(message):
"""Outputs a message with a package icon."""
import platform
from version import Version
mac_ver = platform.mac_ver()[0]
if mac_ver and Version(mac_ver) >= Version('10.7'):
print u"\U0001F4E6" + indent,
else:
cwrite('@*g{[+]} ')
print message
def get_number(prompt, **kwargs):
default = kwargs.get('default', None)
abort = kwargs.get('abort', None)
if default is not None and abort is not None:
prompt += ' (default is %s, %s to abort) ' % (default, abort)
elif default is not None:
prompt += ' (default is %s) ' % default
elif abort is not None:
prompt += ' (%s to abort) ' % abort
number = None
while number is None:
ans = raw_input(prompt)
if ans == str(abort):
return None
if ans:
try:
number = int(ans)
if number < 1:
msg("Please enter a valid number.")
number = None
except ValueError:
msg("Please enter a valid number.")
elif default is not None:
number = default
return number
|
Python
| 0.000001 |
@@ -1515,32 +1515,36 @@
format, cescape(
+str(
message)))%0A f
@@ -1537,16 +1537,17 @@
ssage)))
+)
%0A for
@@ -1654,31 +1654,36 @@
info(
+str(
message
+)
, *args, for
@@ -1752,31 +1752,36 @@
%22Debug: %22 +
+str(
message
+)
, *args, for
@@ -1836,31 +1836,36 @@
%22Error: %22 +
+str(
message
+)
, *args, for
@@ -1925,23 +1925,28 @@
ng: %22 +
+str(
message
+)
, *args,
|
416872a1e7191f62dd2353f3e221a9e9c93c161f
|
Add tests for utils.
|
test_utils.py
|
test_utils.py
|
Python
| 0 |
@@ -0,0 +1,1165 @@
+%22%22%22%0ATests for the utils.py file%0A%22%22%22%0Aimport axelrod as axl%0Aimport unittest%0Aimport utils%0Aimport tempfile%0Aimport csv%0A%0Aclass TestUtils(unittest.TestCase):%0A %22%22%22%0A Simple tests for the utils%0A %22%22%22%0A%0A axl.seed(0)%0A players = %5Bs() for s in axl.demo_strategies%5D%0A tournament = axl.Tournament(players)%0A results = tournament.play()%0A%0A def test_label(self):%0A label = utils.label(%22Test%22, self.results)%0A expected_label = %22%7B%7D - turns: %7B%7D, repetitions: %7B%7D, strategies: %7B%7D. %22.format(%22Test%22,%0A self.tournament.turns, self.tournament.repetitions,%0A len(self.tournament.players))%0A%0A def test_summary_data(self):%0A tmpfile = tempfile.NamedTemporaryFile()%0A sd = utils.summary_data(self.results, tmpfile.name)%0A self.assertEqual(len(sd), len(self.tournament.players))%0A self.assertEqual(%5Bplayer.Name for player in sd%5D,%0A self.results.ranked_names)%0A%0A with open(tmpfile.name, %22r%22) as csvfile:%0A csvreader = csv.reader(csvfile)%0A ranked_names = %5Brow%5B1%5D for row in csvreader%5D%5B1:%5D%0A self.assertEqual(ranked_names, self.results.ranked_names)%0A
|
|
b28ace414c7087936ec14665026b78413b1f3791
|
Create __init__.py
|
neutron_dynamic_routing/neutron/cmd/eventlet/agents/__init__.py
|
neutron_dynamic_routing/neutron/cmd/eventlet/agents/__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
bf7ad11cc32af83aab6496ac7d7b911bea3d7876
|
Use new API.
|
l10n_it_pec/model/partner.py
|
l10n_it_pec/model/partner.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Associazione Odoo Italia
# (<http://www.openerp-italia.org>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, orm
class ResPartner(orm.Model):
_inherit = "res.partner"
_columns = {
'pec_mail': fields.char(
'PEC Mail'
),
}
|
Python
| 0 |
@@ -972,31 +972,30 @@
nerp
-.osv
import
+ models,
fields
-, orm
%0A%0A%0Ac
@@ -1014,11 +1014,14 @@
ner(
-orm
+models
.Mod
@@ -1058,42 +1058,21 @@
er%22%0A
+%0A
-_columns = %7B%0A '
pec_mail
': f
@@ -1071,18 +1071,18 @@
mail
-':
+ =
fields.
char
@@ -1081,50 +1081,28 @@
lds.
-c
+C
har(
-%0A 'PEC Mail'%0A ),%0A %7D
+string='PEC Mail')
%0A
|
489d883af246e7de727ea14e01ae4a0cd17f88eb
|
fix emoji on python3.4
|
limbo/plugins/emoji.py
|
limbo/plugins/emoji.py
|
"""!emoji <n> will return n random emoji"""
import re
import random
from emojicodedict import emojiCodeDict
def randomelt(dic):
keys = dic.keys()
i = random.randint(0, len(keys) - 1)
return dic[keys[i]]
def emoji(n=1):
emoji = []
for i in range(n):
emoji.append(randomelt(emojiCodeDict))
return "".join(emoji)
def on_message(msg, server):
text = msg.get("text", "")
match = re.findall(r"(!emoji)\s*(\d+)*", text)
if not match:
return
n = 1 if not match[0][1] else int(match[0][1])
return emoji(n)
|
Python
| 0 |
@@ -133,16 +133,21 @@
keys =
+list(
dic.keys
@@ -148,16 +148,17 @@
c.keys()
+)
%0A i =
|
b35908d8ed8257bfde75953c360112f87c0eccd3
|
add api/urls.py
|
django_comments_xtd/api/urls.py
|
django_comments_xtd/api/urls.py
|
Python
| 0 |
@@ -0,0 +1,838 @@
+from django.urls import path, re_path%0A%0Afrom .views import (%0A CommentCount, CommentCreate, CommentList,%0A CreateReportFlag, ToggleFeedbackFlag,%0A preview_user_avatar,%0A)%0A%0Aurlpatterns = %5B%0A path('comment/', CommentCreate.as_view(),%0A name='comments-xtd-api-create'),%0A path('preview/', preview_user_avatar,%0A name='comments-xtd-api-preview'),%0A re_path(r'%5E(?P%3Ccontent_type%3E%5Cw+%5B-%5D%7B1%7D%5Cw+)/(?P%3Cobject_pk%3E%5B-%5Cw%5D+)/$',%0A CommentList.as_view(), name='comments-xtd-api-list'),%0A re_path(%0A r'%5E(?P%3Ccontent_type%3E%5Cw+%5B-%5D%7B1%7D%5Cw+)/(?P%3Cobject_pk%3E%5B-%5Cw%5D+)/count/$',%0A CommentCount.as_view(), name='comments-xtd-api-count'),%0A path('feedback/', ToggleFeedbackFlag.as_view(),%0A name='comments-xtd-api-feedback'),%0A path('flag/', CreateReportFlag.as_view(),%0A name='comments-xtd-api-flag'),%0A%5D%0A
|
|
4a9a844353a565a596148e31c17dad6b57cda081
|
Add text encoding functionality.
|
txtencoding.py
|
txtencoding.py
|
Python
| 0.000001 |
@@ -0,0 +1,701 @@
+#!/usr/bin/env python3%0A%0Afrom chardet.universaldetector import UniversalDetector%0A%0A%0Aclass TxtEncoding:%0A def __init__(self):%0A # inspired by https://chardet.readthedocs.org/en/latest/usage.html#example-detecting-encodings-of-multiple-files%0A self.detector = UniversalDetector()%0A%0A def detectEncoding(self, fname):%0A '''Detect the encoding of file fname.%0A Returns a dictionary with %7B'encoding', 'confidence'%7D fields.'''%0A self.detector.reset()%0A with open(fname, 'rb') as f:%0A for line in f:%0A self.detector.feed(line)%0A if self.detector.done: break%0A self.detector.close()%0A return self.detector.result%0A %0A
|
|
ff7d96204d528e65faec8312e98fd727bd163d08
|
Save and load files.
|
scr/model/model.py
|
scr/model/model.py
|
Python
| 0 |
@@ -0,0 +1,2360 @@
+# This Source Code Form is subject to the terms of the Mozilla Public%0A# License, v. 2.0. If a copy of the MPL was not distributed with this%0A# file, You can obtain one at http://mozilla.org/MPL/2.0/.%0A%0A%22%22%22%0AModel implementation%0A%22%22%22%0A%0Aimport json%0Afrom pathlib import Path%0A%0A_EXTENSION = '.json'%0A%0A%0Adef save(data, file_name, folder='', home_path=Path.home()):%0A fp = Path(home_path, folder)%0A while True:%0A if fp.exists():%0A break%0A else:%0A user_input = input(%22This directory doesn't exist. Do you want create it? %5Byes%5D/no: %22)%0A if _user_decision(user_input):%0A fp.mkdir()%0A break%0A else:%0A folder = input('Write new name: ')%0A fp = Path(home_path, folder)%0A%0A fp = Path(home_path, folder, file_name + _EXTENSION)%0A while True:%0A if fp.exists():%0A user_input = input('This file already exists. Do you want rename it? %5Byes%5D/no: ')%0A if _user_decision(user_input):%0A name = input('Write new file name: ')%0A fp = Path(home_path, folder, name)%0A else:%0A break%0A else:%0A break%0A%0A print('File saved in: ', fp)%0A # Save%0A fp = fp.open('w')%0A json.dump(data, fp, indent=4, ensure_ascii=False, sort_keys=True)%0A fp.close()%0A print('Save successfully!')%0A%0A%0Adef load(file_name, folder='', home_path=Path.home()):%0A # Check home_path%0A fp = Path(home_path, folder, file_name + _EXTENSION)%0A if fp.exists() and fp.is_file():%0A # load%0A fp = fp.open('r')%0A data_loaded = json.load(fp)%0A fp.close()%0A print(file_name, 'loaded successfully')%0A else:%0A fp_dir = Path(home_path, folder, file_name)%0A if fp.exists():%0A print('Invalid path')%0A elif fp_dir.is_dir():%0A print(%22It's a folder, not a file%22)%0A else:%0A print(%22This file doesn't exist%22)%0A data_loaded = %7B%7D%0A print('Empty data is loaded')%0A return data_loaded%0A%0A%0Adef _user_decision(answer, default_answer='yes'):%0A if answer is '':%0A answer = default_answer%0A%0A if answer == 'yes':%0A return True%0A elif answer == 'no':%0A return False%0A 
else:%0A print('Invalid answer')%0A answer = input('Please repeat the answer:')%0A return _user_decision(answer, default_answer)%0A
|
|
24210f31a5b54adf1b3b038fdad73b679656217c
|
fix mr_unit.py so that it records test failures properly
|
scripts/mr_unit.py
|
scripts/mr_unit.py
|
import sys, os
import csv
import MySQLdb
import traceback
def add_perf_results_to_mr_unit(args):
mr_unit = MySQLdb.connect(host='mr-0x8', user='root', passwd=args[1], db='mr_unit')
mr_unit.autocommit(False)
cursor = mr_unit.cursor()
try:
for row in csv.reader(file(os.path.join(args[2], "perf.csv"))):
row = [r.strip() for r in row]
row[3] = row[3].split("/")[-1]
row[8] = "TRUE" if row[8] == "1" else "FALSE"
cursor.execute('INSERT INTO perf(date, build_id, git_hash, git_branch, machine_ip, test_name, start_time, '
'end_time, pass, ncpu, os, job_name) VALUES("{0}", "{1}", "{2}", "{3}", "{4}", "{5}", "{6}"'
', "{7}", {8}, "{9}", "{10}", "{11}")'.format(*row))
mr_unit.commit()
except:
traceback.print_exc()
mr_unit.rollback()
assert False, "Failed to add performance results to mr_unit!"
if __name__ == '__main__':
add_perf_results_to_mr_unit(sys.argv)
|
Python
| 0.000001 |
@@ -410,66 +410,8 @@
-1%5D%0A
- row%5B8%5D = %22TRUE%22 if row%5B8%5D == %221%22 else %22FALSE%22%0A
@@ -687,11 +687,13 @@
%7D%22,
+%22
%7B8%7D
+%22
, %22%7B
|
f424001f409fd35b0e62be9a82d62b21b438e082
|
Add missing comma
|
onetime/urls.py
|
onetime/urls.py
|
from django.conf.urls.defaults import *
from django.views.generic.simple import redirect_to

from onetime.views import cleanup, login

# URL routes for the onetime app. Bug fix: the original was missing the
# comma after the empty prefix argument to patterns(), which made this
# file a syntax error.
# NOTE(review): the key pattern `[a-z0-9+]` matches a single character
# from the set a-z, 0-9 and a literal '+'; it looks like `[a-z0-9]+`
# (one or more characters) was intended -- confirm before changing.
urlpatterns = patterns('',
    (r'^cleanup/$', cleanup),
    (r'^(?P<key>[a-z0-9+])$', login),
    (r'^$', redirect_to, {'url': None}),
)
|
Python
| 0.999998 |
@@ -153,16 +153,17 @@
terns(''
+,
%0A (r'
|
8d88bf0808c5249d2c1feace5b8a1db1679e44b6
|
Create tests_unit.py
|
tests_unit.py
|
tests_unit.py
|
Python
| 0.000003 |
@@ -0,0 +1,2416 @@
+%22%22%22%0ANmeta2 Unit Tests%0A%0AUses pytest, install with:%0A sudo apt-get install python-pytest%0A%0ATo run test, type in:%0A py.test tests_unit.py%0A%0A%22%22%22%0A%0A#*** Testing imports:%0Aimport mock%0Aimport unittest%0A%0A#*** Ryu imports:%0Afrom ryu.base import app_manager # To suppress cyclic import%0Afrom ryu.controller import controller%0Afrom ryu.controller import handler%0Afrom ryu.ofproto import ofproto_v1_3_parser%0Afrom ryu.ofproto import ofproto_v1_2_parser%0Afrom ryu.ofproto import ofproto_v1_0_parser%0Afrom ryu.app.wsgi import ControllerBase%0Afrom ryu.app.wsgi import WSGIApplication%0Afrom ryu.app.wsgi import route%0A%0A#*** JSON imports:%0Aimport json%0Afrom json import JSONEncoder%0A%0A#*** nmeta2 imports:%0Aimport switch_abstraction%0Aimport config%0Aimport api%0A%0A#*** Instantiate Config class:%0A_config = config.Config()%0A%0A#======================== tc_policy.py Unit Tests ============================%0A#*** Instantiate class:%0Aswitches = switch_abstraction.Switches(_config)%0A%0Asock_mock = mock.Mock()%0Aaddr_mock = mock.Mock()%0A%0A%0A#*** Test Switches and Switch classes that abstract OpenFlow switches:%0Adef test_switches():%0A with mock.patch('ryu.controller.controller.Datapath.set_state'):%0A #*** Set up a fake switch datapath:%0A datapath = controller.Datapath(sock_mock, addr_mock)%0A%0A #*** Add a switch%0A assert switches.add(datapath) == 1%0A%0A #*** Look up by DPID:%0A assert switches.datapath(datapath.id) == datapath%0A%0A#======================== api.py Unit Tests ============================%0A%0Aclass _TestController(ControllerBase):%0A%0A def __init__(self, req, link, data, **config):%0A super(_TestController, self).__init__(req, link, data, **config)%0A eq_(data%5B'test_param'%5D, 'foo')%0A%0Aclass Test_wsgi(unittest.TestCase):%0A %22%22%22%0A Test case for running WSGI controller for API testing%0A %22%22%22%0A def setUp(self):%0A wsgi = WSGIApplication()%0A #*** Instantiate API class:%0A self.api = api.Api(self, 
_config, wsgi)%0A%0Adef test_decode_JSON():%0A #*** The JSON_Body class is in the api.py module. Good JSON:%0A good_json = '%7B%5C%22foo%5C%22: %5C%22123%5C%22%7D'%0A good = api.JSON_Body(good_json)%0A assert not good.error%0A assert good.error == %22%22%0A assert good.json == %7B'foo': '123'%7D%0A assert good%5B'foo'%5D == '123'%0A assert good%5B'bar'%5D == 0%0A%0A #*** Bad JSON:%0A bad_json = %22foo, bar=99%22%0A bad = api.JSON_Body(bad_json)%0A assert bad.json == %7B%7D%0A assert bad.error == '%7B%5C%22Error%5C%22: %5C%22Bad JSON%5C%22%7D'%0A
|
|
cba429780061bcdafde6f2bc799e74106e2cc336
|
Create textevolve.py
|
textevolve.py
|
textevolve.py
|
Python
| 0.000001 |
@@ -0,0 +1,750 @@
+'''%0AEvolve a piece of text with a simple evolutionary algorithm%0AAuthor: Saquib%0A7/27/13%0A'''%0Aimport random%0A%0Adef fitness(source, target):%0A fitval = 0%0A for i in range(0, len(source)):%0A fitval += (ord(target%5Bi%5D) - ord(source%5Bi%5D)) ** 2%0A return(fitval)%0A%0Adef mutate(source):%0A charpos = random.randint(0, len(source) - 1)%0A parts = list(source)%0A parts%5Bcharpos%5D = chr(ord(parts%5Bcharpos%5D) + random.randint(-1,1))%0A return(''.join(parts))%0A%0Asource = %22;wql* opqlq%22%0Atarget = %22hello world%22%0Afitval = fitness(source, target)%0Ai = 0%0Awhile True:%0A i += 1%0A m = mutate(source)%0A fitval_m = fitness(m, target)%0A if fitval_m %3C fitval:%0A fitval = fitval_m%0A source = m%0A print %22%255i %255i %2514s%22 %25 (i, fitval_m, m)%0A if fitval == 0:%0A break%0A
|
|
ff98bdf9ce263648de784183ad5984864f9d387a
|
Add ref create api test
|
tests/api/test_refs.py
|
tests/api/test_refs.py
|
Python
| 0.000001 |
@@ -0,0 +1,883 @@
+async def test_create(spawn_client, test_random_alphanumeric, static_time):%0A client = await spawn_client(authorize=True, permissions=%5B%22create_ref%22%5D)%0A%0A data = %7B%0A %22name%22: %22Test Viruses%22,%0A %22description%22: %22A bunch of viruses used for testing%22,%0A %22data_type%22: %22genome%22,%0A %22organism%22: %22virus%22,%0A %22public%22: True%0A %7D%0A%0A resp = await client.post(%22/api/refs%22, data)%0A%0A assert resp.status == 201%0A%0A assert resp.headers%5B%22Location%22%5D == %22/api/refs/%22 + test_random_alphanumeric.history%5B0%5D%0A%0A assert await resp.json() == dict(%0A data,%0A id=test_random_alphanumeric.history%5B0%5D,%0A created_at=static_time.iso,%0A user=%7B%0A %22id%22: %22test%22%0A %7D,%0A users=%5B%7B%0A %22build%22: True,%0A %22id%22: %22test%22,%0A %22modify%22: True,%0A %22modify_kind%22: True,%0A %22remove%22: True%0A %7D%5D%0A )%0A
|
|
7d21b55f2de7cd2c34cd3cd985824178d382398d
|
add 'stages' code
|
util/stages.py
|
util/stages.py
|
Python
| 0.000123 |
@@ -0,0 +1,989 @@
+from astrometry.util.file import *%0A%0Aclass CallGlobal(object):%0A%09def __init__(self, pattern, *args, **kwargs):%0A%09%09self.pat = pattern%0A%09%09self.args = args%0A%09%09self.kwargs = kwargs%0A%09def __call__(self, stage, kwargs):%0A%09%09func = self.pat %25 stage%0A%09%09kwa = self.kwargs.copy()%0A%09%09kwa.update(kwargs)%0A%09%09return func(*self.args, **kwa)%0A%0Adef runstage(stage, picklepat, stagefunc, force=%5B%5D, prereqs=%7B%7D,%0A%09%09%09 **kwargs):%0A%09print 'Runstage', stage%0A%0A%09pfn = picklepat %25 stage%0A%09if os.path.exists(pfn):%0A%09%09if stage in force:%0A%09%09%09print 'Ignoring pickle', pfn, 'and forcing stage', stage%0A%09%09else:%0A%09%09%09print 'Reading pickle', pfn%0A%09%09%09R = unpickle_from_file(pfn)%0A%09%09%09return R%0A%0A%09if stage %3C= 0:%0A%09%09P = %7B%7D%0A%09else:%0A%09%09prereq = prereqs.get(stage, stage-1)%0A%0A%09%09P = runstage(prereq, picklepat, stagefunc,%0A%09%09%09%09%09 force=force, prereqs=prereqs, **kwargs)%0A%09else:%0A%09%09P = %7B%7D%0A%0A%09print 'Running stage', stage%0A%09R = stagefunc(stage, **P)%0A%09print 'Stage', stage, 'finished'%0A%0A%09print 'Saving pickle', pfn%0A%09pickle_to_file(R, pfn)%0A%09print 'Saved', pfn%0A%09return R%0A
|
|
6a9447b6fb92369496178b1a379c724dfa9eb7aa
|
add management command to bootstrap Twilio gateway fees for incoming messages
|
corehq/apps/smsbillables/management/commands/bootstrap_twilio_gateway_incoming.py
|
corehq/apps/smsbillables/management/commands/bootstrap_twilio_gateway_incoming.py
|
Python
| 0 |
@@ -0,0 +1,1246 @@
+import logging%0A%0Afrom django.core.management.base import LabelCommand%0A%0Afrom corehq.apps.accounting.models import Currency%0Afrom corehq.apps.twilio.models import TwilioBackend%0Afrom corehq.apps.sms.models import INCOMING%0Afrom corehq.apps.smsbillables.models import SmsGatewayFee, SmsGatewayFeeCriteria%0A%0Alogger = logging.getLogger('accounting')%0A%0A%0Adef bootstrap_twilio_gateway_incoming(orm):%0A currency_class = orm%5B'accounting.Currency'%5D if orm else Currency%0A sms_gateway_fee_class = orm%5B'smsbillables.SmsGatewayFee'%5D if orm else SmsGatewayFee%0A sms_gateway_fee_criteria_class = orm%5B'smsbillables.SmsGatewayFeeCriteria'%5D if orm else SmsGatewayFeeCriteria%0A%0A # https://www.twilio.com/sms/pricing/us%0A SmsGatewayFee.create_new(%0A TwilioBackend.get_api_id(),%0A INCOMING,%0A 0.0075,%0A country_code=None,%0A currency=currency_class.objects.get(code=%22USD%22),%0A fee_class=sms_gateway_fee_class,%0A criteria_class=sms_gateway_fee_criteria_class,%0A )%0A%0A logger.info(%22Updated INCOMING Twilio gateway fees.%22)%0A%0A%0Aclass Command(LabelCommand):%0A help = %22bootstrap incoming Twilio gateway fees%22%0A args = %22%22%0A label = %22%22%0A%0A def handle(self, *args, **options):%0A bootstrap_twilio_gateway_incoming(None)%0A
|
|
adee3f0763a1119cfac212ce0eca88a08f7c65fa
|
Create masterStock.py
|
masterStock.py
|
masterStock.py
|
Python
| 0 |
@@ -0,0 +1,711 @@
+import requests%0Afrom bs4 import BeautifulSoup%0Aimport json%0A%0Adef loadMasterStock():%0A%09url = %22http://www.supremenewyork.com/mobile_stock.json%22%0A%09user = %7B%22User-Agent%22: %22Mozilla/5.0 (iPhone; CPU iPhone OS 10_2_1 like Mac OS X) AppleWebKit/602.4.6 (KHTML, like Gecko) Version/10.0 Mobile/14D27 Safari/602.1%22%7D%0A%09# user = %7B%22User-Agent%22: %22Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36%22%7D%0A%09r = requests.get(url, headers=user)%0A%09masterStock = json.loads(r.text)%0A%09with open(%22masterstock.txt%22, 'w') as outfile:%0A%09%09json.dump(masterStock, outfile, indent=4, sort_keys=True)%0A%0A%09print(%22Saved to masterstock.txt%22)%0A%0Aif __name__ == '__main__':%0A%09loadMasterStock()%0A
|
|
d06b3b41f786fc7cd2c05a6215fed026eef1cb8f
|
Add misc plugin.
|
curiosity/plugins/misc.py
|
curiosity/plugins/misc.py
|
Python
| 0 |
@@ -0,0 +1,1045 @@
+import curio%0Aimport sys%0A%0Aimport curious%0Afrom curious.commands import command%0Afrom curious.commands.context import Context%0Afrom curious.commands.plugin import Plugin%0Afrom curious.dataclasses.embed import Embed%0A%0A%0Aclass Misc(Plugin):%0A %22%22%22%0A Miscellaneous commands.%0A %22%22%22%0A @command()%0A async def info(self, ctx: Context):%0A %22%22%22%0A Shows info about the bot.%0A %22%22%22%0A em = Embed(title=ctx.guild.me.user.name, description=%22The official bot for the curious library%22)%0A em.add_field(name=%22Curious version%22, value=curious.__version__)%0A em.add_field(name=%22Curio version%22, value=curio.__version__)%0A em.add_field(name=%22CPython version%22, value=%22%7B%7D.%7B%7D.%7B%7D%22.format(*sys.version_info%5B0:3%5D))%0A # bot stats%0A em.add_field(name=%22Shard ID%22, value=ctx.event_context.shard_id)%0A em.add_field(name=%22Shard count%22, value=ctx.event_context.shard_count)%0A em.add_field(name=%22Heartbeats%22, value=ctx.bot._gateways%5Bctx.event_context.shard_id%5D.heartbeats)%0A%0A await ctx.channel.send(embed=em)%0A%0A%0A
|
|
3d7a1ad963a11c8fc425c7d82f5e0f8f877dc861
|
Add Python benchmark
|
lib/node_modules/@stdlib/math/base/special/atan2/benchmark/python/benchmark.py
|
lib/node_modules/@stdlib/math/base/special/atan2/benchmark/python/benchmark.py
|
Python
| 0.000138 |
@@ -0,0 +1,1536 @@
+#!/usr/bin/env python%0A%22%22%22Benchmark atan2.%22%22%22%0A%0Aimport timeit%0A%0Aname = %22atan2%22%0Arepeats = 3%0Aiterations = 1000000%0A%0A%0Adef print_version():%0A %22%22%22Print the TAP version.%22%22%22%0A%0A print(%22TAP version 13%22)%0A%0A%0Adef print_summary(total, passing):%0A %22%22%22Print the benchmark summary.%0A%0A # Arguments%0A%0A * %60total%60: total number of tests%0A * %60passing%60: number of passing tests%0A%0A %22%22%22%0A%0A print(%22#%22)%0A print(%221..%22 + str(total)) # TAP plan%0A print(%22# total %22 + str(total))%0A print(%22# pass %22 + str(passing))%0A print(%22#%22)%0A print(%22# ok%22)%0A%0A%0Adef print_results(elapsed):%0A %22%22%22Print benchmark results.%0A%0A # Arguments%0A%0A * %60elapsed%60: elapsed time (in seconds)%0A%0A # Examples%0A%0A %60%60%60 python%0A python%3E print_results(0.131009101868)%0A %60%60%60%0A %22%22%22%0A%0A rate = iterations / elapsed%0A%0A print(%22 ---%22)%0A print(%22 iterations: %22 + str(iterations))%0A print(%22 elapsed: %22 + str(elapsed))%0A print(%22 rate: %22 + str(rate))%0A print(%22 ...%22)%0A%0A%0Adef benchmark():%0A %22%22%22Run the benchmark and print benchmark results.%22%22%22%0A%0A setup = %22from math import atan2; from random import random;%22%0A stmt = %22y = atan2(100.0*random()-0.0, 100.0*random()-0.0)%22%0A%0A t = timeit.Timer(stmt, setup=setup)%0A%0A print_version()%0A%0A for i in xrange(3):%0A print(%22# python::%22 + name)%0A elapsed = t.timeit(number=iterations)%0A print_results(elapsed)%0A print(%22ok %22 + str(i+1) + %22 benchmark finished%22)%0A%0A print_summary(repeats, repeats)%0A%0A%0Adef main():%0A %22%22%22Run the benchmark.%22%22%22%0A benchmark()%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
14160c8ee729a094b6a980ed7c94b37d11f6dfba
|
Create xor_recursive.py
|
tests/xor_recursive.py
|
tests/xor_recursive.py
|
Python
| 0 |
@@ -0,0 +1,1419 @@
+import sys%0A%0Adef xor(*store):%0A print(%22---------------recursive call----------------%22)%0A print(len(store))%0A if(len(store) == 2):%0A print(%22lowest level%22)%0A b = store%5B0%5D%0A a = store%5B1%5D%0A print(b)%0A print(a)%0A return bool((a or b) and not(a and b))%0A else:%0A print(%22middle level%22)%0A b = store%5B0%5D%0A remaining = store%5B1:%5D%0A print(b)%0A print(remaining)%0A return bool((xor(*remaining) or b) and not(xor(*remaining) and b))%0A%0Aif __name__ == '__main__':%0A print(%22This is a testfile only, not to be used in production.%22)%0A sys.exit()%0A print(%22Expecting False: %25s%22%25xor(0, 0, 0, 0)) # False%0A print(%22Expecting True : %25s%22%25xor(0, 0, 0, 1)) # True%0A print(%22Expecting True : %25s%22%25xor(0, 0, 1, 0)) # True%0A print(%22Expecting False: %25s%22%25xor(0, 0, 1, 1)) # False%0A print(%22Expecting True : %25s%22%25xor(0, 1, 0, 0)) # True%0A print(%22Expecting False: %25s%22%25xor(0, 1, 0, 1)) # False%0A print(%22Expecting False: %25s%22%25xor(0, 1, 1, 0)) # False%0A print(%22Expecting True : %25s%22%25xor(0, 1, 1, 1)) # True%0A print(%22Expecting True : %25s%22%25xor(1, 0, 0, 0)) # True%0A print(%22Expecting False: %25s%22%25xor(1, 0, 0, 1)) # False%0A print(%22Expecting False: %25s%22%25xor(1, 0, 1, 0)) # False%0A print(%22Expecting True : %25s%22%25xor(1, 0, 1, 1)) # True%0A print(%22Expecting False: %25s%22%25xor(1, 1, 0, 0)) # False%0A print(%22Expecting True : %25s%22%25xor(1, 1, 0, 1)) # True%0A print(%22Expecting True : %25s%22%25xor(1, 1, 1, 0)) # True%0A print(%22Expecting False: %25s%22%25xor(1, 1, 1, 1)) # False%0A
|
|
d003babe55d8b7a202a50bc6eeb2e1113ef8247f
|
Add oeis plugin
|
plugins/oeis.py
|
plugins/oeis.py
|
Python
| 0 |
@@ -0,0 +1,1506 @@
+import requests%0Aimport re%0A%0A%0Aclass Plugin:%0A limit = 5%0A%0A def on_command(self, bot, msg, stdin, stdout, reply):%0A session = smartbot.utils.web.requests_session()%0A url = %22http://oeis.org/search%22%0A payload = %7B%0A %22fmt%22: %22text%22,%0A %22q%22: %22 %22.join(msg%5B%22args%22%5D%5B1:%5D),%0A %7D%0A%0A response = session.get(url, params=payload)%0A if response.status_code == 200:%0A self.i = -1%0A # only process lines starting with a percent symbol%0A for line in filter(lambda l: l.startswith(%22%25%22), response.text.split(%22%5Cn%22)):%0A # content default is set to None%0A flag, identifier, content, *_ = line.split(%22 %22, 2) + %5BNone%5D%0A # process the line%0A self.process(flag, identifier, content, stdout)%0A # stop when limit is reached%0A if self.i %3E= self.limit:%0A print(%22...%22, file=stdout)%0A break%0A%0A def process(self, flag, identifier, content, stdout):%0A # increase the sequence number%0A if flag%5B1%5D == %22I%22:%0A self.i += 1%0A # print formatted sequence%0A elif flag%5B1%5D == %22S%22:%0A sequence = re.sub(%22,%22, %22, %22, content)%0A print(%22%5B%7B%7D%5D %7B%7D: %7B%7D...%22.format(self.i, identifier, sequence), file=stdout)%0A # print sequence name%0A elif flag%5B1%5D == %22N%22:%0A print(content, file=stdout)%0A%0A def on_help(self):%0A return %22Usage: oeis %3Cquery%3E (see https://oeis.org/hints.html)%22%0A
|
|
c122db5ceda59d786bd550f586ea87d808595ab6
|
Add a script to reimport the LGA boundaries from the GADM.org data
|
pombola/nigeria/management/commands/nigeria_update_lga_boundaries_from_gadm.py
|
pombola/nigeria/management/commands/nigeria_update_lga_boundaries_from_gadm.py
|
Python
| 0.000022 |
@@ -0,0 +1,2077 @@
+from django.contrib.gis.gdal import DataSource%0Afrom django.core.management import BaseCommand%0Afrom django.db import transaction%0A%0Afrom mapit.management.command_utils import save_polygons, fix_invalid_geos_geometry%0Afrom mapit.models import Area, Type%0A%0Aclass Command(BaseCommand):%0A help = %22Update the Nigeria boundaries from GADM%22%0A args = '%3CSHP FILENAME%3E'%0A%0A def get_lga_area(self, lga_name, state_name):%0A lga_name_in_db = %7B%0A 'Eastern Obolo': 'Eastern O bolo',%0A %7D.get(lga_name, lga_name)%0A%0A # print %22state:%22, state_name%0A kwargs = %7B%0A 'type': self.lga_type,%0A 'name__iexact': lga_name_in_db,%0A 'parent_area__name': state_name,%0A %7D%0A try:%0A area = Area.objects.get(**kwargs)%0A except Area.DoesNotExist:%0A del kwargs%5B'parent_area__name'%5D%0A area = Area.objects.get(**kwargs)%0A return area%0A%0A def fix_geometry(self, g):%0A # Make a GEOS geometry only to check for validity:%0A geos_g = g.geos%0A if not geos_g.valid:%0A geos_g = fix_invalid_geos_geometry(geos_g)%0A if geos_g is None:%0A print %22The geometry was invalid and couldn't be fixed%22%0A g = None%0A else:%0A g = geos_g.ogr%0A return g%0A%0A def handle(self, filename, **options):%0A with transaction.atomic():%0A self.lga_type = Type.objects.get(code='LGA')%0A ds = DataSource(filename)%0A layer = ds%5B0%5D%0A for feature in layer:%0A lga_name = unicode(feature%5B'NAME_2'%5D)%0A state_name = unicode(feature%5B'NAME_1'%5D)%0A print %22Updating LGA %7B0%7D in state %7B1%7D%22.format(%0A lga_name, state_name%0A )%0A area = self.get_lga_area(lga_name, state_name)%0A g = feature.geom.transform('4326', clone=True)%0A g = self.fix_geometry(g)%0A if g is None:%0A continue%0A poly = %5Bg%5D%0A save_polygons(%7Barea.id: (area, poly)%7D)%0A
|
|
271be0bf16692aae2736d40e96447262e75c4a0f
|
add missing web.py
|
zmq/web.py
|
zmq/web.py
|
Python
| 0 |
@@ -0,0 +1,542 @@
+#-----------------------------------------------------------------------------%0A# Copyright (C) 2013 Brian Granger, Min Ragan-Kelley%0A#%0A# This file is part of pyzmq%0A#%0A# Distributed under the terms of the New BSD License. The full license is in%0A# the file COPYING.BSD, distributed as part of this software.%0A#-----------------------------------------------------------------------------%0A%0Araise ImportError('%5Cn'.join(%5B%0A %22zmq.web is now maintained separately as zmqweb,%22,%0A %22which can be found at https://github.com/ellisonbg/zmqweb%22%0A%5D))%0A
|
|
0a7f1695f9155bbe10b933e47637e4df0e2e31d4
|
Create HttpAndWeb.py
|
day3/HttpAndWeb.py
|
day3/HttpAndWeb.py
|
Python
| 0.000002 |
@@ -0,0 +1,1183 @@
+import requests%0A%0A%0Adef Get(url, PostId):%0A%09try:%0A%09%09isinstance(int(PostId), int)%0A%09%09if int(PostId) %3C= 100 and int(PostId) %3E 0:%0A%09%09%09r = requests.get(url + PostId)%0A%09%09%09return r%0A%09%09else:%0A%09%09%09print(%22Number must be between 1 and 100%22)%0A%09except ValueError as err:%0A%09%09raise(err)%0A%09return %22No Results%22%0A%0Adef Post(PostUrl,title, body, userId=11):%0A%09Post= %7B%0A%09%09'title': title,%0A%09%09'body': body,%0A%09%09'userId': userId%0A%09%09%7D%0A%09request = requests.post(PostUrl, data=Postdata)%0A%09return request%0A%0Adef main():%0A%09print(%22Python HTTP API command line app %25s%5Cn%22 %25(%22-%22*31))%0A%09print(%22Simple Python HTTP API command line app%22)%0A%09%0A%09url = %22https://jsonplaceholder.typicode.com/posts/%22%0A%0A%09PostId = input(%22Enter a number between 1 and 100: %22)%0A%09get = Get(url,PostId)%0A%09print(%22GET Response data%5Cn%5Ct%25s%5Cn%25s%5Cn%5CtStatus code%5Cn%5Ct%25s%5Cn%25s%5Cn%5CtHeaders%5Cn%5Ct%25s%5Cn%25s%22 %25 %0A%09%09(%22-%22*17,get.text, %22-%22*11, get.status_code,%22-%22*7, get.headers))%0A%0A%09title = input(%22Enter a title for your post: %22)%0A%09body = input(%22Enter a body for your post: %22)%0A%0A%09post = Post(url,title,body)%0A%09print(%22%5CtPOST Response data%5Cn%5Ct%25s%5Cn%25s%5Cn%5CtStatus code%5Cn%5Ct%25s%5Cn%25s%5Cn%5CtHeaders%5Cn%5Ct%25s%5Cn%25s%22 %25 %0A%09%09(%22-%22*17,post.text, %22-%22*11, post.status_code,%22-%22*7, post.headers))%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
9fa6ec498d70afdb4f28410d4ac7c29780c60861
|
Add first stab at origen submodule
|
transmutagen/origen.py
|
transmutagen/origen.py
|
Python
| 0.000001 |
@@ -0,0 +1,1120 @@
+from subprocess import run%0A%0Afrom pyne.origen22 import (nlibs, write_tape5_irradiation, write_tape4,%0A parse_tape9, merge_tape9, write_tape9)%0Afrom pyne.material import from_atom_frac%0A%0Aif __name__ == '__main__':%0A ORIGEN = '/home/origen22/code/o2_therm_linux.exe'%0A%0A xs_TAPE9 = %22/Users/aaronmeurer/Documents/origen/C371ALLCP.03/CCC371.03/origen22/libs/pwru50.lib%22%0A decay_TAPE9 = %22/Users/aaronmeurer/Documents/origen/C371ALLCP.03/CCC371.03/origen22/libs/decay.lib%22%0A%0A parsed_xs_tape9 = parse_tape9(xs_TAPE9)%0A parsed_decay_tape9 = parse_tape9(decay_TAPE9)%0A%0A merged_tape9 = merge_tape9(%5Bparsed_decay_tape9, parsed_xs_tape9%5D)%0A%0A # Can set outfile to change directory, but the file name needs to be%0A # TAPE9.INP.%0A write_tape9(merged_tape9)%0A%0A xsfpy_nlb = nlibs(parsed_xs_tape9)%0A%0A time = 2.6e6%0A%0A # Can set outfile, but the file name should be called TAPE5.INP.%0A write_tape5_irradiation(%22IRF%22, time/(60*60*24), 4e14, xsfpy_nlb=xsfpy_nlb, cut_off=0)%0A%0A M = from_atom_frac(%7B%22%22: 1%7D, mass=1, atoms_per_molecule=1)%0A%0A write_tape4(M)%0A%0A run(ORIGEN)%0A%0A data = parse_tape6()%0A%0A print(data)%0A
|
|
72f9d74fe6503de45e7251460d5419eebcabfb7e
|
Add files via upload
|
old/hefesto_temp_fix.py
|
old/hefesto_temp_fix.py
|
Python
| 0 |
@@ -0,0 +1,1250 @@
+import os%0A%0A%0A%0A%0A%0A%0Adef replace_temp(inputfile_folder):%0A os.chdir(inputfile_folder)%0A home_dir = os.getcwd()%0A for i in os.listdir(os.getcwd()):%0A if os.path.isdir(i):%0A os.chdir(i)%0A print(%22In folder: %7B%7D%22.format(os.getcwd()))%0A for z in os.listdir(os.getcwd()):%0A if '.txt' in z:%0A with open(z, 'r') as infile:%0A with open(%22temp.txt%22, 'w') as outfile:%0A print(%22%5CnChanging string in file: %7B%7D%22.format(z))%0A infile_text = infile.read()%0A s = infile_text.replace(%22,20,80,1200,0,-2,0%22, %220,20,80,1600,0,-2,0%22)%0A outfile.write(s)%0A os.remove(z)%0A os.rename(%22temp.txt%22, z)%0A infile.close()%0A print(%22Success! Replaced string in file: %7B%7D%22.format(z))%0A os.chdir(home_dir)%0A%0A%0A%0A%0A%0A%0Adef initialization():%0A print(%22%5Cn%5Cn%5Cn%5CnPlease specify your HeFESTo input file folder (in Exoplanet Pocketknife format):%22)%0A in1 = input(%22%5Cn%3E%3E%3E %22)%0A if in1 in os.listdir(os.getcwd()):%0A replace_temp(inputfile_folder=in1)%0A else:%0A initialization()%0A%0A%0A%0A%0Ainitialization()
|
|
816872186966186eb463d1fd45bea3a4c6f68e00
|
Add new sanity test for demoproject views
|
demoproject/tests_demo.py
|
demoproject/tests_demo.py
|
Python
| 0 |
@@ -0,0 +1,673 @@
+from demoproject.urls import urlpatterns%0Afrom django.test import Client, TestCase%0A%0A%0Aclass DemoProject_TestCase(TestCase):%0A def setUp(self):%0A self.client = Client()%0A%0A def test_all_views_load(self):%0A %22%22%22%0A A simple sanity test to make sure all views from demoproject%0A still continue to load!%0A %22%22%22%0A for url in urlpatterns:%0A address = url._regex%0A if address.startswith('%5E'):%0A address = '/' + address%5B1:%5D%0A if address.endswith('$'):%0A address = address%5B:-1%5D%0A response = self.client.get(address)%0A self.assertEqual(response.status_code, 200)%0A
|
|
2cdf9728bd185fa7a32e4a7f758311594245fae0
|
Add proc_suffixes_file.py
|
file_path/proc_suffixes_file.py
|
file_path/proc_suffixes_file.py
|
Python
| 0.000001 |
@@ -0,0 +1,1561 @@
+#!/usr/bin/env python%0Aimport os%0Aimport re%0Aimport sys%0A%0ASUFFIX_PAT = re.compile(r'(?P%3CFILE%3E%5Ba-zA-z0-9%5D+)_%5Cd+%5Cb')%0ASUFFIXED_LIST = %5B%5D%0A%0A%0Adef is_suffixed_file(dir_path, file_name):%0A base_name, ext_name = os.path.splitext(file_name)%0A match_obj = SUFFIX_PAT.match(base_name)%0A if not match_obj:%0A return False%0A%0A no_suffixed_file = os.path.join(dir_path, match_obj.group('FILE') + ext_name)%0A if not os.path.exists(no_suffixed_file):%0A return False%0A%0A return True%0A%0A%0Adef collect_suffixed_file(dir_path, file_name):%0A if not is_suffixed_file(dir_path, file_name):%0A return%0A%0A suffix_file = os.path.join(dir_path, file_name)%0A SUFFIXED_LIST.append(suffix_file)%0A%0A%0Adef remove_files():%0A if not SUFFIXED_LIST:%0A print 'No suffixes file.'%0A return%0A%0A SUFFIXED_LIST.sort()%0A for name in SUFFIXED_LIST:%0A print name%0A%0A input_str = raw_input('Do you want to remove this files: %5BY/N%5D')%0A if input_str.upper() != 'Y':%0A return%0A%0A for name in SUFFIXED_LIST:%0A try:%0A os.remove(name)%0A print '%25s removed.' %25 name%0A except OSError, e:%0A print e%0A%0A%0Adef main():%0A if len(sys.argv) %3C 2:%0A print 'Please a directory.'%0A return%0A%0A if not os.path.isdir(sys.argv%5B1%5D):%0A print 'Please input valid path - %25s' %25 sys.argv%5B1%5D%0A return%0A%0A for dir_path, dir_list, file_list in os.walk(sys.argv%5B1%5D):%0A for file_name in file_list:%0A collect_suffixed_file(dir_path, file_name)%0A%0A remove_files()%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
286c9c8a6618fc0a87dbe1b50787331986155940
|
Create __init__.py
|
__init__.py
|
__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
959aecd612f66eee22e179f985227dbb6e63202a
|
Move buckling calcs to continuum_analysis
|
__init__.py
|
__init__.py
|
Python
| 0 |
@@ -0,0 +1,165 @@
+from abaqus_model import *%0D%0Afrom abaqus_postproc import *%0D%0Afrom continuum_analysis import *%0D%0Afrom rayleighritz import RayleighRitzDiscrete%0D%0Afrom stiffcalc import *%0D%0A
|
|
6d2735035d7230e6a709f66be93b760531a42868
|
Create __init__.py
|
__init__.py
|
__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
662b0754ea73ef9dc19c50ac8d9b3e2aaa7fbb02
|
Create __init__.py
|
__init__.py
|
__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
6c42f104c8248b8e64aa72c1783ece510c6f9fac
|
fix repr formatting
|
djangae/core/paginator.py
|
djangae/core/paginator.py
|
import collections
from django.utils import six
from django.core.paginator import PageNotAnInteger, EmptyPage
class DatastorePaginator(object):
    """
    Paginator for datastore-backed querysets that supports only
    previous/next navigation, avoiding the expensive count() queries a
    full Django Paginator would issue.

    Only a subset of the Paginator API is implemented; the count,
    num_pages and page_range properties deliberately raise
    NotImplementedError.
    """

    NOT_SUPPORTED_MSG = "Property '{}' is not supported when paginating datastore-models"

    def __init__(self, object_list, per_page, orphans=0,
                 allow_empty_first_page=True):
        # `orphans` is accepted for API compatibility but not used.
        self.fetched_objects = object_list
        self.object_list = []
        self.per_page = int(per_page)
        self.allow_empty_first_page = allow_empty_first_page

    def validate_number(self, number):
        """
        Validate the given 1-based page number and return it as an int.
        """
        try:
            page_number = int(number)
        except (TypeError, ValueError):
            raise PageNotAnInteger('That page number is not an integer')
        if page_number < 1:
            raise EmptyPage('That page number is less than 1')
        return page_number

    def page(self, number):
        """
        Return a DatastorePage for the given 1-based page number.

        NOTE(review): this narrows self.fetched_objects down to the
        requested slice in place (keeping one extra object so the page
        can tell whether a next page exists), so the paginator appears
        to be single-use -- confirm before calling page() repeatedly.
        """
        page_number = self.validate_number(number)
        start = (page_number - 1) * self.per_page
        stop = start + self.per_page
        # Keep one object beyond the page boundary so has_next() works.
        self.fetched_objects = self.fetched_objects[start:stop + 1]
        self.object_list = self.fetched_objects[:self.per_page]
        return DatastorePage(self.fetched_objects, self.object_list,
                             page_number, self)

    def _get_count(self):
        """Total object count is not supported for datastore models."""
        raise NotImplementedError(self.NOT_SUPPORTED_MSG.format('count'))
    count = property(_get_count)

    def _get_num_pages(self):
        """Total page count is not supported for datastore models."""
        raise NotImplementedError(self.NOT_SUPPORTED_MSG.format('num_pages'))
    num_pages = property(_get_num_pages)

    def _get_page_range(self):
        """Page range is not supported for datastore models."""
        raise NotImplementedError(self.NOT_SUPPORTED_MSG.format('page_range'))
    page_range = property(_get_page_range)
class DatastorePage(collections.Sequence):
    """
    A single page of results produced by DatastorePaginator.

    fetched_objects may hold one object more than object_list; the
    surplus object is how has_next() detects a following page without
    running a count() query.
    """

    def __init__(self, fetched_objects, object_list, number, paginator):
        self.fetched_objects = fetched_objects
        self.object_list = object_list
        self.number = number
        self.paginator = paginator

    def __repr__(self):
        bottom = (self.number - 1) * self.paginator.per_page
        top = len(self.object_list)
        # Bug fix: the original used %-style placeholders ('%s') with
        # str.format(), so the values were never interpolated and the
        # literal '<Objects %s to %s>' was returned.
        # NOTE(review): `top` is the page length, not an absolute index;
        # the message reads oddly but is kept as-is -- confirm intent.
        return '<Objects {0} to {1}>'.format(bottom, top)

    def __len__(self):
        return len(self.object_list)

    def __getitem__(self, index):
        if not isinstance(index, (slice,) + six.integer_types):
            raise TypeError
        # The object_list is converted to a list so that if it was a QuerySet
        # it won't be a database hit per __getitem__.
        if not isinstance(self.object_list, list):
            self.object_list = list(self.object_list)
        return self.object_list[index]

    def has_next(self):
        # True when the paginator fetched more objects than fit on this
        # page (the one-object lookahead).
        return len(self.fetched_objects) > len(self.object_list)

    def has_previous(self):
        return self.number > 1

    def has_other_pages(self):
        return self.has_previous() or self.has_next()

    def next_page_number(self):
        return self.paginator.validate_number(self.number + 1)

    def previous_page_number(self):
        return self.paginator.validate_number(self.number - 1)

    def start_index(self):
        """
        Returns the 1-based index of the first object on this page,
        relative to total objects in the paginator.
        """
        # Special case, return zero if no items.
        if self.number == 1 and len(self.object_list) == 0:
            return 0
        return (self.paginator.per_page * (self.number - 1)) + 1

    def end_index(self):
        """
        Returns the 1-based index of the last object on this page,
        relative to total objects found (hits).
        """
        return (self.paginator.per_page * (self.number - 1)) + len(self.object_list)
Python
| 0.000003 |
@@ -2688,16 +2688,18 @@
cts
-%25s
+%7B0%7D
to
-%25s
+%7B1%7D
%3E'.f
|
05a6080eed951f80da3b6f7ee4962101884f328e
|
add testing utility for checking term lookback windows
|
zipline/pipeline/factors/testing.py
|
zipline/pipeline/factors/testing.py
|
Python
| 0 |
@@ -0,0 +1,2413 @@
+import numpy as np%0A%0Afrom zipline.testing.predicates import assert_equal%0Afrom .factor import CustomFactor%0A%0A%0Aclass IDBox(object):%0A %22%22%22A wrapper that hashs to the id of the underlying object and compares%0A equality on the id of the underlying.%0A%0A Parameters%0A ----------%0A ob : any%0A The object to wrap.%0A%0A Attributes%0A ----------%0A ob : any%0A The object being wrapped.%0A%0A Notes%0A -----%0A This is useful for storing non-hashable values in a set or dict.%0A %22%22%22%0A def __init__(self, ob):%0A self.ob = ob%0A%0A def __hash__(self):%0A return id(self)%0A%0A def __eq__(self, other):%0A if not isinstance(other, IDBox):%0A return NotImplemented%0A%0A return id(self.ob) == id(other.ob)%0A%0A%0Aclass CheckWindowsFactor(CustomFactor):%0A %22%22%22A custom factor that makes assertions about the lookback windows that%0A it gets passed.%0A%0A Parameters%0A ----------%0A input_ : Term%0A The input term to the factor.%0A window_length : int%0A The length of the lookback window.%0A expected_windows : dict%5Bint, dict%5Bpd.Timestamp, np.ndarray%5D%5D%0A For each asset, for each day, what the expected lookback window is.%0A%0A Notes%0A -----%0A The output of this factor is the same as %60%60Latest%60%60. 
Any assets or days%0A not in %60%60expected_windows%60%60 are not checked.%0A %22%22%22%0A params = ('expected_windows',)%0A%0A def __new__(cls, input_, window_length, expected_windows):%0A return super(CheckWindowsFactor, cls).__new__(%0A cls,%0A inputs=%5Binput_%5D,%0A dtype=input_.dtype,%0A window_length=window_length,%0A expected_windows=frozenset(%0A (k, IDBox(v)) for k, v in expected_windows.items()%0A ),%0A )%0A%0A def compute(self, today, assets, out, input_, expected_windows):%0A for asset, expected_by_day in expected_windows:%0A expected_by_day = expected_by_day.ob%0A%0A col_ix = np.searchsorted(assets, asset)%0A if assets%5Bcol_ix%5D != asset:%0A raise AssertionError('asset %25s is not in the window' %25 asset)%0A%0A try:%0A expected = expected_by_day%5Btoday%5D%0A except KeyError:%0A pass%0A else:%0A expected = np.array(expected)%0A actual = input_%5B:, col_ix%5D%0A assert_equal(actual, expected)%0A%0A # output is just latest%0A out%5B:%5D = input_%5B-1%5D%0A
|
|
d3a684b06d2d61f2a498346f78a5cbbabd7828e7
|
Create elastic_search.py
|
misc/elastic_search.py
|
misc/elastic_search.py
|
Python
| 0.000057 |
@@ -0,0 +1,1025 @@
+import requests%0Aimport json%0Aimport pprint%0A%0Aes = 'http://hostt:9200/'%0Aquery = '''%0A%7B'fields': %5B'field1', 'field2',%5D,%0A 'filter': %7B'bool': %7B'must': %5B%7B'terms': %7B'field1': %5B1,%0A 2%5D%7D%7D,%0A %7B'bool': %7B'should': %5B%7B'term': %7B'field2': 'p'%7D%7D,%0A %7B'bool': %7B'must': %5B%7B'term': %7B'field3': 'interesting'%7D%7D,%0A %5D%0A %7D%0A %7D%0A %5D%0A %7D%0A %7D%0A %5D%0A %7D%0A %7D%0A'from': 0,%0A'query': %7B'match_all': %7B%7D%7D,%0A'size': 100,%0A'search_type: 'scan',%0A%7D%0A %0A%0Aindex = '/index-name'%0Amethod = '/_search'%0Apayload = json.dumps(query)%0A%0Ares = requests.get(es + index + method, data=payload)%0Apprint.pprint(res.json())%0A
|
|
9db1db1d78be7a6199c145fa6bf7b29eb4f2c857
|
fix iarc cron
|
mkt/developers/cron.py
|
mkt/developers/cron.py
|
import datetime
import logging
import cronjobs
from celery.task.sets import TaskSet
from tower import ugettext as _
import amo
import lib.iarc
from amo.utils import chunked
from mkt.constants.iarc_mappings import RATINGS
from mkt.developers.tasks import (refresh_iarc_ratings, region_email,
region_exclude)
from mkt.reviewers.models import RereviewQueue
from mkt.webapps.models import AddonExcludedRegion, Webapp
log = logging.getLogger('z.mkt.developers.cron')
def _region_email(ids, regions):
    """Queue region_email tasks, batching the app ids 100 at a time."""
    subtasks = []
    for batch in chunked(ids, 100):
        subtasks.append(region_email.subtask(args=[batch, regions]))
    TaskSet(subtasks).apply_async()
@cronjobs.register
def send_new_region_emails(regions):
    """Email app developers notifying them of new regions added."""
    region_ids = [r.id for r in regions]
    # Apps already excluded from any of these regions are skipped.
    excluded = (AddonExcludedRegion.objects
                .filter(region__in=region_ids)
                .values_list('addon', flat=True))
    ids = (Webapp.objects
           .exclude(id__in=excluded)
           .filter(enable_new_regions=True)
           .values_list('id', flat=True))
    _region_email(ids, regions)
def _region_exclude(ids, regions):
    """Queue region_exclude tasks, batching the app ids 100 at a time."""
    subtasks = []
    for batch in chunked(ids, 100):
        subtasks.append(region_exclude.subtask(args=[batch, regions]))
    TaskSet(subtasks).apply_async()
@cronjobs.register
def exclude_new_region(regions):
    """
    Update regional blacklist based on a list of regions to exclude.
    """
    region_ids = [r.id for r in regions]
    # Apps already excluded from any of these regions need no new row.
    excluded = (AddonExcludedRegion.objects
                .filter(region__in=region_ids)
                .values_list('addon', flat=True))
    ids = (Webapp.objects
           .exclude(id__in=excluded)
           .filter(enable_new_regions=False)
           .values_list('id', flat=True))
    _region_exclude(ids, regions)
@cronjobs.register
def process_iarc_changes(date=None):
    """
    Queries IARC for recent changes in the past 24 hours (or date provided).
    If date provided use it. It should be in the form YYYY-MM-DD.

    NOTE: Get_Rating_Changes only sends the diff of the changes
    by rating body. They only send data for the ratings bodies that
    changed.
    """
    if not date:
        date = datetime.date.today()
    else:
        date = datetime.datetime.strptime(date, '%Y-%m-%d').date()

    client = lib.iarc.client.get_iarc_client('services')
    xml = lib.iarc.utils.render_xml('get_rating_changes.xml', {
        'date_from': date - datetime.timedelta(days=1),
        'date_to': date,
    })
    resp = client.Get_Rating_Changes(XMLString=xml)
    data = lib.iarc.utils.IARC_XML_Parser().parse_string(resp)

    for row in data.get('rows', []):
        iarc_id = row.get('submission_id')
        if not iarc_id:
            log.debug('IARC changes contained no submission ID: %s' % row)
            continue

        try:
            app = Webapp.objects.get(iarc_info__submission_id=iarc_id)
        except Webapp.DoesNotExist:
            log.debug('Could not find app associated with IARC submission ID: '
                      '%s' % iarc_id)
            continue

        try:
            # Fetch and save all IARC info.
            refresh_iarc_ratings([app.id])

            # Flag for rereview if it changed to adult.
            ratings_body = row.get('rating_system')
            # Bug fix: RATINGS is keyed by the ratings body's numeric id,
            # not by the body class itself (ratings_body is a class — see
            # the .name access below).
            rating = RATINGS[ratings_body.id].get(row['new_rating'])
            _flag_rereview_adult(app, ratings_body, rating)

            # Log change reason.
            reason = row.get('change_reason')
            amo.log(amo.LOG.CONTENT_RATING_CHANGED, app,
                    details={'comments': '%s:%s, %s' %
                             (ratings_body.name, rating.name, reason)})
        except Exception as e:
            # Any exceptions we catch, log, and keep going.
            log.debug('Exception: %s' % e)
            continue
def _flag_rereview_adult(app, ratings_body, rating):
    """Flag app for rereview if it receives an Adult content rating."""
    previous = app.content_ratings.filter(ratings_body=ratings_body.id)
    if not previous.exists():
        # No prior rating from this body, so nothing changed *to* adult.
        return
    if not rating.adult:
        return
    # Only flag on a transition into adult content.
    if not previous[0].get_rating().adult:
        RereviewQueue.flag(
            app, amo.LOG.CONTENT_RATING_TO_ADULT,
            message=_('Content rating changed to Adult.'))
|
Python
| 0.000329 |
@@ -3265,16 +3265,19 @@
ngs_body
+.id
%5D.get(ro
|
7bf376c57cc989f382f6a1cdc6a5f956b2c73fd6
|
Add pixels_with_value()
|
ml/img/segmentation.py
|
ml/img/segmentation.py
|
Python
| 0 |
@@ -0,0 +1,102 @@
+import numpy as np%0A%0A%0Adef pixels_with_value(img, val):%0A return np.all(img==np.array(val), axis=2)%0A%0A%0A
|
|
13c40d631c5d0e6035ea143a68e45201691b46a5
|
Create 0303_restaurant_plural_foods.py
|
2019/0303_restaurant_plural_foods.py
|
2019/0303_restaurant_plural_foods.py
|
Python
| 0.999927 |
@@ -0,0 +1,1439 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0ANPR 2019-03-03%0Ahttps://www.npr.org/2019/03/03/699735287/sunday-puzzle-in-this-game-a-chance-to-claim-vic-tor-y%0A%0AName a popular restaurant chain in two words. %0AIts letters can be rearranged to spell some things to eat and some things to drink. %0ABoth are plural words. What things are these, and what's the chain?%0A%22%22%22%0Aimport sys%0Asys.path.append('..')%0Aimport nprcommontools as nct%0Aimport json%0A#%25%25%0A# Get a list of restaurants%0Arestaurants = nct.wikipedia_category_members('Restaurant_chains_in_the_United_States',3)%0A# Two-word restaurants%0Agood_restaurants = set(x for x in restaurants if x.count(' ') == 1)%0A#%25%25%0A# Food and drink are both under the category 'food' in Wordnet%0Afood_and_drink = nct.get_category_members('food')%0A#%25%25%0A# Get plurals of foods%0Awith open(r'../plurals.json','r') as fid:%0A plurals1 = json.load(fid)%0Aplurals = set()%0Afor word,pls in plurals1.items():%0A if word in food_and_drink:%0A for pl in pls:%0A plurals.add(pl)%0A#%25%25%0A# All sorted strings consisting of two plurals%0Aplural_dict = dict()%0Aplurals_list = list(plurals)%0Afor i in range(len(plurals_list)):%0A for j in range(i+1,len(plurals_list)):%0A plural_dict%5Bnct.sort_string(nct.alpha_only(plurals_list%5Bi%5D+plurals_list%5Bj%5D))%5D = (plurals_list%5Bi%5D,plurals_list%5Bj%5D)%0A#%25%25%0Afor r in good_restaurants:%0A r_sorted = nct.sort_string(nct.alpha_only(r.lower()))%0A if r_sorted in plural_dict:%0A print(r,plural_dict%5Br_sorted%5D)%0A
|
|
ed46c3887c7b51cd75d46523af7b901b79eb92fc
|
add import script for Milton Keynes (closes #863)
|
polling_stations/apps/data_collection/management/commands/import_milton_keynes.py
|
polling_stations/apps/data_collection/management/commands/import_milton_keynes.py
|
Python
| 0 |
@@ -0,0 +1,392 @@
+from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter%0A%0Aclass Command(BaseXpressDemocracyClubCsvImporter):%0A council_id = 'E06000042'%0A addresses_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017 (1).tsv'%0A stations_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017 (1).tsv'%0A elections = %5B'parl.2017-06-08'%5D%0A csv_delimiter = '%5Ct'%0A
|
|
4f042e64e3155abfc4b86f61623a4d999dad0f89
|
Move tinyHttpServer.py
|
tinyHttpServer.py
|
tinyHttpServer.py
|
Python
| 0.000004 |
@@ -0,0 +1,340 @@
+import SimpleHTTPServer%0Aimport SocketServer%0A%0APORT = 8080%0A%0Atry:%0A Handler = SimpleHTTPServer.SimpleHTTPRequestHandler%0A httpd = SocketServer.TCPServer((%22%22, PORT), Handler)%0A print(%22serving at port %25d%22 %25 (PORT))%0A print(%22Type Ctrl+C to quit%22)%0A httpd.serve_forever()%0A%0Aexcept KeyboardInterrupt as e:%0A print(%22%5Cnserver stopped%5CnBye...%22) %0A%0A
|
|
5fb6b31ea928162c5185d66381ae99c7454d33c0
|
Add comb sort algorithm
|
sorts/comb_sort.py
|
sorts/comb_sort.py
|
Python
| 0.000008 |
@@ -0,0 +1,1595 @@
+%22%22%22%0AComb sort is a relatively simple sorting algorithm originally designed by Wlodzimierz Dobosiewicz in 1980.%0ALater it was rediscovered by Stephen Lacey and Richard Box in 1991. Comb sort improves on bubble sort.%0A%0AThis is pure python implementation of counting sort algorithm%0AFor doctests run following command:%0Apython -m doctest -v comb_sort.py%0Aor%0Apython3 -m doctest -v comb_sort.py%0A%0AFor manual testing run:%0Apython comb_sort.py%0A%22%22%22%0A%0Adef comb_sort(data):%0A %22%22%22Pure implementation of comb sort algorithm in Python%0A :param collection: some mutable ordered collection with heterogeneous%0A comparable items inside%0A :return: the same collection ordered by ascending%0A Examples:%0A %3E%3E%3E comb_sort(%5B0, 5, 3, 2, 2%5D)%0A %5B0, 2, 2, 3, 5%5D%0A %3E%3E%3E comb_sort(%5B%5D)%0A %5B%5D%0A %3E%3E%3E comb_sort(%5B-2, -5, -45%5D)%0A %5B-45, -5, -2%5D%0A %22%22%22%0A shrink_factor = 1.3%0A gap = len(data)%0A swapped = True%0A i = 0%0A%0A while gap %3E 1 or swapped:%0A %09# Update the gap value for a next comb%0A gap = int(float(gap) / shrink_factor)%0A%0A swapped = False%0A i = 0%0A%0A while gap + i %3C len(data):%0A if data%5Bi%5D %3E data%5Bi+gap%5D:%0A %09# Swap values%0A data%5Bi%5D, data%5Bi+gap%5D = data%5Bi+gap%5D, data%5Bi%5D%0A swapped = True%0A i += 1%0A%0A return data%0A%0A%0Aif __name__ == '__main__':%0A try:%0A raw_input # Python 2%0A except NameError:%0A raw_input = input # Python 3%0A%0A user_input = raw_input('Enter numbers separated by a comma:%5Cn').strip()%0A unsorted = %5Bint(item) for item in user_input.split(',')%5D%0A print(comb_sort(unsorted))%0A
|
|
2ed853301e8cedb72c7c07367d58e55cac23aa7c
|
add PI to arduino
|
raspi-pl2303.py
|
raspi-pl2303.py
|
Python
| 0.999473 |
@@ -0,0 +1,369 @@
+#!/usr/bin/env python%0A%0Aimport serial%0A%0Aprint 'RaspberryPi PL2303(USB2SerialTTL) communicate.'%0Aprint %22Copyright (c) 2015 winlin([email protected])%22%0A%0Af = serial.Serial('/dev/ttyUSB0', 115200)%0A%0Awhile True:%0A v = %22Hello, Arduino, this is RaspberryPi 2.0~%22%0A f.write(v)%0A print 'PI: %25s'%25(v)%0A%0A r = ''%0A for i in v:%0A r += f.read()%0A print 'Arduino: %25s'%25(r)
|
|
30412406b354f510a7321c3b3a159df6d7743668
|
Add a database file for Loren to complete
|
utils/database.py
|
utils/database.py
|
Python
| 0 |
@@ -0,0 +1,128 @@
+import database_setup%0A%0Aassert(database_setup.is_table_set_up())%0A%0A# TODO: Create the stubs for database files%0A# Assignee: Loren%0A%0A
|
|
929abedc5f971a58dfb54b706c66548609351835
|
Create fair_warning.py
|
google-code-jam/fair_warning.py
|
google-code-jam/fair_warning.py
|
Python
| 0.002125 |
@@ -0,0 +1,669 @@
+%22%22%22%0Ahttps://code.google.com/codejam/contest/433101/dashboard#s=p1%0A%22%22%22%0A%0Adef gcd(a, b):%0A if b %3E a:%0A return gcd(b, a)%0A elif b == 0:%0A return a%0A else:%0A return gcd(b, a %25 b)%0A%0A%0Adef big_gcd(a):%0A return reduce(lambda x,y: gcd(x, y), a)%0A%0A%0Adef solve(nums):%0A nums = sorted(nums)%0A diffs = %5B(nums%5Bi%5D - nums%5Bi-1%5D) for i in xrange(1, len(nums))%5D%0A T = big_gcd(diffs)%0A n = nums%5B0%5D%0A return 0 if n %25 T == 0 else T - (n %25 T)%0A%0A%0Adef main():%0A C = int(raw_input())%0A for c in xrange(1, C+1):%0A nums = map(int, raw_input().strip().split())%0A print 'Case #%7B%7D: %7B%7D'.format(c, solve(nums%5B1:%5D))%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
0c98d2e439a13f6979271f0386e93aca1cf255e2
|
Update model_config.py
|
tensorflow/python/keras/saving/model_config.py
|
tensorflow/python/keras/saving/model_config.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=protected-access
"""Functions that save the model's config into different formats.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
from tensorflow.python.util.tf_export import keras_export
# pylint: disable=g-import-not-at-top
try:
import yaml
except ImportError:
yaml = None
# pylint: enable=g-import-not-at-top
@keras_export('keras.models.model_from_config')
def model_from_config(config, custom_objects=None):
  """Instantiates a Keras model from its config.

  Usage:
  ```
  # for a Functional API model
  tf.keras.Model().from_config(model.get_config())

  # for a Sequential model
  tf.keras.Sequential().from_config(model.get_config())
  ```

  Arguments:
      config: Configuration dictionary.
      custom_objects: Optional dictionary mapping names
          (strings) to custom classes or functions to be
          considered during deserialization.

  Returns:
      A Keras model instance (uncompiled).

  Raises:
      TypeError: if `config` is not a dictionary.
  """
  if isinstance(config, list):
    raise TypeError('`model_from_config` expects a dictionary, not a list. '
                    'Maybe you meant to use '
                    '`Sequential.from_config(config)`?')
  from tensorflow.python.keras.layers import deserialize  # pylint: disable=g-import-not-at-top
  return deserialize(config, custom_objects=custom_objects)
@keras_export('keras.models.model_from_yaml')
def model_from_yaml(yaml_string, custom_objects=None):
  """Parses a yaml model configuration file and returns a model instance.

  Usage:

  >>> model = tf.keras.Sequential([
  ...     tf.keras.layers.Dense(5, input_shape=(3,)),
  ...     tf.keras.layers.Softmax()])
  >>> try:
  ...   import yaml
  ...   config = model.to_yaml()
  ...   loaded_model = tf.keras.models.model_from_yaml(config)
  ... except ImportError:
  ...   pass

  Arguments:
      yaml_string: YAML string or open file encoding a model configuration.
      custom_objects: Optional dictionary mapping names
          (strings) to custom classes or functions to be
          considered during deserialization.

  Returns:
      A Keras model instance (uncompiled).

  Raises:
      ImportError: if yaml module is not found.
  """
  # `yaml` is imported at module level inside a try/except, so it is None
  # when PyYAML is not installed.
  if yaml is None:
    raise ImportError('Requires yaml module installed (`pip install pyyaml`).')
  # The method unsafe_load only exists in PyYAML 5.x+, so which branch of the
  # try block is covered by tests depends on the installed version of PyYAML.
  try:
    # PyYAML 5.x+
    config = yaml.unsafe_load(yaml_string)
  except AttributeError:
    # PyYAML < 5.x: plain load() (unsafe by default on those versions).
    config = yaml.load(yaml_string)
  from tensorflow.python.keras.layers import deserialize  # pylint: disable=g-import-not-at-top
  return deserialize(config, custom_objects=custom_objects)
@keras_export('keras.models.model_from_json')
def model_from_json(json_string, custom_objects=None):
  """Parses a JSON model configuration string and returns a model instance.

  Usage:

  >>> model = tf.keras.Sequential([
  ...     tf.keras.layers.Dense(5, input_shape=(3,)),
  ...     tf.keras.layers.Softmax()])
  >>> config = model.to_json()
  >>> loaded_model = tf.keras.models.model_from_json(config)

  Arguments:
      json_string: JSON string encoding a model configuration.
      custom_objects: Optional dictionary mapping names
          (strings) to custom classes or functions to be
          considered during deserialization.

  Returns:
      A Keras model instance (uncompiled).
  """
  model_config = json.loads(json_string)
  from tensorflow.python.keras.layers import deserialize  # pylint: disable=g-import-not-at-top
  return deserialize(model_config, custom_objects=custom_objects)
|
Python
| 0.000001 |
@@ -1261,17 +1261,16 @@
sage:%0A
-%5C
%60%60%60%0A #
@@ -1437,9 +1437,8 @@
)%0A
-%5C
%60%60%60%0A
|
b7a019b41cbfac78ff48fe604d401921786d7459
|
Add size_continuous_layer helper tests
|
test/viz/helpers/test_size_continuous_layer.py
|
test/viz/helpers/test_size_continuous_layer.py
|
Python
| 0.000001 |
@@ -0,0 +1,2520 @@
+import unittest%0Afrom unittest.mock import Mock%0Afrom cartoframes.viz import helpers, Source%0A%0A%0Aclass TestSizeContinuousLayerHelper(unittest.TestCase):%0A def test_helpers(self):%0A %22should be defined%22%0A self.assertNotEqual(helpers.size_continuous_layer, None)%0A%0A def test_size_continuous_layer(self):%0A %22should create a layer with the proper attributes%22%0A layer = helpers.size_continuous_layer(%0A source='sf_neighborhoods',%0A value='name'%0A )%0A%0A self.assertNotEqual(layer.style, None)%0A self.assertEqual(layer.style._style%5B'point'%5D%5B'width'%5D, 'ramp(linear(sqrt($name), sqrt(globalMin($name)), sqrt(globalMax($name))), %5B2, 50%5D)')%0A self.assertEqual(layer.style._style%5B'line'%5D%5B'width'%5D, 'ramp(linear($name), %5B1, 10%5D)')%0A self.assertEqual(layer.style._style%5B'point'%5D%5B'color'%5D, 'opacity(#F46D43, 0.8)')%0A self.assertEqual(layer.style._style%5B'line'%5D%5B'color'%5D, 'opacity(#4CC8A3, 0.8)')%0A self.assertNotEqual(layer.popup, None)%0A self.assertEqual(layer.popup._hover, %5B%7B%0A 'title': 'name',%0A 'value': '$name'%0A %7D%5D)%0A%0A self.assertNotEqual(layer.legend, None)%0A self.assertEqual(layer.legend._type, 'size-continuous')%0A self.assertEqual(layer.legend._title, 'name')%0A self.assertEqual(layer.legend._description, '')%0A%0A def test_size_continuous_layer_point(self):%0A %22should create a point type layer%22%0A layer = helpers.size_continuous_layer(%0A 'sf_neighborhoods',%0A 'name',%0A 'Neighborhoods',%0A size=%5B10, 20%5D,%0A color='blue'%0A )%0A%0A self.assertEqual(%0A layer.style._style%5B'point'%5D%5B'width'%5D,%0A 'ramp(linear(sqrt($name), sqrt(globalMin($name)), sqrt(globalMax($name))), %5B10, 20%5D)'%0A )%0A self.assertEqual(%0A layer.style._style%5B'point'%5D%5B'color'%5D,%0A 'opacity(blue, 0.8)'%0A )%0A%0A def test_size_continuous_layer_line(self):%0A %22should create a line type layer%22%0A Source._get_geom_type = Mock(return_value='line')%0A%0A layer = helpers.size_continuous_layer(%0A 
'sf_neighborhoods',%0A 'name',%0A 'Neighborhoods',%0A size=%5B10, 20%5D,%0A color='blue'%0A )%0A%0A self.assertEqual(%0A layer.style._style%5B'line'%5D%5B'width'%5D,%0A 'ramp(linear($name), %5B10, 20%5D)'%0A )%0A self.assertEqual(%0A layer.style._style%5B'line'%5D%5B'color'%5D,%0A 'opacity(blue, 0.8)'%0A )%0A
|
|
e79445de75721b0d0b8ab1b6c8e24f036bf35a11
|
make qsub
|
nexus_obj/ascii_txt.py
|
nexus_obj/ascii_txt.py
|
Python
| 0.999677 |
@@ -0,0 +1,700 @@
+import os%0A%0Adef qsub_file(fnames,nmpi=64,title='title',hours=2):%0A header = %22%22%22#!/bin/bash%0A#PBS -N %25s%0A#PBS -l walltime=0%25d:00:00%0A#PBS -l nodes=%25d%0A%0A#PBS -A mat158%0A#PBS -j oe%0A#PBS -k n%0Acd $%7BPBS_O_WORKDIR%7D%0Aexport OMP_NUM_THREADS=8%0A%0ABIN=~/soft/kylin_qmcpack/qmcpack_cpu_comp%5Cn%5Cn%22%22%22 %25 (%0A title,%0A hours,%0A len(fnames)*nmpi/2%0A )%0A%0A body = 'cwd=%60pwd%60%5Cn'%0A for floc in fnames:%0A fname = os.path.basename(floc)%0A rundir = os.path.dirname(floc)%0A move_cmd = 'cd '+rundir%0A run_cmd = 'aprun -n %25d -d 8 -S 1 $BIN '%25nmpi + fname + ' %3E out 2%3E err&'%0A body += '%5Cn'.join(%5Bmove_cmd,run_cmd,'cd $cwd'%5D) + '%5Cn'%0A # end for fname%0A body += '%5Cnwait'%0A%0A text = header + body%0A return text%0A# end def qsub_file%0A
|
|
430c5301d7db50b153b0ae33f5c281506948099c
|
Add new package
|
plasTeX/Packages/afterpage.py
|
plasTeX/Packages/afterpage.py
|
Python
| 0.000001 |
@@ -0,0 +1,202 @@
+#!/usr/bin/env python%0A%0Afrom plasTeX import Command, Environment%0A%0Aclass afterpage(Command):%0A args = 'self:nox'%0A%0A def invoke(self, tex):%0A super(afterpage, self).invoke(tex)%0A return %5B%5D%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.