Dataset schema (column · type · range):

  commit        string    length 40–40
  subject       string    length 1–3.25k
  old_file      string    length 4–311
  new_file      string    length 4–311
  old_contents  string    length 0–26.3k
  lang          string    3 classes
  proba         float64   0–1
  diff          string    length 0–7.82k
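The diff fields are stored percent-encoded (%0A for newlines, %22 for quotes, %25 for a literal percent sign, and so on); the rows below are shown decoded. A minimal decoding sketch in Python, assuming only that the fields use standard URL percent-encoding:

    from urllib.parse import unquote

    def decode_diff(encoded: str) -> str:
        # Percent-decode a stored diff: %0A -> newline, %22 -> '"', %25 -> '%'.
        return unquote(encoded)

    # example: the "adding test_hello" row's diff
    sample = "@@ -0,0 +1,21 @@ +print %22Hello World%22%0A%0A"
    print(decode_diff(sample))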
1f731dcbfcff76ba63e4aea4fc05a15dd5021daa
Test for overriding sys.stdout.
tests/io/sys_stdio_override.py
tests/io/sys_stdio_override.py
Python
0.000009
@@ -0,0 +1,317 @@ +try:
    import uio as io
except ImportError:
    try:
        import io
    except ImportError:
        print("SKIP")
        raise SystemExit

import sys

try:
    sys.stdout = sys.stdout
except AttributeError:
    print("SKIP")
    raise SystemExit


buf = io.StringIO()
sys.stdout = buf
print(1, "test", 10 + 20)
855d10b768fbfec7772f8e5df4c181d971fe0dd4
add tests.
tests/test_primesieve_array.py
tests/test_primesieve_array.py
Python
0
@@ -0,0 +1,387 @@ +from primesieve.array import n_primes, primes

def assert_array_equal(have, want):
    assert list(have) == want

def test_primes_array():
    assert_array_equal(primes(10), [2,3,5,7])
    assert_array_equal(primes(10, 20), [11,13,17,19])

def test_n_primes_array():
    assert_array_equal(n_primes(7), [2,3,5,7,11,13,17])
    assert_array_equal(n_primes(5, 100), [101,103,107,109,113])
9de475e8007b209d005ed222686cb46bddef053d
Integrate LLVM at llvm/llvm-project@9e37b1e5a0c1
third_party/llvm/workspace.bzl
third_party/llvm/workspace.bzl
"""Provides the repository macro to import LLVM.""" load("//third_party:repo.bzl", "tf_http_archive") def repo(name): """Imports LLVM.""" LLVM_COMMIT = "ab85996e475ceddfda82255c314229ac0c0f4994" LLVM_SHA256 = "140b4198fa4f0ec1917a0e252feec5e19ccd9d7e96fc818c555b5551c796ec5b" tf_http_archive( name = name, sha256 = LLVM_SHA256, strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT), urls = [ "https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT), "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT), ], build_file = "//third_party/llvm:llvm.BUILD", patch_file = [ "//third_party/llvm:infer_type.patch", # TODO(b/231285230): remove once resolved "//third_party/llvm:build.patch", "//third_party/llvm:toolchains.patch", ], link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"}, )
Python
0.000003
@@ -160,133 +160,133 @@ = " -ab85996e475ceddfda82255c314229ac0c0f4994"
    LLVM_SHA256 = "140b4198fa4f0ec1917a0e252feec5e19ccd9d7e96fc818c555b5551c796ec5b +9e37b1e5a0c15f36c5642406d5aa02a657a0b19c"
    LLVM_SHA256 = "e2cca91a76ee6b44a6af91874e429af582b248b96ccd139373fec69ed0b0215f "
c6b0b5a8cef752481d5ec6672313ec8829d4299f
Create saving.py
Webpage/cgi-bin/saving.py
Webpage/cgi-bin/saving.py
Python
0.000001
@@ -0,0 +1,1545 @@ +#!/usr/bin/python3
import os
import os.path
import cgi, cgitb
import re
import pickle

#own packages
import dbcPattern


def dbc_main(): # NEW except for the call to processInput
	form = cgi.FieldStorage() # standard cgi script lines to here!

    # use format of next two lines with YOUR names and default data
	pagedata = form['webpage'].value
	SRate = form['SampleRates'].value
	StartVal = form['StartVal'].value
	if pagedata:
		contents = processInput(pagedata, SRate, StartVal) # process input into a page
		print(contents)
	return -1
def processInput(pagedata, SRate, StartVal):
	i=0
	file=open("../saved.txt","w")
	file.write(pagedata)
	if SRate:
		SRates_list=SRate.split(',')
	if StartVal:
		StartVal_list=StartVal.split(',')
	file.write("\nEndeHTML")
	for rate in SRates_list:
		file.write('SampleRate '+i+": "+rate)
		i++
	file.write("\nEndeRates")
	for values in StartVal_list:
		file.write('StartValue '+i+": "+rate)
		i++
	file.write("\nEndeValues")
	file.close()
	return createHTML()

def createHTML(sig_num, sig_list):
	signale=""
	i=0
	file=open("Header_Saved.html")
	html_string = file.read()
	file.close()
	savings=open("Header_Saved.html")
	for line in savings:
		if re.match("EndeHTML",line):
			break
		else:
			html_string+=line
	savings.close()
	return html_string


#Muss später ins Hauptprogramm kopiert werden
try: # NEW
	cgitb.enable()
	print("Content-Type: text/html;charset:UTF-8") # say generating html
	print("\n\n")
	dbc_main()
except:
    cgi.print_exception() # catch and print errors
8c6335c7ba7ebb34566603eb2943752fd3f524db
Add Exercise 9.13.
Kane1985/Chapter5/Ex9.13.py
Kane1985/Chapter5/Ex9.13.py
Python
0.000001
@@ -0,0 +1,2044 @@ +#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 9.13 from Kane 1985."""

from __future__ import division
from sympy import expand, symbols
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy.physics.mechanics import dynamicsymbols
from util import msprint, partial_velocities
from util import function_from_partials, generalized_active_forces


q1, q2 = q = dynamicsymbols('q1:3')
q1d, q2d = qd = dynamicsymbols('q1:3', level=1)
u1, u2 = u = dynamicsymbols('u1:3')
# L' is the natural length of the springs
alpha, beta, L1, L2, k1, k2 = symbols('α β L1 L2 k1 k2',
                                      real=True, positive=True)

# reference frames
N = ReferenceFrame('N')

# define points
pO = Point('O') # point O is fixed on the wall
pB1 = pO.locatenew('B1', (L1 + q1)*N.x) # treat block 1 as a point mass
pB2 = pB1.locatenew('B2', (L2 + q2)*N.x) # treat block 2 as a point mass
pB1.set_vel(N, pB1.pos_from(pO).dt(N))
pB2.set_vel(N, pB2.pos_from(pO).dt(N))

# kinematic differential equations
kde_map = dict(zip(map(lambda x: x.diff(), q), u))

# forces
#spring_forces = [(pB1, -k1 * q1 * N.x),
#                 (pB1, k2 * q2 * N.x),
#                 (pB2, -k2 * q2 * N.x)]
dashpot_forces = [(pB1, beta * q2d * N.x),
                  (pB2, -beta * q2d * N.x),
                  (pB2, -alpha * (q1d + q2d) * N.x)]
#forces = spring_forces + dashpot_forces

partials_c = partial_velocities(zip(*dashpot_forces)[0], u, N, kde_map)
Fr_c, _ = generalized_active_forces(partials_c, dashpot_forces)
#print('generalized active forces due to dashpot forces')
#for i, fr in enumerate(Fr_c, 1):
#    print('(F{0})c = {1} = -∂ℱ/∂u{0}'.format(i, msprint(fr)))

dissipation_function = function_from_partials(
        map(lambda x: -x.subs(kde_map), Fr_c), u, zero_constants=True)
print('ℱ = {0}'.format(msprint(dissipation_function)))

dissipation_function_expected = (alpha*u1**2 + 2*alpha*u1*u2 +
                                 (alpha + beta)*u2**2)/2
assert expand(dissipation_function - dissipation_function_expected) == 0
5a2f8967ac09b3aa1fc1cda21fd6dc5cf1d3f896
Add gesture recognition prototype
gesture_recognition/__init__.py
gesture_recognition/__init__.py
Python
0.000002
@@ -0,0 +1,2471 @@ +#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import pygame
from pygame import camera
from pygame.constants import QUIT, K_ESCAPE, KEYDOWN
import numpy as np


class Capture(object):
    def __init__(self):
        camera.init()
        self.size = (640, 480, )
        # create a display surface. standard pygame stuff
        self.display = pygame.display.set_mode(self.size, 0)

        # this is the same as what we saw before
        self.clist = pygame.camera.list_cameras()
        if not self.clist:
            raise ValueError("Sorry, no cameras detected.")
        self.cam = pygame.camera.Camera(self.clist[0], self.size)
        self.cam.start()

        # create a surface to capture to. for performance purposes
        # bit depth is the same as that of the display surface.
        self.snapshot = pygame.surface.Surface(self.size, 0, self.display)
        self.thresholded = pygame.surface.Surface(self.size, 0, self.display)
        self.previous_pixels = None

    def get_and_flip(self):
        # if you don't want to tie the framerate to the camera, you can check
        # if the camera has an image ready. note that while this works
        # on most cameras, some will never return true.
        if self.cam.query_image():
            self.snapshot = self.cam.get_image(self.snapshot)
            pixels = pygame.surfarray.array3d(self.snapshot).astype(np.int) # np.int to make it signed
            if self.previous_pixels is not None:
                # Get image difference
                p = np.subtract(pixels, self.previous_pixels)
                # Reset all pixels below threshold
                threshold = 30
                bool_matrix = np.logical_and(p < threshold, p > -threshold)
                p[bool_matrix] = 0
                # p[np.invert(bool_matrix)] = 200
                # Show differential image
                self.snapshot = pygame.surfarray.make_surface(p)
            self.previous_pixels = pixels

        # blit it to the display surface. simple!
        self.display.blit(self.snapshot, (0,0))
        pygame.display.flip()

    def main(self):
        going = True
        while going:
            events = pygame.event.get()
            for e in events:
                if e.type == QUIT or (e.type == KEYDOWN and e.key == K_ESCAPE):
                    # close the camera safely
                    self.cam.stop()
                    going = False

            self.get_and_flip()


Capture().main()
41ee54414845c3d8c1592048fe2f7cee57153eee
Add Python Numpy and Pandas cheatsheet
pythonCheatsheet.py
pythonCheatsheet.py
Python
0.000001
@@ -0,0 +1,61 @@ +#!/usr/local/bin/python
# Python Numpy and Pandas Cheatsheet
a813d79ccd63c9ff40afaf3fda4e2c8c0a37ee25
Add wsgi file
readthedocs/wsgi.py
readthedocs/wsgi.py
Python
0.000001
@@ -0,0 +1,467 @@ +import os

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
3d0ac61fa03ab27c567155f989db0ceb2134c9e0
adding test_hello
test_hello.py
test_hello.py
Python
0.999096
@@ -0,0 +1,21 @@ +print "Hello World"
7e9794dc98a268479f0f57128effc67f88586c8f
Add default message for list pages
bvspca/core/migrations/0025_auto_20180202_1214.py
bvspca/core/migrations/0025_auto_20180202_1214.py
Python
0.000001
@@ -0,0 +1,495 @@ +# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-02-02 19:14
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0024_contentindexpage_empty_message'),
    ]

    operations = [
        migrations.AlterField(
            model_name='contentindexpage',
            name='empty_message',
            field=models.CharField(default='Empty', max_length=200),
        ),
    ]
63db1dc6c23c4afd41bca5cf06207e383c982b51
structure of the commandprocessor
app/core/commandprocessor.py
app/core/commandprocessor.py
Python
0.000015
@@ -0,0 +1,125 @@ +class CommandProcessor:
	def parseCommand(self):
		pass

	def constructUrl(self):
		pass

	def processCommand(self):
		pass
2c687118a9aa248d6e6f28259d8a81217ee9cb1d
add solution for Number of Digit One
algorithms/numberOfDigitOne/numberOfDigitOne.py
algorithms/numberOfDigitOne/numberOfDigitOne.py
Python
0.999957
@@ -0,0 +1,455 @@ +class Solution:
    # @param {integer} n
    # @return {integer}

    def countDigitOne(self, n):
        res = prev = 0
        x = 1
        while n > 0:            # n = 23[y]xxx
            y = n % 10
            n /= 10
            if y > 1:
                res += x        # 23[2]xxx
            elif y == 1:
                res += prev + 1 # 23[1]xxx
            res += n * x        # 0[1]xxx ~ 22[1]xxx
            prev += y * x
            x *= 10
        return res
67dfcd5abb73aff7fd416f665de0d8461ba3e8b4
Create Subset.py
Tests/Subset.py
Tests/Subset.py
Python
0.000001
@@ -0,0 +1,2352 @@ +__author__ = 'Marius Wirtz'

from TM1py import TM1Queries, Subset
import uuid
import json
import unittest


class TestAnnotationMethods(unittest.TestCase):
    q = TM1Queries(ip='', port=8008, user='admin', password='apple', ssl=True)
    random_string1 = str(uuid.uuid4()).replace('-', '_')
    random_string2 = str(uuid.uuid4()).replace('-', '_')

    # 1. create subset
    def test_create_subset(self):
        s = Subset(dimension_name='plan_business_unit', subset_name=self.random_string1,
                   elements=['10110', '10300', '10210', '10000'])
        response = self.q.create_subset(s)
        print(response)
        response_as_dict = json.loads(response)
        name_in_response = response_as_dict['Name']
        self.assertEqual(self.random_string1, name_in_response)

        s = Subset(dimension_name='plan_business_unit', subset_name=self.random_string2,
                   expression='{ HIERARCHIZE( {TM1SUBSETALL( [plan_business_unit] )} ) }')
        response = self.q.create_subset(s)
        response_as_dict = json.loads(response)
        name_in_response = response_as_dict['Name']
        self.assertEqual(self.random_string2, name_in_response)

    # 2. get subset
    def test_get_subset(self):
        s = self.q.get_subset(name_dimension='plan_business_unit', name_subset='static_subset_for_unit_test')
        self.assertIsInstance(s, Subset)

        s = self.q.get_subset(name_dimension='plan_business_unit', name_subset='dynamic_subset_for_unit_test')
        self.assertIsInstance(s, Subset)

    # 3. update subset
    def test_update_subset(self):
        s = self.q.get_subset(name_dimension='plan_business_unit', name_subset='static_subset_for_unit_test')
        s.add_elements(['10110'])
        self.q.update_subset(s)

        s = self.q.get_subset(name_dimension='plan_business_unit', name_subset='dynamic_subset_for_unit_test')
        s.set_expression('{ HIERARCHIZE( {TM1SUBSETALL( [plan_business_unit] )} ) }')
        self.q.update_subset(s)

    # 4. delete subset
    def test_delete_subset(self):
        response = self.q.delete_subset('plan_business_unit', self.random_string1)
        self.assertEqual(response, '')
        response = self.q.delete_subset('plan_business_unit', self.random_string2)
        self.assertEqual(response, '')


if __name__ == '__main__':
    unittest.main()
93d6915c0e45d1873a48c298749d6956edbc337e
add remote ssh capability to fs-drift.py
ssh_thread.py
ssh_thread.py
Python
0.000001
@@ -0,0 +1,1140 @@ +#!/usr/bin/python
# -*- coding: utf-8 -*-

'''
ssh_thread.py -- manages parallel execution of shell commands on remote hosts
Copyright 2012 -- Ben England
Licensed under the Apache License at http://www.apache.org/licenses/LICENSE-2.0
See Appendix on this page for instructions pertaining to license.
'''

import threading
import os


# this class is just used to create a python thread
# for each remote host that we want to use as a workload generator
# the thread just executes an ssh command to run this program on a remote host

class ssh_thread(threading.Thread):

    ssh_prefix = 'ssh -x -o StrictHostKeyChecking=no '

    def __str__(self):
        return 'ssh-thread:%s:%s:%s' % \
            (self.remote_host, str(self.status), self.remote_cmd)

    def __init__(self, remote_host, remote_cmd_in):
        threading.Thread.__init__(self)
        self.remote_host = remote_host
        self.remote_cmd = '%s %s "%s"' % \
            (self.ssh_prefix, self.remote_host, remote_cmd_in)
        # print('thread cmd %s'%self.remote_cmd)
        self.status = None

    def run(self):
        self.status = os.system(self.remote_cmd)
88e05bd1fe0f2e46e740a3d8d631d4a810c155a6
Complete P8
Quiz/Problem8_satisfiesF.py
Quiz/Problem8_satisfiesF.py
Python
0
@@ -0,0 +1,631 @@ +def satisfiesF(L):
    """
    Assumes L is a list of strings
    Assume function f is already defined for you and it maps a string to a Boolean
    Mutates L such that it contains all of the strings, s, originally in L such
    that f(s) returns True, and no other elements
    Returns the length of L after mutation
    """
    Lclone = L[:]
    for i in Lclone:
        if not f(i):
            L.remove(i)
    return len(L)

#-----used for submitting-----
# run_satisfiesF(L, satisfiesF)


#-----test case-----
# def f(s):
#     return 'a' in s
#
# L = ['a', 'b', 'bc', 'c', 'ab']
# print satisfiesF(L)
# print L
820fe44762f0037eaacba9b7bf4129a29e25e799
add migration
accelerator/migrations/0036_add_user_deferrable_modal.py
accelerator/migrations/0036_add_user_deferrable_modal.py
Python
0
@@ -0,0 +1,1688 @@ +# Generated by Django 2.2.10 on 2021-03-03 17:08

from django.conf import settings
from django.db import (
    migrations,
    models,
)
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('accelerator', '0035_add_deferrable_modal_model'),
    ]

    operations = [
        migrations.CreateModel(
            name='UserDeferrableModal',
            fields=[
                ('id', models.AutoField(
                    auto_created=True,
                    primary_key=True,
                    serialize=False,
                    verbose_name='ID')),
                ('created_at', models.DateTimeField(
                    auto_now_add=True,
                    null=True)),
                ('updated_at', models.DateTimeField(
                    auto_now=True,
                    null=True)),
                ('is_deferred', models.BooleanField(default=False)),
                ('deferred_to', models.DateTimeField(
                    blank=True,
                    null=True)),
                ('deferrable_modal', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    to=settings.ACCELERATOR_DEFERRABLEMODAL_MODEL)),
                ('user', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'User Deferrable Modal',
                'abstract': False,
                'managed': True,
                'swappable': None,
            },
        ),
    ]
a5cabf4b778d03cac472e22b0e62bc262796b5ff
Add tests for `cms.templatetags.pagination`.
cms/tests/test_templatetags_pagination.py
cms/tests/test_templatetags_pagination.py
Python
0
@@ -0,0 +1,1180 @@ +from django.http import Http404
from django.test import RequestFactory, TestCase

from ..templatetags.pagination import paginate, pagination, pagination_url


class Object(object):
    paginator = None


class PaginationTest(TestCase):

    def setUp(self):
        self.factory = RequestFactory()
        self.request = self.factory.get('/')

    def test_paginate(self):
        paginate_response = paginate({'request': self.request}, [])
        self.assertEqual(repr(paginate_response), '<Page 1 of 1>')

        with self.assertRaises(Http404):
            self.request = self.factory.get('/?page=2')
            paginate({'request': self.request}, [])

    def test_pagination(self):
        obj = Object()
        pagination_response = pagination({'request': self.request}, obj)

        self.assertDictEqual(pagination_response, {
            'paginator': None,
            'pagination_key': 'page',
            'page_obj': obj,
            'request': self.request,
        })

    def test_pagination_url(self):
        self.assertEqual(pagination_url({'request': self.request}, 1), '/')
        self.assertEqual(pagination_url({'request': self.request}, 2), '/?page=2')
aa218407a9efdde9daa53d638fdfdacff873f14b
test change
clients/python/flask-server/tests/client_tests.py
clients/python/flask-server/tests/client_tests.py
Python
0.000002
@@ -0,0 +1,1561 @@ +"""
integration test in python
!!!! NEED PYTHON 2.7.8
"""

import unittest
import urllib2
import urllib
import thread
import time
import json
from client import app
from multiprocessing import Process

class ServerHandlerTest(unittest.TestCase):
    server = Process(target=app.run)

    @classmethod
    def setUpClass(cls):
        cls.server.start()
        time.sleep(1)

    @classmethod
    def tearDownClass(cls):
        cls.server.terminate()
        cls.server.join()

    def assertContent(self, content, response):
        for line in response.readlines():
            if line == content:
                found = True
        self.assertTrue(found)

    def test_should_call_get(self):
        response = urllib2.urlopen("http://localhost:5000/")
        self.assertContent('hello world', response)

    def test_should_call_post_ping(self):
        data = urllib.urlencode({'q': 'Ping'})
        response = urllib2.urlopen("http://localhost:5000/ping", data)
        self.assertContent('pong', response)

    def test_should_call_post_order(self):
        req = urllib2.Request('http://localhost:5000/order')
        req.add_header('Content-Type', 'application/json')
        response = urllib2.urlopen(req, json.dumps({'q': 'Path'}))
        self.assertEqual(json.loads(response.read()), {u'total' : 1000})

    @unittest.expectedFailure
    def test_should_call_post_unknown(self):
        data = urllib.urlencode({'answer': 'hello'})
        urllib2.urlopen("http://localhost:5000/unknown", data)

if __name__ == '__main__':
    unittest.main()
e2ed85ae1bb3f647095abb00b118cf06ae7549aa
add setup (even if not really needed)
0_Python/setup.py
0_Python/setup.py
Python
0
@@ -0,0 +1,270 @@ +#!/usr/bin/python

from distutils.core import setup
from Cython.Distutils import build_ext
from distutils.extension import Extension

cy_mod = Extension("inside_polygon",
	sources=	["inside_polygon.pyx"])

setup(ext_modules=[cy_mod],
	cmdclass={'build_ext': build_ext})
cd1c88c519a7079b2cef752473e5da3ddb4224e3
Add stress package (#3695)
var/spack/repos/builtin/packages/stress/package.py
var/spack/repos/builtin/packages/stress/package.py
Python
0
@@ -0,0 +1,1722 @@ +##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Stress(AutotoolsPackage):
    """stress is a deliberately simple workload generator for POSIX systems.
    It imposes a configurable amount of CPU, memory, I/O, and disk stress on
    the system. It is written in C, and is free software licensed under the
    GPLv2."""

    homepage = "https://people.seas.harvard.edu/~apw/stress/"
    url      = "https://people.seas.harvard.edu/~apw/stress/stress-1.0.4.tar.gz"

    version('1.0.4', '890a4236dd1656792f3ef9a190cf99ef')
9ce90bc43cfcc5a56be958671f304e7929eb0446
Add missing migration step due to changes in model
cmsplugin_collapse/migrations/0002_auto_20160210_0651.py
cmsplugin_collapse/migrations/0002_auto_20160210_0651.py
Python
0
@@ -0,0 +1,537 @@ +# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('cmsplugin_collapse', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='accordionheader',
            name='show_first',
            field=models.BooleanField(default=True, help_text='If selected, the first collapsible will be displayed in the open state.'),
            preserve_default=True,
        ),
    ]
b0ea743fa320f0df6e35b4381e6bd778906a5532
Add caching mechanism
labmanager/rlms/caches.py
labmanager/rlms/caches.py
Python
0.000001
@@ -0,0 +1,2321 @@ +import calendar

from cachecontrol import CacheControl
from cachecontrol.caches import FileCache
from cachecontrol.heuristics import LastModified, TIME_FMT

from email.utils import formatdate, parsedate, parsedate_tz

class LastModifiedNoDate(LastModified):
    """ This takes the original LastModified implementation of
    cachecontrol, but defaults the date in case it is not provided.
    """
    def __init__(self, require_date = True, error_margin = None):
        if error_margin is None:
            if require_date:
                self.error_margin = 0.1
            else:
                self.error_margin = 0.2
        else:
            self.error_margin = error_margin
        self.require_date = require_date

    def update_headers(self, resp):
        headers = resp.headers
        if 'expires' in headers:
            return {}

        if 'cache-control' in headers and headers['cache-control'] != 'public':
            return {}

        if resp.status not in self.cacheable_by_default_statuses:
            return {}

        if 'last-modified' not in headers:
            return {}

        parsed_date = parsedate_tz(headers.get('date'))
        if self.require_date and parsed_date is None:
            return {}

        if parsed_date is None:
            date = time.time()
            faked_date = True
        else:
            date = calendar.timegm(parsed_date)
            faked_date = False

        last_modified = parsedate(headers['last-modified'])
        if last_modified is None:
            return {}

        now = time.time()
        current_age = max(0, now - date)
        delta = date - calendar.timegm(last_modified)
        freshness_lifetime = max(0, min(delta * self.error_margin, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}

        expires = date + freshness_lifetime
        new_headers = {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}
        if faked_date:
            new_headers['date'] = time.strftime(TIME_FMT, time.gmtime(date))
        return new_headers

    def warning(self, resp):
        return None

def get_cached_session():
    CACHE_DIR = 'web_cache'
    return CacheControl(requests.Session(),
                        cache=FileCache(CACHE_DIR), heuristic=LastModifiedNoDate(require_date=False))
b6ac6a73cf10372be3384dbeb99b82b137a9daa2
Use chevrons instead of arrows in sortable links
oscar/templatetags/sorting_tags.py
oscar/templatetags/sorting_tags.py
# This is a rewrite of django-sorting but with added support for i18n title
# strings.
# See https://github.com/directeur/django-sorting
from django import template
from django.conf import settings

register = template.Library()

DEFAULT_SORT_UP = getattr(settings, 'DEFAULT_SORT_UP', '&uarr;')
DEFAULT_SORT_DOWN = getattr(settings, 'DEFAULT_SORT_DOWN', '&darr;')

sort_directions = {
    'asc': {'icon': DEFAULT_SORT_UP, 'inverse': 'desc'},
    'desc': {'icon': DEFAULT_SORT_DOWN, 'inverse': 'asc'},
    '': {'icon': DEFAULT_SORT_DOWN, 'inverse': 'asc'},
}


def anchor(parser, token):
    bits = token.split_contents()
    if len(bits) < 2:
        raise template.TemplateSyntaxError(
            "anchor tag takes at least 1 argument")
    try:
        title = bits[2]
    except IndexError:
        title = bits[1].capitalize()
    return SortAnchorNode(bits[1].strip(), title.strip())


class SortAnchorNode(template.Node):
    def __init__(self, field, title):
        self.field = template.Variable(field)
        self.title = template.Variable(title)

    def render(self, context):
        field = self.field.resolve(context)
        title = self.title.resolve(context)
        request = context['request']
        get_vars = request.GET.copy()
        sort_field = get_vars.pop('sort', [None])[0]
        icon = ''
        if sort_field == field:
            # We are already sorting on this field, so we set the inverse
            # direction within the GET params that get used within the href.
            direction = get_vars.pop('dir', [''])[0]
            get_vars['dir'] = sort_directions[direction]['inverse']
            icon = sort_directions[direction]['icon']

        href = u'%s?sort=%s' % (request.path, field)
        if len(get_vars) > 0:
            href += "&%s" % get_vars.urlencode()
        if icon:
            title = u"%s %s" % (title, icon)
        return u'<a href="%s">%s</a>' % (href, title)


anchor = register.tag(anchor)
Python
0
@@ -242,32 +242,37 @@ RT_UP = getattr( +
    settings, 'DEFAU @@ -287,16 +287,45 @@ UP', - '&uarr; +
    '<i class="icon-chevron-up"></i> ')
D @@ -351,16 +351,21 @@ getattr( +
 settings @@ -390,16 +390,47 @@ WN', - '&darr; +
    '<i class="icon-chevron-down"></i> ')
38f28bd0e5d4ea5af69ac7ccc553403a85ac61be
add problem 053
problem_053.py
problem_053.py
Python
0.000383
@@ -0,0 +1,881 @@ +#!/usr/bin/env python
#-*-coding:utf-8-*-

'''
There are exactly ten ways of selecting three from five, 12345:
123, 124, 125, 134, 135, 145, 234, 235, 245, and 345
In combinatorics, we use the notation, 5C3 = 10.
In general,

nCr =
n!
r!(n−r)!
,where r ≤ n, n! = n×(n−1)×...×3×2×1, and 0! = 1.
It is not until n = 23, that a value exceeds one-million: 23C10 = 1144066.

How many, not necessarily distinct, values of nCr,
for 1 ≤ n ≤ 100, are greater than one-million?
'''

from math import factorial
import timeit


def calc(nlim, lim):
    cnt = 0
    for n in range(nlim+1):
        for r in range(n+1):
            ncr = factorial(n)/(factorial(r)*factorial(n-r))
            if ncr > lim:
                cnt += 1
    return cnt


if __name__ == '__main__':
    print calc(100, 1000000)
    print timeit.Timer('problem_053.calc(100, 1000000)', 'import problem_053').timeit(1)
ac2d5c10e7895515acd63e2ca91924e99ec17003
add (Failing) test
test/test_writing.py
test/test_writing.py
Python
0.000033
@@ -0,0 +1,586 @@ +import RMF

RMF.set_log_level("trace")
path = RMF._get_temporary_file_path("writing.rmf")
print path
fh = RMF.create_rmf_file(path)
fh.add_frame("frame", RMF.FRAME)
fn = fh.get_root_node().add_child("frag", RMF.REPRESENTATION)

pf = RMF.ParticleFactory(fh)
ff = RMF.FragmentFactory(fh)

pf.get(fn).set_radius(1.0)
pf.get(fn).set_mass(2.0)
pf.get(fn).set_coordinates([1,2,3])
ff.get(fn).set_indexes([1,2,3,4])

del fh
fh = RMF.open_rmf_file_read_only(path)
fh.set_current_frame(RMF.FrameID(0))
fn = fh.get_root_node().get_children()[0]
pf = RMF.ParticleFactory(fh)
assert(pf.get_is(fn))
492d90e1197803f2dbce0b07417d12497c9031fe
Implement away-notify
txircd/modules/ircv3/awaynotify.py
txircd/modules/ircv3/awaynotify.py
Python
0.999988
@@ -0,0 +1,1482 @@ +from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements

class AwayNotify(ModuleData):
	implements(IPlugin, IModuleData)

	name = "AwayNotify"

	def actions(self):
		return [ ("usermetadataupdate", 10, self.sendAwayNotice),
		         ("capabilitylist", 10, self.addCapability) ]

	def load(self):
		if "unloading-away-notify" in self.ircd.dataCache:
			del self.ircd.dataCache["unloading-away-notify"]
			return
		if "cap-add" in self.ircd.functionCache:
			self.ircd.functionCache["cap-add"]("away-notify")

	def unload(self):
		self.ircd.dataCache["unloading-away-notify"] = True

	def fullUnload(self):
		del self.ircd.dataCache["unloading-away-notify"]
		if "cap-del" in self.ircd.functionCache:
			self.ircd.functionCache["cap-del"]("away-notify")

	def addCapability(self, capList):
		capList.append("away-notify")

	def sendAwayNotice(self, user, key, oldValue, value, visibility, setByUser, fromServer):
		if key != "away":
			return
		if value:
			for noticeUser in self.ircd.users.itervalues():
				if "capabilities" in noticeUser.cache and "away-notify" in noticeUser.cache["capabilities"]:
					noticeUser.sendMessage("AWAY", value, sourceuser=user)
		else:
			for noticeUser in self.ircd.users.itervalues():
				if "capabilities" in noticeUser.cache and "away-notify" in noticeUser.cache["capabilities"]:
					noticeUser.sendMessage("AWAY", sourceuser=user)

awayNotify = AwayNotify()
a6ca9fdb71eacffe94fad476712650f82870bb2e
Add base code for choosing solver
pyoommf/sim.py
pyoommf/sim.py
import os
from drivers.llg import LLG
import oommfmif as o


class Sim(object):

    def __init__(self, mesh, Ms, name=None):
        self.mesh = mesh
        self.Ms = Ms
        self.name = name
        self.gamma = 2.21e5
        self.energies = []
        self.N_Sims_Run = 0
        # Want some kind of persistent 'knowledge' of number of runs
        # and the sequence these occur in for data analysis
        # when we call a simulation multiple times to either
        # advance time or change parameters. Will need to think carefully
        # about situations such as changing H_applied - should we recreate this
        # data from the output files?
        # Advantage of this is recreating sim object if needed.

    def add(self, energy):
        self.energies.append(energy)

    def set_m(self, m_init):
        self.m_init = m_init

    def create_mif(self, overwrite=True):
        if self.name is None:
            self.name = 'unnamed'
        self.mif_filename = self.name + '_iteration' + \
            str(self.N_Sims_Run) + '.mif'
        if os.path.isfile(self.mif_filename):
            print("DEBUG: This simulation name already exists.")
            print("DEBUG: Overwriting MIF.")
        mif_file = open(self.mif_filename, 'w')
        mif_file.write('# MIF 2.1\n\n')
        mif_file.write(self.mesh.atlas_mif())
        mif_file.write(self.mesh.mesh_mif())
        for energy in self.energies:
            mif_file.write(energy.get_mif())
        mif_file.write(self.llg.get_mif())
        mif_file.write('Destination mags mmArchive\n\n')
        mif_file.write(
            'Schedule Oxs_TimeDriver::Magnetization mags Stage 1\n\n')
        mif_file.close()

    def run_until(self, t, alpha=0.1, gamma=2.21e5):
        self.llg = LLG(t, self.m_init, self.Ms, alpha, gamma, name=self.name)
        self.create_mif()
        self.execute_mif()

    def execute_mif(self):
        path = o.retrieve_oommf_path()
        executable = o.retrieve_oommf_executable(path)
        process = o.call_oommf('boxsi ' + self.mif_filename)
        process.wait()
Python
0.000001
@@ -784,16 +784,173 @@ energy)
 +
    def set_solver(self, solver='rk4'):
        """
        Available solvers in OOMMF:
        rk2, rk2heun, rk4, rkf54, rkf54m, rkf54s
        """
    def
cc78dc401b16ff189b86466e3c0cb4609a72af0d
add tester
batch/depute/test.py
batch/depute/test.py
Python
0.000001
@@ -0,0 +1,1514 @@ +#!/usr/bin/env python

import os, sys, json

split = False
splitval = False
if len(sys.argv) > 1:
    field = sys.argv[1]
    if len(sys.argv) > 2:
        split = True
        if len(sys.argv) > 3:
            splitval = int(sys.argv[3])
else:
    field = "all"

values = {}
def add_value(val):
    if split and ' / ' in val:
        for i,v in enumerate(val.split(' / ')):
            if type(splitval) != int or splitval == i:
                add_value(v)
        return
    if val not in values:
        values[val] = 0
    values[val] += 1

MISSING = []
for dep in os.listdir('out'):
    with open(os.path.join('out', dep)) as f:
        data = json.load(f)
    if field == "all":
        for k in data:
            if data[k] and (type(data[k]) != list or data[k] != [""]):
                add_value(k)
        continue
    if field in data:
        if type(data[field]) == list:
            if data[field] == [""]:
                MISSING.append(data["id_institution"])
            for i in data[field]:
                if i:
                    add_value(i)
        else: add_value(data[field])
    else: MISSING.append(data["id_institution"])

miss = len(MISSING)
if miss <= 3 and max(values.values()) == 1:
    print "ALL UNIQUE FIELDS (", len(values), ")"
    sys.exit(0)

if miss > 3:
    print miss, "MISSING:", MISSING

order = sorted(values, key=lambda x: values[x])
order.reverse()
for k in order:
    print k.encode('utf-8'), ":", values[k]
b674a3e4de86728139e97bb02fa90a62a7700c31
add speech processing test
testing/miniBrain.py
testing/miniBrain.py
Python
0.000001
@@ -0,0 +1,2561 @@ +#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import time
from os.path import dirname, abspath
sys.path.append(dirname(dirname(abspath(__file__))))
reload(sys)
sys.setdefaultencoding('utf-8')

from EmeraldAI.Pipelines.InputProcessing.ProcessInput import ProcessInput
from EmeraldAI.Pipelines.ScopeAnalyzer.AnalyzeScope import AnalyzeScope
from EmeraldAI.Pipelines.ResponseProcessing.ProcessResponse import ProcessResponse
from EmeraldAI.Pipelines.TextToSpeech.TTS import TTS
from EmeraldAI.Pipelines.Trainer.Trainer import Trainer
from EmeraldAI.Entities.User import User
from EmeraldAI.Entities.Context import Context
from EmeraldAI.Entities.PipelineArgs import PipelineArgs
from EmeraldAI.Config.Config import *
from EmeraldAI.Logic.Audio.SoundMixer import *

def ProcessSpeech(data):
    print "ProcessSpeech - Go"
    cancelSpeech = False
    stopwordList = Config().GetList("Bot", "StoppwordList")
    if(data in stopwordList):
        cancelSpeech = True
        SoundMixer().Stop()

    print "ProcessSpeech - No Stopword"

    pipelineArgs = PipelineArgs(data)

    print "ProcessSpeech - Pipeline Args Created"

    pipelineArgs = ProcessInput().ProcessAsync(pipelineArgs)

    print "ProcessSpeech - Process Async completed"

    pipelineArgs = AnalyzeScope().Process(pipelineArgs)

    print "ProcessSpeech - Scope analyzed"

    pipelineArgs = ProcessResponse().Process(pipelineArgs)

    print "ProcessSpeech - Response processed"

    if(not cancelSpeech):
        if(pipelineArgs.Animation != None):
            print "There should have been an animation", pipelineArgs.Animation

        pipelineArgs = TTS().Process(pipelineArgs)
        print "ProcessSpeech - TTS Triggered"

    trainerResult = Trainer().Process(pipelineArgs)

    print "ProcessSpeech - Trainer Done"

    Context().History.append(pipelineArgs)

    print "Pipeline Args", pipelineArgs.toJSON()
    print "Main User", User().toJSON()
    print "Trainer Result: ", trainerResult
    print "Input: ", data
    print "Response: ", pipelineArgs.Response

    while SoundMixer().IsPlaying():
        time.sleep(1)


print "Set user..."
User().SetUserByCVTag("Max")
print "Start Speech processing"
ProcessSpeech("Warmup")

#ProcessSpeech("Guten Abend")

#ProcessSpeech("Wer ist Angela Merkel")

ProcessSpeech("Wieviel ist 432 plus 68")

ProcessSpeech("Wieviel ist 4 + 32 / 6")

#ProcessSpeech("Bist du ein Mensch")

#ProcessSpeech("TRIGGER_FACEAPP_OFF")

#ProcessSpeech("Was ist eine Süßkartoffel")


exit()
ProcessSpeech("xxx")

ProcessSpeech("xxx")

ProcessSpeech("xxx")
842c796a223ee9cb78c69ccb59416a2afe0fcee0
Add tests for Permission class.
tests/permissions.py
tests/permissions.py
Python
0
@@ -0,0 +1,1701 @@ +import unittest
from permission import Permission, PERMISSION_DELIMITER


class BasicPermissionTests(unittest.TestCase):

    def setUp(self):
        self.p1 = Permission("test{0}1{0}hello".format(PERMISSION_DELIMITER))
        self.p2 = Permission("test{0}2{0}hello".format(PERMISSION_DELIMITER))
        self.p3 = Permission("test")
        self.p4 = Permission("test{0}1{0}hello".format(PERMISSION_DELIMITER))
        self.ps1 = {self.p1, self.p2}
        self.ps2 = {self.p1, self.p4}
        self.ps3 = {self.p1}

    def test_equal(self):
        self.assertEqual(self.p1, self.p4)
        self.assertNotEqual(self.p1, self.p2)
        self.assertNotEqual(self.p1, self.p3)
        self.assertEqual(self.ps2, self.ps3)

    def test_grants_permission(self):
        self.assertTrue(self.p1.grants_permission(self.p1))
        self.assertTrue(self.p1.grants_permission(self.p4))
        self.assertFalse(self.p1.grants_permission(self.p2))
        self.assertFalse(self.p1.grants_permission(self.p3))
        self.assertFalse(self.p3.grants_permission(self.p1))

    def test_grants_any_permission(self):
        self.assertTrue(self.p1.grants_any_permission(self.ps1))
        self.assertTrue(self.p2.grants_any_permission(self.ps1))
        self.assertFalse(self.p3.grants_any_permission(self.ps1))
        self.assertTrue(self.p4.grants_any_permission(self.ps1))

    def test_segments(self):
        self.assertEqual(self.p1.segments, ["test", "1", "hello"])
        self.assertEqual(self.p2.segments, ["test", "2", "hello"])
        self.assertEqual(self.p3.segments, ["test"])
        self.assertEqual(self.p1.segments, self.p4.segments)

if __name__ == "__main__":
    unittest.main()
cb7d205add1d6e114277e596b2023c755dd1ff19
add an example unit test
tests/test_docker.py
tests/test_docker.py
Python
0
@@ -0,0 +1,1391 @@ +""" run with

python setup.py install; pip install . ; nosetests -v --nocapture tests/docker/test_docker.py
python setup.py install; pip install . ; nosetests -v --nocapture tests/docker/test_docker.py:Test_docker.test_001

nosetests -v --nocapture tests/cm_basic/test_var.py

or

nosetests -v tests/cm_basic/test_var.py

"""
from cloudmesh_client.common.Shell import Shell
from cloudmesh_client.common.util import HEADING

from cloudmesh_client.var import Var


def run(command):
    print(command)
    parameter = command.split(" ")
    shell_command = parameter[0]
    args = parameter[1:]
    result = Shell.execute(shell_command, args)
    print(result)
    return result


# noinspection PyMethodMayBeStatic,PyPep8Naming
class Test_docker(object):
    """

    """

    def setup(self):
        pass

    def test_003(self):
        HEADING("list docker images")
        result = run("cms docker image list")
        print(result)
        assert "cms" in result  # need to make real assertion

    def test_004(self):
        HEADING("list docker images")
        result = run("cms docker container list")
        print(result)
        assert "cms" in result  # need to make real assertion

    def test_005(self):
        HEADING("list docker images")
        result = run("cms docker network list")
        print(result)
        assert "cms" in result  # need to make real assertion
4e0476fa83d0832c328abf00b5167887a0af3fe6
Add tests for hashes
tests/test_hashes.py
tests/test_hashes.py
Python
0.000001
@@ -0,0 +1,244 @@ +from webhooks.hashes import placebo_hash_function, basic_hash_function


def test_placebo():
    assert placebo_hash_function() == ""


def test_basic():
    hashes = set([basic_hash_function() for x in range(30)])
    assert len(hashes) == 30
ef8ca51dbd9b93a801a4a87be3c04f2c56cdef5a
test for call to enqueue passing
tests/test_server.py
tests/test_server.py
Python
0
@@ -0,0 +1,876 @@ +import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY

from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload

def test_post():
    q = Mock()
    filename = 'afakefilename'
    files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}

    hash_object = hashlib.md5(filename.encode())
    audio_filename = hash_object.hexdigest() + "-" + filename
    analysis_filename = audio_filename + '.analysis.json'

    result = json.dumps({'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)})
    # also need to check that we call upload
    assert result == handle_post(q, files, get_url, upload) # wait,this seems to fail half the time, wtf?
    q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
b73e125fdcb12649e79aa2e108dcc019d9fffeb0
add strtol test
tests/test_strtol.py
tests/test_strtol.py
Python
0.000043
@@ -0,0 +1,1229 @@ +import nose
import angr
import subprocess

import logging
l = logging.getLogger('angr.tests.strtol')

import os
test_location = str(os.path.dirname(os.path.realpath(__file__)))


def test_strtol():
    b = angr.Project(os.path.join(test_location, "../../binaries/tests/x86_64/strtol_test"))

    pg = b.factory.path_group(immutable=False)

    # find the end of main
    expected_outputs = {"base 8 worked\n", "base +8 worked\n", "0x worked\n", "+0x worked\n", "base +10 worked\n",
                        "base 10 worked\n", "base -8 worked\n", "-0x worked\n", "base -10 worked\n", "Nope\n"}
    pg.explore(find=0x400804, num_find=len(expected_outputs))

    # check the outputs
    pipe = subprocess.PIPE
    for f in pg.found:
        test_input = f.state.posix.dumps(0)
        test_output = f.state.posix.dumps(1)
        expected_outputs.remove(test_output)

        # check the output works as expected
        p = subprocess.Popen("./test2", stdout=pipe, stderr=pipe, stdin=pipe)
        ret = p.communicate(test_input)[0]
        nose.tools.assert_equal(ret, test_output)

    # check that all of the outputs were seen
    nose.tools.assert_equal(len(expected_outputs), 0)


if __name__ == "__main__":
    test_strtol()
f5d2b17371dbd974820b9b8ab1fcdb11ad8fa646
Add in script to count duplicates.
backend/scripts/countdups.py
backend/scripts/countdups.py
Python
0
@@ -0,0 +1,406 @@ +#!/usr/bin/env python

import rethinkdb as r

conn = r.connect('localhost', 30815, db='materialscommons')
rql = r.table('datafiles').filter(r.row['usesid'].match("^[0-9a-f]")).pluck('size')
total_bytes = 0
total_files = 0
for doc in rql.run(conn):
    total_bytes = total_bytes + doc['size']
    total_files = total_files + 1

print "Total bytes = %s for %d dups" %(format(total_bytes, ",d"), total_files)
8488e7c5245758e4651e6d723f93d52f3ff54d73
Add tool for submitting jobs to AreWeCompressedYet
tools/submit_awcy.py
tools/submit_awcy.py
Python
0
@@ -0,0 +1,1122 @@ +#!/usr/bin/env python

from __future__ import print_function

import requests
import argparse
import os
import subprocess
import sys

if 'DAALA_ROOT' not in os.environ:
    print("Please specify the DAALA_ROOT environment variable to use this tool.")
    sys.exit(1)

keyfile = open('secret_key','r')
key = keyfile.read().strip()

daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)

branch = subprocess.check_output('git symbolic-ref -q --short HEAD',shell=True).strip()

parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix',default=branch)
args = parser.parse_args()

commit = subprocess.check_output('git rev-parse HEAD',shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD',shell=True).strip()
date = subprocess.check_output(['git','show','-s','--format=%ci',commit]).strip()
date_short = date.split()[0];
user = args.prefix

run_id = user+'-'+date_short+'-'+short

print('Creating run '+run_id)
r = requests.post("https://arewecompressedyet.com/submit/job", {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
0c145918d0f34bee1193eeaa0488eb369f0e843e
Use item_lookup_field for DELETE methods
eve/methods/delete.py
eve/methods/delete.py
# -*- coding: utf-8 -*-

"""
    eve.methods.delete
    ~~~~~~~~~~~~~~~~~~

    This module imlements the DELETE method.

    :copyright: (c) 2013 by Nicola Iarocci.
    :license: BSD, see LICENSE for more details.
"""

from flask import current_app as app, abort
from eve.utils import config
from eve.auth import requires_auth
from eve.methods.common import get_document, ratelimit


@ratelimit()
@requires_auth('item')
def delete(resource, **lookup):
    """Deletes a resource item. Deletion will occur only if request ETag
    matches the current representation of the item.

    :param resource: name of the resource to which the item(s) belong.
    :param **lookup: item lookup query.

    .. versionchanged:: 0.0.7
       Support for Rate-Limiting.

    .. versionchanged:: 0.0.5
       Pass current resource to ``parse_request``, allowing for proper
       processing of new configuration settings: `filters`, `sorting`,
       `paging`.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.
    """
    original = get_document(resource, **lookup)
    if not original:
        abort(404)

    app.data.remove(resource, lookup[config.ID_FIELD])
    return {}, None, None, 200


@requires_auth('resource')
def delete_resource(resource):
    """Deletes all item of a resource (collection in MongoDB terms). Won't
    drop indexes. Use with caution!

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionadded:: 0.0.2
    """
    app.data.remove(resource)
    return {}, None, None, 200
Python
0
@@ -1133,22 +1133,24 @@ source, -lookup +original [config.
837089f9195af984597522fffc8c2c6a02e73097
Create config.example.py
scripts/eurotram/config.example.py
scripts/eurotram/config.example.py
Python
0.000003
@@ -0,0 +1,173 @@ +dbname='gis'
user='trolleway'
host='localhost'
password='admin'

ngw_url='http://trolleway.nextgis.com'
ngw_resource_id=
ngw_login = 'administrator'
ngw_password = 'admin'
4d30756e722cafa40fa449e48c967eeebc58500a
Add a manage.py command to import realm filters
zerver/management/commands/import_realm_filters.py
zerver/management/commands/import_realm_filters.py
Python
0
@@ -0,0 +1,930 @@ +from __future__ import absolute_import

from django.core.management.base import BaseCommand

from zerver.models import RealmFilter, get_realm

import logging

class Command(BaseCommand):
    help = """Imports realm filters to database"""

    def handle(self, *args, **options):
        realm_filters = {
            "zulip.com": [
                ("#(?P<id>[0-9]{2,8})", "https://trac.zulip.net/ticket/%(id)s"),
            ],
            "mit.edu/zephyr_mirror": [],
        }

        for domain, filters in realm_filters.iteritems():
            realm = get_realm(domain)
            if realm is None:
                logging.error("Failed to get realm for domain %s" % (domain,))
                continue
            for filter in filters:
                RealmFilter(realm=realm, pattern=filter[0], url_format_string=filter[1]).save()
                logging.info("Created realm filter %s for %s" % (filter[0], domain))
c7fa4500b22104b34b50bbcacc3b64923d6da294
Add a parser for plain text
trex/parsers.py
trex/parsers.py
Python
0.000945
@@ -0,0 +1,966 @@ +# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <[email protected]>
#
# See LICENSE comming with the source of 'trex' for details.
#

from io import TextIOWrapper

from rest_framework.parsers import BaseParser


class PlainTextParser(BaseParser):

    media_type = "text/plain"

    def parse(self, stream, media_type=None, parser_context=None):
        print "Running PlainTextParser"
        charset = self.get_charset(media_type)
        if charset:
            stream = TextIOWrapper(stream, encoding=charset)

        return stream

    def get_charset(self, media_type):
        if not media_type:
            return None

        charset = None
        msplit = media_type.split(" ");
        for m in msplit:
            m = m.strip()
            if "charset" in m:
                csplit = m.split("=")
                if len(csplit) > 1:
                    charset = csplit[1]
                return charset.strip().lower()
        return None
72d7e2a37bec5f7ae904ed2119dd8c30c22801fb
Add clinvar bot users
gennotes_server/migrations/0002_add_clinvar_bot_users.py
gennotes_server/migrations/0002_add_clinvar_bot_users.py
Python
0
@@ -0,0 +1,555 @@ +# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.contrib.auth import get_user_model
from django.db import migrations


def add_clinvar_bot_users(apps, schema_editor):
    usernames = ['clinvar-variant-importer', 'clinvar-data-importer']
    for username in usernames:
        get_user_model().objects.get_or_create(username=username)


class Migration(migrations.Migration):

    dependencies = [
        ('gennotes_server', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(add_clinvar_bot_users),
    ]
68b991404ee7bd3bde1300d456a46d863ff4d54a
Set styles on current selection
chat/richtext.py
chat/richtext.py
#!/usr/bin/env python

import pygtk
pygtk.require('2.0')
import gtk
import pango
import xml.sax

class RichTextBuffer(gtk.TextBuffer):
	def __init__(self):
		gtk.TextBuffer.__init__(self)

		self.connect_after("insert-text", self.__insert_text_cb)

		self.__create_tags()
		self.active_tags = []

	def apply_tag(self, tag_name):
		self.active_tags.append(tag_name)

	def unapply_tag(self, tag_name):
		self.active_tags.remove(tag_name)

	def __create_tags(self):
		tag = self.create_tag("bold")
		tag.set_property("weight", pango.WEIGHT_BOLD)

		tag = self.create_tag("italic")
		tag.set_property("style", pango.STYLE_ITALIC)

	def __insert_text_cb(self, widget, pos, text, length):
		for tag in self.active_tags:
			pos_end = pos.copy()
			pos_end.backward_chars(length)
			self.apply_tag_by_name(tag, pos, pos_end)

class RichTextToolbar(gtk.Toolbar):
	def __init__(self, buf):
		gtk.Toolbar.__init__(self)

		self.buf = buf

		item = gtk.ToggleToolButton(gtk.STOCK_BOLD)
		item.connect("toggled", self.__toggle_style_cb, "bold")
		self.insert(item, -1)
		item.show()

		item = gtk.ToggleToolButton(gtk.STOCK_ITALIC)
		item.connect("toggled", self.__toggle_style_cb, "italic")
		self.insert(item, -1)
		item.show()

	def __toggle_style_cb(self, toggle, tag_name):
		if toggle.get_active():
			self.buf.apply_tag(tag_name)
		else:
			self.buf.unapply_tag(tag_name)

class RichTextHandler(xml.sax.handler.ContentHandler):
	def __init__(self, serializer, buf):
		self.buf = buf
		self.serializer = serializer
		self.tags = []

	def startElement(self, name, attributes):
		tag = self.serializer.deserialize_element(name)
		if tag:
			self.tags.append(tag)

	def characters(self, data):
		start_it = it = self.buf.get_end_iter()
		mark = self.buf.create_mark(None, start_it, True)
		self.buf.insert(it, data)
		start_it = self.buf.get_iter_at_mark(mark)

		for tag in self.tags:
			self.buf.apply_tag_by_name(tag, start_it, it)

	def endElement(self, name):
		tag = self.serializer.deserialize_element(name)
		if tag:
			self.tags.remove(tag)

class RichTextSerializer:
	def __init__(self):
		self._open_tags = []

	def deserialize_element(self, el_name):
		if el_name == "bold":
			return "bold"
		elif el_name == "italic":
			return "italic"
		else:
			return None

	def serialize_tag_start(self, tag):
		if tag.get_property("name") == "bold":
			return "<bold>"
		elif tag.get_property("name") == "italic":
			return "<italic>"
		else:
			return "<unknown>"

	def serialize_tag_end(self, tag):
		if tag.get_property("name") == "bold":
			return "</bold>"
		elif tag.get_property("name") == "italic":
			return "</italic>"
		else:
			return "</unknown>"

	def serialize(self, buf):
		xml = "<richtext>"

		next_it = buf.get_start_iter()
		while not next_it.is_end():
			it = next_it.copy()
			if not next_it.forward_to_tag_toggle(None):
				next_it = buf.get_end_iter()

			reopen_tags = []
			for tag in it.get_toggled_tags(False):
				while 1:
					open_tag = self._open_tags.pop()
					if open_tag != tag:
						xml += self.serialize_tag_end(open_tag)
						reopen_tags.append(open_tag)
					else:
						xml += self.serialize_tag_end(tag)
						break

			for tag in reopen_tags + it.get_toggled_tags(True):
				self._open_tags.append(tag)
				xml += self.serialize_tag_start(tag)

			xml += buf.get_text(it, next_it)

			if next_it.is_end():
				for tag in self._open_tags:
					xml += self.serialize_tag_end(tag)

		xml += "</richtext>"

		return xml

	def deserialize(self, xml_string, buf):
		parser = xml.sax.make_parser()
		handler = RichTextHandler(self, buf)
		parser.setContentHandler(handler)
		parser.feed(xml_string)
		parser.close()

def test_quit(window, rich_buf):
	print RichTextSerializer().serialize(rich_buf)
	gtk.main_quit()

if __name__ == "__main__":
	window = gtk.Window()
	window.set_default_size(400, 300)

	vbox = gtk.VBox()

	rich_buf = RichTextBuffer()

	xml_string = "<richtext><bold><italic>Hello</italic>World</bold></richtext>"
	RichTextSerializer().deserialize(xml_string, rich_buf)

	view = gtk.TextView(rich_buf)
	vbox.pack_start(view)
	view.show()

	toolbar = RichTextToolbar(rich_buf)
	vbox.pack_start(toolbar, False)
	toolbar.show()

	window.add(vbox)
	vbox.show()

	window.show()

	window.connect("destroy", test_quit, rich_buf)

	gtk.main()
Python
0
@@ -368,77 +368,266 @@ me)
 -
	def unapply_tag(self, tag_name):
		self.active_tags.remove(tag_name +
		[start, end] = self.get_selection_bounds()
		self.apply_tag_by_name(tag_name, start, end)

	def unapply_tag(self, tag_name):
		self.active_tags.remove(tag_name)

		[start, end] = self.get_selection_bounds()
		self.remove_tag_by_name(tag_name, start, end )
d56515878d4a1d4d56a10426fe5d6c45de97a671
Create servos.py
gadgets/motors/servos.py
gadgets/motors/servos.py
Python
0.000022
@@ -0,0 +1,529 @@ +from gadgets.th_gpio import TH_GPIO
import time

class Servo5V():
	def __init__(self,pin_number=12,freq=100):
		self.pin_number = pin_number
		self.freq = freq
		self.pwm = TH_GPIO().pwm_create(self.pin_number,freq=self.freq)
		self.width = float(1000/self.freq)

	def set_freq(self,freq):
		self.freq = freq
		self.pwm.set_freq(freq)
		self.width = float(1000/self.freq)

	def write(self,angle):
		duty = float(angle)/self.width + 2.5
		self.pwm.change(duty)

	def cleanup(self):
		TH_GPIO().disable_pin(self.pin_number)
0a074f3af770f049cf6f112bdc7fa5ae35c4a6dc
Create Run.py
ImageNet/Run.py
ImageNet/Run.py
Python
0.000001
@@ -0,0 +1,2004 @@ +# From https://groups.google.com/a/tensorflow.org/forum/#!topic/discuss/4xjc7tSrb18 %0A %0Afrom __future__ import absolute_import%0Afrom __future__ import division%0Afrom __future__ import print_function%0A %0Aimport os, math, time%0Aimport cv2, csv%0Aimport numpy as np%0Aimport tensorflow as tf%0Aimport CIFAR10%0A%0Afrom datetime import datetime%0Afrom PIL import Image%0A%0Aos.environ%5B%22CUDA_VISIBLE_DEVICES%22%5D = %22-1%22%0A%0A%0AHOME = '/HOME/' # /HOME/DATA/ %0Awidth = 24%0Aheight = 24%0A%0Acategories = %5B%5D%0Awith open(HOME + %22DATA/LABELS%22, 'r') as csvfile:%0A Labels = csv.reader(csvfile, delimiter=' ', quotechar='%7C')%0A for L in Labels:%0A categories.append(L) # L%5B0%5D%0A%0Afilename = HOME + %22DATA/0000.png%22%0A#im = Image.open(filename)%0A#im.save(filename, format='PNG', subsampling=0, quality=100)%0A%0A%0Awith tf.Session() as sess:%0A input_img = tf.image.decode_png(tf.read_file(filename), channels=3)%0A tf_cast = tf.cast(input_img, tf.float32)%0A float_image = tf.image.resize_image_with_crop_or_pad(tf_cast, height, width)%0A float_image = tf.image.per_image_standardization(float_image)%0A images = tf.expand_dims(float_image, 0)%0A%0A logits = CIFAR10.inference(images)%0A _, top_k_pred = tf.nn.top_k(logits, k=5)%0A%0A variable_averages = tf.train.ExponentialMovingAverage(CIFAR10.MOVING_AVERAGE_DECAY)%0A variables_to_restore = variable_averages.variables_to_restore()%0A%0A saver = tf.train.Saver(variables_to_restore)%0A ckpt = tf.train.get_checkpoint_state(HOME+'MODEL')%0A%0A if ckpt and ckpt.model_checkpoint_path:%0A print(%22Model path = %22, ckpt.model_checkpoint_path)%0A saver.restore(sess, ckpt.model_checkpoint_path)%0A else:%0A print('No checkpoint file found.')%0A exit(0)%0A%0A #init_op = tf.initialize_all_variables() %0A #sess.run(init_op) %0A%0A _, top_indices = sess.run(%5B_, top_k_pred%5D)%0A for key, value in enumerate(top_indices%5B0%5D):%0A print (%22Type %2520s%22 %25 categories%5Bvalue%5D + %22%5Ct%5Ct%22 + str(_%5B0%5D%5Bkey%5D))%0A%0A
99f2130476064062c3dd6338163010df53d60594
Bootstrap output
output.py
output.py
Python
0.999999
@@ -0,0 +1,137 @@ +import colour%0Aimport csv%0Aimport json%0A%0A# export as json, csv, textfile or output to console%0A%0A%0Adef write_data(data, format=None):%0A pass%0A
b453943f86f97e38e52af3a1b048ee93b0177df8
add a test to make sure we don't have any more missing migrations
axes/tests/test_models.py
axes/tests/test_models.py
Python
0
@@ -0,0 +1,1023 @@ +from django.test import TestCase%0A%0A%0Aclass MigrationsCheck(TestCase):%0A def setUp(self):%0A from django.utils import translation%0A self.saved_locale = translation.get_language()%0A translation.deactivate_all()%0A%0A def tearDown(self):%0A if self.saved_locale is not None:%0A from django.utils import translation%0A translation.activate(self.saved_locale)%0A%0A def test_missing_migrations(self):%0A from django.db import connection%0A from django.apps.registry import apps%0A from django.db.migrations.executor import MigrationExecutor%0A executor = MigrationExecutor(connection)%0A from django.db.migrations.autodetector import MigrationAutodetector%0A from django.db.migrations.state import ProjectState%0A autodetector = MigrationAutodetector(%0A executor.loader.project_state(),%0A ProjectState.from_apps(apps),%0A )%0A changes = autodetector.changes(graph=executor.loader.graph)%0A self.assertEqual(%7B%7D, changes)%0A
1e7aee8c5597a7ccd9f2bc8f4e05e3ae489c3bfd
Add bot.py to run as an actual bot, via pywikibot
src/bot.py
src/bot.py
Python
0
@@ -0,0 +1,3069 @@ +from app import *%0Afrom time import sleep%0Aimport pywikibot%0A%0Adef run_bot(template_param, access_token=None, site=None, max_edits=100000):%0A cached_pages = list_cache_contents('bot_cache/')%0A edits_made = 0%0A for page_name in cached_pages:%0A print(page_name)%0A cache_fname = 'bot_cache/'+to_cache_name(page_name)%0A with open(cache_fname, 'r') as f:%0A page_json = json.load(f)%0A if run_bot_on_page(page_json, template_param, access_token=access_token, site=site):%0A edits_made += 1%0A sleep(3)%0A if edits_made %3E= max_edits:%0A return%0A%0Adef run_bot_on_page(proposed_edits, template_param, access_token=None, site=None):%0A page_name = proposed_edits%5B'page_name'%5D%0A%0A for edit in proposed_edits%5B'proposed_edits'%5D:%0A edit_hash = edit%5B'orig_hash'%5D%0A change = edit%5B'proposed_change'%5D%0A match = re.findall(r'%5E' + template_param, change)%0A if match:%0A try:%0A app.logger.info('Attempting change on %7B%7D: %7B%7D'.format(page_name, change))%0A change_made = perform_bot_edit(page_name, '%5B%5BWikipedia:OABOT%7COpen access bot%5D%5D: add %25s identifier to citation with #oabot.' %25 match%5B0%5D, edit_hash, change, access_token=access_token, site=site)%0A if change_made:%0A return True%0A except ValueError:%0A app.logger.exception('perform_bot_edit failed on %7B%7D'.format(page_name))%0A return False%0A%0Adef perform_bot_edit(page_name, summary, template_hash, proposed_addition, access_token=None, site=None):%0A # Get the page%0A text = main.get_page_over_api(page_name)%0A%0A # Perform each edit%0A new_text, change_made = make_new_wikicode_for_bot(text, template_hash, proposed_addition, page_name)%0A%0A # Save the page%0A if change_made:%0A if site:%0A page = pywikibot.Page(site, page_name)%0A page.text = new_text%0A page.save(summary)%0A else:%0A edit_wiki_page(page_name, new_text, access_token, summary, bot='yes')%0A%0A%0A # Remove the cache%0A cache_fname = %22bot_cache/%22+to_cache_name(page_name)%0A if os.path.isfile(cache_fname):%0A os.remove(cache_fname)%0A%0A return change_made%0A%0Adef make_new_wikicode_for_bot(text, template_hash, proposed_addition, page_name):%0A wikicode = mwparserfromhell.parse(text)%0A change_made = False%0A for template in wikicode.filter_templates():%0A edit = main.TemplateEdit(template, page_name)%0A if edit.orig_hash == template_hash:%0A try:%0A edit.update_template(proposed_addition)%0A change_made = True%0A except ValueError:%0A app.logger.exception('update_template failed on %7B%7D'.format(page_name))%0A pass # TODO report to the user%0A return unicode(wikicode), change_made%0A%0A%0Aif __name__ == '__main__':%0A import sys%0A template_param = sys.argv%5B1%5D%0A app.logger.info(%22Starting additions for parameter: %7B%7D%22.format(template_param))%0A site = pywikibot.Site()%0A site.login()%0A run_bot(template_param, site=site)%0A
eb517e5b323ea183571b9d4967f46821729dd3e7
add part 6
part_6.py
part_6.py
Python
0.000012
@@ -0,0 +1,414 @@ +# Let's try to draw a point moving on a line%0A# To make things simple, we are going back to 1D%0A%0Apos = 1%0Avelo = 1%0A%0A# Since there are multiple positions at a time, we can represent as a list%0Aline_1 = %5B' ', ' ', ' '%5D%0Aline_2 = 3*%5B' '%5D%0A%0A# Note how they are equal statements%0Aprint(line_1, line_2)%0A%0A# If we wanted to print the line without the list notation%0A# We can join an empty string with a list%0Aprint(%22%22.join(line_1))
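A minimal decoded sketch of the idea this diff builds toward: drawing the point into the character list and rendering it with join (width and position are illustrative):

line = 5 * [' ']        # a 1-D "screen" of five empty cells
pos = 2
line[pos] = '*'         # place the moving point
print("".join(line))    # prints "  *  " instead of [' ', ' ', '*', ' ', ' ']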
98a6fd1b1d095d6babc55c5d415c2450743fdba6
Add antibody audits
src/encoded/audit/antibody_lot.py
src/encoded/audit/antibody_lot.py
Python
0
@@ -0,0 +1,2355 @@ +from snovault import (%0A AuditFailure,%0A audit_checker,%0A)%0Afrom .conditions import rfa%0A%0A%0A@audit_checker('antibody_lot', frame=%5B'characterizations'%5D,%0A condition=rfa('ENCODE3', 'modERN'))%0Adef audit_antibody_missing_characterizations(value, system):%0A '''%0A Check to see what characterizations are lacking for each antibody,%0A for the cell lines we know about.%0A '''%0A if not value%5B'characterizations'%5D:%0A detail = '%7B%7D '.format(value%5B'@id'%5D) + %5C%0A 'does not have any supporting characterizations submitted.'%0A yield AuditFailure('no characterizations submitted', detail,%0A level='NOT_COMPLIANT')%0A return%0A%0A primary_chars = %5B%5D%0A secondary_chars = %5B%5D%0A num_compliant_primary = 0%0A compliant_secondary = False%0A for char in value%5B'characterizations'%5D:%0A if 'primary_characterization_method' in char:%0A primary_chars.append(char)%0A if char%5B'status'%5D in %5B'compliant', 'exempt from standards'%5D:%0A num_compliant_primary += 1%0A if 'secondary_characterization_method' in char:%0A secondary_chars.append(char)%0A if char%5B'status'%5D in %5B'compliant', 'exempt from standards'%5D:%0A compliant_secondary = True%0A%0A if not primary_chars:%0A detail = '%7B%7D '.format(value%5B'@id'%5D) + %5C%0A 'does not have any primary characterizations submitted.'%0A yield AuditFailure('no primary characterizations', detail,%0A level='NOT_COMPLIANT')%0A%0A if not secondary_chars:%0A detail = '%7B%7D '.format(value%5B'@id'%5D) + %5C%0A 'does not have any secondary characterizations submitted.'%0A yield AuditFailure('no secondary characterizations', detail,%0A level='NOT_COMPLIANT')%0A%0A if len(primary_chars) != num_compliant_primary:%0A detail = '%7B%7D '.format(value%5B'@id'%5D) + %5C%0A 'needs compliant primary in one or more cell types.'%0A yield AuditFailure('need compliant primaries', detail,%0A level='NOT_COMPLIANT')%0A%0A if secondary_chars and not compliant_secondary:%0A detail = '%7B%7D '.format(value%5B'@id'%5D) + %5C%0A 'needs a compliant secondary characterization.'%0A yield AuditFailure('need compliant secondary', detail,%0A level='NOT_COMPLIANT')%0A return%0A
e8560c42e3ae73f1753073b8ad6aef7d564e6d65
Implement basic active monitoring algorithm
Host/original.py
Host/original.py
Python
0.000018
@@ -0,0 +1,1067 @@ +import sys%0Afrom functools import reduce%0A%0AtempVmId = -1%0A%0Adef enhancedActiveVMLoadBalancer(vmStateList, currentAllocationCounts):%0A '''%0A vmStateList: Dict%3CvmId, vmState%3E%0A currentAllocationCounts: Dict%3CvmId, currentActiveAllocationCount%3E%0A '''%0A%0A global tempVmId%0A vmId = -1%0A%0A totalAllocations = reduce(lambda x, y: x + y, currentAllocationCounts)%0A%0A if(totalAllocations %3C len(vmStateList)):%0A for i, vm in enumerate(vmStateList):%0A if(currentAllocationCounts%5Bi%5D == 0):%0A vmId = i%0A break%0A else:%0A minCount = sys.maxint%0A for i, vm in enumerate(vmStateList):%0A curCount = currentAllocationCounts%5Bi%5D%0A%0A if(curCount %3C minCount):%0A if(i != tempVmId):%0A vmId = i%0A break%0A%0A tempVmId = vmId%0A print(%22Returning, %22, vmId)%0A return vmId%0A%0A%0AenhancedActiveVMLoadBalancer(%5B%0A %7B'cpu': 10, 'mem': 10%7D,%0A %7B'cpu': 17, 'mem': 40%7D,%0A %7B'cpu': 40, 'mem': 20%7D,%0A %7B'cpu': 80, 'mem': 15%7D%0A%5D, %5B1, 4, 1, 1%5D)%0A
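One caveat about the code above: it imports reduce from functools (Python 3) yet uses sys.maxint, which Python 3 removed. Under Python 3 the sentinel would be sys.maxsize, or the sentinel can be skipped entirely (a sketch, not part of the commit):

import sys
minCount = sys.maxsize   # Python 3 replacement for the removed sys.maxint

# or avoid the sentinel and pick the least-loaded VM index directly:
vmId = min(range(len(currentAllocationCounts)), key=currentAllocationCounts.__getitem__)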
0a5e419dd91317d3a9d755cc5e8ee32c3a68d4af
Fix dates in show pending notifications
src/ggrc/notification/__init__.py
src/ggrc/notification/__init__.py
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: [email protected] # Maintained By: [email protected] from collections import defaultdict from freezegun import freeze_time from datetime import date from ggrc.extensions import get_extension_modules from ggrc.models import Notification from ggrc.utils import merge_dict from ggrc import db from sqlalchemy import and_ class NotificationServices(): def __init__(self): self.services = self.all_notifications() def all_notifications(self): services = {} for extension_module in get_extension_modules(): contributions = getattr( extension_module, 'contributed_notifications', None) if contributions: if callable(contributions): contributions = contributions() services.update(contributions) return services def get_service_function(self, name): if name not in self.services: raise ValueError("unknown service name: %s" % name) return self.services[name] def call_service(self, name, pn): service = self.get_service_function(name) return service(pn) services = NotificationServices() def get_notification_data(notifications): if not notifications: return {} aggregate_data = {} def merge_into(destination, source): if destination is None: return source for pn in notifications: data = services.call_service(pn.object_type.name, pn) aggregate_data = merge_dict(aggregate_data, data) return aggregate_data def get_pending_notifications(): notifications = db.session.query(Notification).filter( Notification.sent_at == None).all() # noqa notif_by_day = defaultdict(list) for notification in notifications: notif_by_day[notification.send_on].append(notification) data = {} for day, notif in notif_by_day.iteritems(): with freeze_time(day): data[day] = get_notification_data(notif) return notifications, data def get_todays_notifications(): notifications = db.session.query(Notification).filter( and_(Notification.send_on <= date.today(), Notification.sent_at == None # noqa )).all() return notifications, get_notification_data(notifications) def generate_notification_email(data): pass def dispatch_notifications(): pass
Python
0
@@ -320,16 +320,26 @@ ort date +, datetime %0Afrom gg @@ -1909,16 +1909,78 @@ ta = %7B%7D%0A + today = datetime.combine(date.today(), datetime.min.time())%0A for da @@ -2013,24 +2013,58 @@ teritems():%0A + current_day = max(day, today)%0A with fre @@ -2072,16 +2072,24 @@ ze_time( +current_ day):%0A @@ -2097,16 +2097,24 @@ data%5B +current_ day%5D = g
b7e09bb39aa6161215799960bd5fda33a882e40f
fix docstring
01_basics/03_advanced_expressions/01_basic_indexing_soln.py
01_basics/03_advanced_expressions/01_basic_indexing_soln.py
Python
0.000018
@@ -0,0 +1,547 @@ +# Fill in the TODOs in this exercise, then run the script to see if your%0A# solution works.%0Aimport numpy as np%0Aimport theano.tensor as T%0A%0Adef increment_odd(x):%0A %22%22%22%0A x: a Theano vector%0A Returns:%0A y: a Theano vector equal to x, but with all odd-numbered elements%0A incremented by 1.%0A %22%22%22%0A%0A raise NotImplementedError(%22TODO: implement the function.%22)%0A%0Aif __name__ == %22__main__%22:%0A x = T.vector()%0A xv = np.zeros((4,), dtype=x.dtype)%0A yv = increment_odd(x).eval(%7Bx:xv%7D)%0A assert np.allclose(yv, np.array(%5B0., 1., 0., 1.%5D))%0A
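One way to fill in the TODO above (a sketch; it relies on Theano's inc_subtensor, which returns a copy of x with just the sliced elements incremented):

import theano.tensor as T

def increment_odd(x):
    # x[1::2] selects the odd-numbered elements; inc_subtensor adds 1 to only that slice
    return T.inc_subtensor(x[1::2], 1.)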
787b46749a26f8078c1ac4e914aea7fbd0ced8c6
Add test for checking that privatecode in journeys is unique per day
bin/test.py
bin/test.py
Python
0
@@ -0,0 +1,1432 @@ +import helper%0Aimport logging%0Aimport psycopg2%0Afrom settings.const import database_connect%0A%0Aconn = psycopg2.connect(database_connect)%0Acur = conn.cursor()%0Acur.execute(%22%22%22%0ASELECT j.id,jp.operator_id,j.operator_id FROM%0A(select journeypatternref,count(distinct pointorder) as points from pointinjourneypattern group by journeypatternref) as pattern,%0A(select timedemandgroupref,count(distinct pointorder) as timepoints from pointintimedemandgroup group by timedemandgroupref) as timepattern,%0Ajourney as j LEFT JOIN journeypattern as jp ON (j.journeypatternref = jp.id)%0AWHERE%0Aj.journeypatternref = pattern.journeypatternref AND%0Aj.timedemandgroupref = timepattern.timedemandgroupref AND %0Apoints != timepoints;%0A%22%22%22)%0Arows = cur.fetchall()%0Acur.close()%0AtimegroupsValid = len(rows) == 0%0Aassert timegroupsValid%0A%0Acur.execute(%22%22%22%0ASELECT links.operator_id,rechts.operator_id FROM %0A(SELECT j.id,j.operator_id,j.privatecode,validdate FROM journey as j LEFT JOIN availabilityconditionday USING (availabilityconditionref) where %0Aisavailable = true) as links,%0A(SELECT j.id,j.operator_id,j.privatecode,validdate FROM journey as j LEFT JOIN availabilityconditionday USING (availabilityconditionref) where %0Aisavailable = true) as rechts%0AWHERE links.id != rechts.id AND links.validdate = rechts.validdate AND links.privatecode = rechts.privatecode%0A%22%22%22)%0Arows = cur.fetchall()%0Acur.close()%0AduplicateTripidentifiers = len(rows) == 0%0Aassert uniqueTripidentifiers%0A
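Note that the test above cannot run as committed: the second block assigns duplicateTripidentifiers but asserts the undefined name uniqueTripidentifiers (a NameError), and it re-executes a query on a cursor that was already closed. A guess at the intended ending (my reconstruction, not the commit's):

uniqueTripidentifiers = len(rows) == 0
assert uniqueTripidentifiers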
3693b1aea769af1e0fbe31007a00f3e33bcec622
Add function to solve the pair sum problem
aids/sorting_and_searching/pair_sum.py
aids/sorting_and_searching/pair_sum.py
Python
0.002605
@@ -0,0 +1,934 @@ +'''%0AGiven an integer array, output all pairs that sum up to a specific value k%0A%0A'''%0A%0Afrom binary_search import binary_search_iterative%0A%0Adef pair_sum_sorting(arr, k):%0A%09'''%0A%09Using sorting - O(n logn)%0A%0A%09'''%0A%09number_of_items = len(arr)%0A%09if number_of_items %3C 2:%0A%09%09return %0A%09arr.sort()%0A%09for index, item in enumerate(arr):%0A%09%09index_pair = binary_search_iterative(arr, index, number_of_items - 1, k - item)%0A%09%09if index_pair and index_pair %3E index:%0A%09%09%09print item, arr%5Bindex_pair%5D%0A%0Adef pair_sum_set(arr, k):%0A%09'''%0A%09Using set - O(n) (time - average case), O(n) (space)%0A%0A%09'''%0A%09if len(arr) %3C 2:%0A%09%09return%0A%09seen = set()%0A%09output = set()%0A%09for item in arr:%0A%09%09target = k - item%0A%09%09if target not in seen:%0A%09%09%09seen.add(target)%0A%09%09else:%0A%09%09%09output.add(item, target) # print item, target%0A%09%09%09# for output with non-duplicate i.e. (1,3) and (3,1) are the samw thing%0A%09%09%09# output.add((min(num, target), max(num, target)))%0A%09print '%5Cn'.join(%5Bstr(item) for item in output%5D)
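Also worth flagging: output.add(item, target) in the set variant raises a TypeError, because set.add takes exactly one element. A corrected sketch of the O(n) approach (my fix; the de-duplicating tuple trick follows the commit's own comment):

def pair_sum_set(arr, k):
    seen, output = set(), set()
    for item in arr:
        if k - item in seen:
            output.add((min(item, k - item), max(item, k - item)))  # one element: a tuple
        seen.add(item)
    return output

print(pair_sum_set([1, 3, 2, 2], 4))  # {(1, 3), (2, 2)}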
72cfd9b52e860aaaca05e7ef7941d0b4e17ad95f
Add vocab_word.py
vocab_word.py
vocab_word.py
Python
0.998753
@@ -0,0 +1,2216 @@ +import cv2%0Aimport numpy as np%0A%0Afrom os import listdir%0Afrom os.path import isfile, join%0Afrom numpy import *%0Afrom scipy.cluster.vq import kmeans,vq%0A%0Adef buildVocabulary(path,k,grid_m,grid_n):%0A files = %5B f for f in listdir(path) if isfile(join(path,f)) %5D%0A dict_vocab = array(%5B%5D)%0A for i in range(0,grid_m):%0A for j in range(0,grid_n):%0A for f in files:%0A total_desc = array(%5B%5D)%0A img = cv2.imread(path+f)%0A desc = localFeature(img,grid_m,grid_n,i,j)%0A if len(desc.shape) == 1:%0A desc = array(%5Bdesc%5D)%0A if len(total_desc) == 0:%0A total_desc = desc%0A else:%0A total_desc = np.append(total_desc,desc,axis = 0)%0A vocab,dist = kmeans(total_desc,k) # k is the seed number%0A if len(dict_vocab) == 0:%0A dict_vocab = %5Bvocab%5D%0A else:%0A dict_vocab = np.append(dict_vocab,%5Bvocab%5D,axis = 0)%0A%0Adef findWord(dict_vocab,path,grid_m,grid_n):%0A files = %5B f for f in listdir(path) if isfile(join(path,f)) %5D%0A word_hist = array(%5B%5D)%0A for f in files:%0A img = cv2.imread(path+f)%0A line_hist = array(%5B%5D)%0A for i in range(0,grid_m):%0A for j in range(0,grid_n):%0A desc = localFeature(img,grid_m,grid_n,i,j)%0A hist = buildWordHist(desc,dict_vocab%5Bgrid_n*i+j%5D)%0A if len(line_hist) == 0:%0A line_hist = hist%0A else%0A line_hist = np.hstack((line_hist,hist))%0A if len(word_hist) == 0:%0A word_hist = line_hist%0A else:%0A word_hist = np.vstack((word_hist,line_hist))%0A return word_hist%0A%0Adef buildWordHist(desc,dict_part):%0A index,temp = vq(desc,dict_part)%0A k = dict_part.shape%5B0%5D%0A hist,bucket = np.histogram(index,bins = range(k+1))%0A return hist%0A%0Adef main():%0A path = '/home/alicelee0606/helloflask/'%0A d_path = path+'database/'%0A t_path = path+'testcase/'%0A k = 180%0A grid_m = 1%0A grid_n = 1%0A dict_vocab = buildVocabulary(d_path,k,grid_m,grid_n)%0A d_hist = findWord(dict_vocab,d_path,grid_m,grid_n)%0A t_hist = findWord(dict_vocab,t_path,grid_m,grid_n)%0A
9a67d63650b751c7b876f248bb3d82e619b37725
Add new script to create a list of words from frequencies
frequenciesToWords.py
frequenciesToWords.py
Python
0
@@ -0,0 +1,1727 @@ +#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%0A# Spell corrector - http://www.chiodini.org/%0A# Copyright %C2%A9 2015 Luca Chiodini %[email protected]%3E%0A#%0A# This program is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU Affero General Public License as%0A# published by the Free Software Foundation, either version 3 of the%0A# License, or (at your option) any later version.%0A#%0A# This program is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU Affero General Public License for more details.%0A#%0A# You should have received a copy of the GNU Affero General Public License%0A# along with this program. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0Aimport argparse%0Aimport codecs%0Aimport sys%0A%0A%0Adef main():%0A%0A parser = argparse.ArgumentParser(%0A description=%22Script to get pure words from unigrams frequencies.%22)%0A parser.add_argument(%22-f%22, %22--file%22, help=%22source file to be processed%22,%0A required=True)%0A parser.add_argument(%22-o%22, %22--output%22, help=%22output file with results%22,%0A required=True)%0A%0A args = parser.parse_args()%0A%0A words = set()%0A%0A # Process input file and save keys.%0A with codecs.open(args.file, 'r', 'utf8') as f:%0A idx = 0%0A for line in f:%0A if idx %3E 0: # skip first line (header)%0A vals = line.rsplit(' ', 1)%0A words.add(vals%5B0%5D)%0A idx += 1%0A%0A # Write keys to output file.%0A with codecs.open(args.output, 'w', 'utf8') as f:%0A for w in words:%0A f.write(%22%25s%5Cn%22 %25 w)%0A%0Aif __name__ == '__main__':%0A sys.exit(main())%0A
50e9418ade654360ce2feb2d22e4dae3eca691ea
make copytree copy recursively
simiki/utils.py
simiki/utils.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import print_function, unicode_literals, absolute_import import os import os.path import shutil import errno import logging logger = logging.getLogger(__name__) COLOR_CODES = { "reset": "\033[0m", "black": "\033[1;30m", "red": "\033[1;31m", "green": "\033[1;32m", "yellow": "\033[1;33m", "blue": "\033[1;34m", "magenta": "\033[1;35m", "cyan": "\033[1;36m", "white": "\033[1;37m", "bgred": "\033[1;41m", "bggrey": "\033[1;100m", } def color_msg(color, msg): return COLOR_CODES[color] + msg + COLOR_CODES["reset"] def check_extension(filename): """Check if the file extension is in the allowed extensions The `fnmatch` module can also get the suffix: patterns = ["*.md", "*.mkd", "*.markdown"] fnmatch.filter(files, pattern) """ allowed_extensions = (".md", ".mkd", ".mdown", ".markdown") return os.path.splitext(filename)[1] in allowed_extensions # def copytree(src, dst): # try: # shutil.copytree(src, dst) # except OSError as exc: # python >2.5 # if exc.errno == errno.ENOTDIR: # shutil.copy(src, dst) # else: raise def copytree(src, dst, symlinks=False, ignore=None): """Copy from source directory to destination""" # TODO: OSError: [Errno 17] File exists: '/home/tankywoo/simiki/html/css' if not os.path.exists(dst): os.makedirs(dst) for item in os.listdir(src): s = os.path.join(src, item) d = os.path.join(dst, item) if os.path.isdir(s): shutil.copytree(s, d, symlinks, ignore) else: shutil.copy2(s, d) def emptytree(directory): """Delete all the files and dirs under specified directory""" for p in os.listdir(directory): fp = os.path.join(directory, p) if not isinstance(fp, unicode): fp = unicode(fp, "utf-8") if os.path.isdir(fp): try: shutil.rmtree(fp) logger.info("Delete directory %s", fp) except OSError as e: logger.error("Unable to delete directory %s: %s", fp, unicode(e)) elif os.path.isfile(fp): try: logging.info("Delete file %s", fp) os.remove(fp) except OSError as e: logger.error("Unable to delete file %s: %s", fp, unicode(e)) else: logger.error("Unable to delete %s, unknown filetype", fp) def mkdir_p(path): """Make parent directories as needed, like `mkdir -p`""" try: os.makedirs(path) except OSError as exc: # Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(path): pass else: raise def listdir_nohidden(path): """List not hidden files or directories under path""" for f in os.listdir(path): if isinstance(f, str): f = unicode(f, "utf-8") if not f.startswith('.'): yield f if __name__ == "__main__": print(color_msg("black", "Black")) print(color_msg("red", "Red")) print(color_msg("green", "Green")) print(color_msg("yellow", "Yellow")) print(color_msg("blue", "Blue")) print(color_msg("magenta", "Magenta")) print(color_msg("cyan", "Cyan")) print(color_msg("white", "White")) print(color_msg("bgred", "Background Red")) print(color_msg("bggrey", "Background Grey"))
Python
0.000004
@@ -1587,39 +1587,32 @@ s):%0A -shutil. copytree(s, d, s
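Decoded, the one-word change above makes the helper call itself instead of shutil.copytree, so the recursion tolerates directories that already exist at the destination (shutil.copytree raises if dst exists). The resulting function, reassembled from the old contents plus the diff:

def copytree(src, dst, symlinks=False, ignore=None):
    """Copy from source directory to destination"""
    if not os.path.exists(dst):
        os.makedirs(dst)
    for item in os.listdir(src):
        s = os.path.join(src, item)
        d = os.path.join(dst, item)
        if os.path.isdir(s):
            copytree(s, d, symlinks, ignore)  # was shutil.copytree(s, d, symlinks, ignore)
        else:
            shutil.copy2(s, d)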
875fd0f57b1cbead04bd60b7d8c19cd1f106595a
add example python server
Server/server.py
Server/server.py
Python
0
@@ -0,0 +1,2305 @@ +#!/usr/bin/env python%0Aimport tornado.ioloop%0Aimport tornado.web%0Aimport tornado.websocket%0A%0Afrom tornado.options import define, options, parse_command_line%0A%0Aimport os%0Aimport json%0Aimport uuid%0A%0Adefine(%22port%22, default=8888, help=%22run on the given port%22, type=int) %0A %0Aclients = set()%0Ametadatas = dict()%0A%0Aclass DiscoveryClient():%0A connection = None%0A relations = set()%0A%0A def __init__(self, c):%0A self.connection = c%0A%0A%0Aclass WebSocketHandler(tornado.websocket.WebSocketHandler):%0A def open(self):%0A clients.add(DiscoveryClient(self))%0A return None%0A%0A def on_close(self):%0A for client in clients: %0A if client.connection == self:%0A clients.remove(client)%0A break%0A%0A def on_message(self, msg):%0A payload = json.loads(msg)%0A%0A # decompose json%0A body = payload%5B%22body%22%5D%0A header = payload%5B%22header%22%5D%0A%0A # handle %60absence%60%0A if header%5B%22type%22%5D == %22absence%22:%0A print %22Recived %60absence%60 message: %25s%22 %25 (body%5B%22id%22%5D)%0A for client in clients:%0A if client.connection == self:%0A client.relations.remove(body%5B%22id%22%5D)%0A%0A # handle %60presence%60%0A if header%5B%22type%22%5D == %22presence%22:%0A print %22Recived %60presence%60 message: %25s%22 %25 (body%5B%22id%22%5D)%0A payload = json.dumps(%7B%22header%22: %7B%22type%22: %22metadata%22%7D, %22body%22: metadatas%5Bbody%5B%22id%22%5D%5D%7D)%0A for client in clients:%0A if client.connection == self:%0A client.relations.add(body%5B%22id%22%5D)%0A%0A # send metadata user to client%0A client.connection.write_message(payload, binary=True)%0A %0A # handle %60metadata%60%0A if header%5B%22type%22%5D == %22metadata%22:%0A print %22Recived %60metadata%60 message: %25s%22 %25 (body)%0A metadatas%5Bbody%5B%22id%22%5D%5D = body%0A payload = json.dumps(%7B%22header%22: %7B%22type%22: %22metadata%22%7D, %22body%22: body%7D)%0A%0A for client in clients:%0A client.connection.ws_connection.write_message(payload, binary=True)%0A%0A%0A%0A%0Aapp = tornado.web.Application(%5B%0A (r'/chat', WebSocketHandler)%0A%5D)%0A%0Aif __name__ == '__main__':%0A parse_command_line()%0A%0A print %22Listening on port %25i%22 %25 (options.port)%0A app.listen(options.port)%0A tornado.ioloop.IOLoop.instance().start()%0A%0A
4535d6c41e17031b943e7016fc7de6f76b890f17
Put the test into the correct directory.
test/lib/test_inputsource.py
test/lib/test_inputsource.py
Python
0.999992
@@ -0,0 +1,881 @@ +########################################################################%0A# test/xslt/test_inputsource.py%0A%0Aimport os%0Afrom amara.lib import inputsource, iri, treecompare%0A%0Amodule_dir = os.path.dirname(os.path.abspath(__file__))%0A%0Arlimit_nofile = 300%0Atry:%0A import resource%0Aexcept ImportError:%0A pass%0Aelse:%0A rlimit_nofile = resource.getrlimit(resource.RLIMIT_NOFILE)%5B0%5D + 10%0A%0Adef test_many_inputsources():%0A assert rlimit_nofile %3C 20000, %22is your file limit really that large?%22%0A%0A # Amara's inputsource consumes a filehandle, in the 'stream' attribute%0A # See what happens if we run out of file handles.%0A sources = %5B%5D%0A filename = os.path.join(module_dir, %22borrowed%22, %22da_20000714_02.xslt%22)%0A for i in range(rlimit_nofile):%0A try:%0A sources.append(inputsource(filename))%0A except:%0A print %22Failed after%22, i, %22files%22%0A raise%0A
5bbb2a994397374356964b1db4c23b6b8ff5c848
Add the 'version' variable.
TODO/__init__.py
TODO/__init__.py
Python
0.001248
@@ -0,0 +1,1610 @@ +# The MIT License%0A#%0A# Copyright (c) 2016 Jeremie DECOCK (http://www.jdhp.org)%0A#%0A# Permission is hereby granted, free of charge, to any person obtaining a copy%0A# of this software and associated documentation files (the %22Software%22), to deal%0A# in the Software without restriction, including without limitation the rights%0A# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0A# copies of the Software, and to permit persons to whom the Software is%0A# furnished to do so, subject to the following conditions:%0A#%0A# The above copyright notice and this permission notice shall be included in%0A# all copies or substantial portions of the Software.%0A#%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0A# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN%0A# THE SOFTWARE.%0A%0A# PEP0440 compatible formatted version, see:%0A# https://www.python.org/dev/peps/pep-0440/%0A#%0A# Generic release markers:%0A# X.Y%0A# X.Y.Z # For bugfix releases %0A# %0A# Admissible pre-release markers:%0A# X.YaN # Alpha release%0A# X.YbN # Beta release %0A# X.YrcN # Release Candidate %0A# X.Y # Final release%0A#%0A# Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.%0A# 'X.Y.dev0' is the canonical version of 'X.Y.dev'%0A#%0A__version__ = '0.1.dev0'%0A%0A__all__ = %5B'TODO'%5D%0A
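The ordering described in the comment block can be checked mechanically; a sketch using the third-party packaging library (an assumption: the commit itself does not depend on it):

from packaging.version import Version

assert Version("0.1.dev0") < Version("0.1a1") < Version("0.1b1") \
       < Version("0.1rc1") < Version("0.1")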
fcd96cb766f3211a185a3aadbd7c8dde795134ca
Add ILCommand class
il_commands.py
il_commands.py
Python
0
@@ -0,0 +1,2067 @@ +%22%22%22Classes representing IL commands, including procedures to generate asm code%0Afrom a given IL command.%0A%0A%22%22%22%0A%0Aimport spots%0A%0Aclass ILCommand:%0A %22%22%22Base interface for all IL commands%22%22%22%0A def __init__(self):%0A raise NotImplementedError%0A%0A def input_values(self):%0A %22%22%22Return set of values read by this command.%22%22%22%0A raise NotImplementedError%0A%0A def output_values(self):%0A %22%22%22Return set of values modified by this command.%22%22%22%0A raise NotImplementedError%0A%0A def clobber_spots(self):%0A %22%22%22Return set of spots that are clobbered by this command.%22%22%22%0A raise NotImplementedError%0A%0A def make_asm(self, spotmap, asm_code):%0A %22%22%22Generate assembly code for this command. Generated assembly can read%0A any of the values returned from input_values, may overwrite any values%0A returned from output_values, and may change the value of any spots%0A returned from clobber_spots without worry.%0A%0A asm_code (ASMCode) - Object to which to save generated code.%0A spotmap - Dictionary mapping each input/output value to a spot.%0A%0A %22%22%22%0A raise NotImplementedError%0A%0Aclass AddCommand:%0A %22%22%22ADD - adds arg1 and arg2, then saves to output%22%22%22%0A def __init__(self, output, arg1, arg2):%0A self.output = output%0A self.arg1 = arg1%0A self.arg2 = arg2%0A%0A def input_values(self):%0A return %7Bself.arg1, self.arg2%7D%0A%0A def output_values(self):%0A return %7Bself.output%7D%0A%0A def clobber_spots(self):%0A # Current implementation lazily clobbers RAX always.%0A return set(spots.RAX)%0A%0A def make_asm(self, spotmap, asm_code):%0A arg1_asm = spotmap%5Bself.arg1%5D.asm_str(self.arg1.ctype.size)%0A arg2_asm = spotmap%5Bself.arg2%5D.asm_str(self.arg2.ctype.size)%0A output_asm = spotmap%5Bself.output%5D.asm_str(self.output.ctype.size)%0A rax_asm = spots.RAX.asm_str(self.arg1.ctype.size)%0A%0A asm_code.add_command(%22mov%22, rax_asm, arg1_asm)%0A asm_code.add_command(%22add%22, rax_asm, arg2_asm)%0A asm_code.add_command(%22mov%22, output_asm, rax_asm)%0A
0cf909cce9ba47f34297e87ae800f49b7ea4e18a
Correct ci failed tests
homeassistant/components/thermostat/radiotherm.py
homeassistant/components/thermostat/radiotherm.py
""" homeassistant.components.thermostat.radiotherm ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Adds support for Radio Thermostat wifi-enabled home thermostats """ import logging from homeassistant.components.thermostat import (ThermostatDevice, STATE_COOL, STATE_IDLE, STATE_HEAT) from homeassistant.const import (CONF_HOST, CONF_NAME, TEMP_FAHRENHEIT) from urllib.error import URLError REQUIREMENTS = ['radiotherm'] def setup_platform(hass, config, add_devices, discovery_info=None): """ Sets up the Radio Thermostat. """ logger = logging.getLogger(__name__) try: import radiotherm except ImportError: logger.exception( "Error while importing dependency radiotherm. " "Did you maybe not install the radiotherm dependency?") return host = config.get(CONF_HOST) name = config.get(CONF_NAME) if host is None: logger.error("host not defined in config.") return try: tstat = radiotherm.get_thermostat(host) except URLError as err: logger.Exception( "Unable to connect to Radio Thermostat") return add_devices([RadioThermostat(tstat, name)]) class RadioThermostat(ThermostatDevice): """ Represent a Radio Thermostat. """ def __init__(self, device, name=None): self.device = device if name: self.set_name(name) @property def name(self): """ Returns the name of the Radio Thermostat. """ return self.device.name['raw'] @property def unit_of_measurement(self): """ Unit of measurement this thermostat expresses itself in. """ return TEMP_FAHRENHEIT @property def device_state_attributes(self): """ Returns device specific state attributes. """ # Move these to Thermostat Device and make them global return { "humidity": None, "target_humidity": None, "fan": self.device.fmode['human'], "mode": self.device.tmode['human'] } @property def current_temperature(self): """ Returns the current temperature. """ return self.device.temp['raw'] @property def operation(self): """ Returns current operation. head, cool idle """ if self.device.tmode['human'] == 'Cool': return STATE_COOL elif self.device.tmode['human'] == 'Heat': return STATE_HEAT else: return STATE_IDLE @property def target_temperature(self): """ Returns the temperature we try to reach. """ if self.operation == STATE_COOL: temp = self.device.t_cool['raw'] elif self.operation == STATE_HEAT: temp = self.device.t_heat['raw'] return round(temp, 1) def set_temperature(self, temperature): """ Set new target temperature """ if self.operation == STATE_COOL: self.device.t_cool = temperature elif self.operation == STATE_HEAT: self.device.t_heat def set_name(self, name): """ Set thermostat name """ self.device.name = name
Python
0.000001
@@ -1076,15 +1076,8 @@ rror - as err :%0A @@ -1089,17 +1089,17 @@ logger. -E +e xception @@ -3075,16 +3075,30 @@ e.t_heat + = temperature %0A%0A de
732898dc4858ae5cfc7eac3e470069ac702f6c12
Add a command for deactivating a generation
mapit/management/commands/mapit_generation_deactivate.py
mapit/management/commands/mapit_generation_deactivate.py
Python
0.000004
@@ -0,0 +1,1440 @@ +# This script deactivates a particular generation%0A%0Afrom optparse import make_option%0Afrom django.core.management.base import BaseCommand%0Afrom mapit.models import Generation%0A%0Aclass Command(BaseCommand):%0A help = 'Deactivate a generation'%0A args = '%3CGENERATION-ID%3E'%0A option_list = BaseCommand.option_list + (%0A make_option('--commit', action='store_true', dest='commit',%0A help='Actually update the database'),%0A make_option('--force', action='store_true', dest='force',%0A help='Force deactivation, even if it would leave no active generations'))%0A%0A def handle(self, generation_id, **options):%0A generation_to_deactivate = Generation.objects.get(id=int(generation_id, 10))%0A if not generation_to_deactivate.active:%0A raise CommandError, %22The generation %25s wasn't active%22 %25 (generation_id,)%0A active_generations = Generation.objects.filter(active=True).count()%0A if active_generations %3C= 1 and not options%5B'force'%5D:%0A raise CommandError, %22You're trying to deactivate the only active generation. If this is what you intended, please re-run the command with --force%22%0A generation_to_deactivate.active = False%0A if options%5B'commit'%5D:%0A generation_to_deactivate.save()%0A print %22%25s - deactivated%22 %25 generation_to_deactivate%0A else:%0A print %22%25s - not deactivated, dry run%22 %25 generation_to_deactivate%0A
404ede26f42a29520f845c3df1650cf70080de5f
Fix Windows compatibility (again)
botogram/bot.py
botogram/bot.py
""" botogram.bot The actual bot application base Copyright (c) 2015 Pietro Albini <[email protected]> Released under the MIT license """ import re import logbook import uuid import requests.exceptions from . import api from . import objects from . import runner from . import defaults from . import components from . import utils from . import frozenbot from . import shared from . import tasks class Bot(frozenbot.FrozenBot): """A botogram-made bot""" def __init__(self, api_connection): self.logger = logbook.Logger('botogram bot') self.api = api_connection self.about = "" self.owner = "" self.hide_commands = ["start"] self.before_help = [] self.after_help = [] self.process_backlog = False self._lang = "" self._lang_inst = None # Set the default language to english self.lang = "en" self._components = [] self._main_component = components.Component("") self._main_component_id = self._main_component._component_id # Setup shared memory self._shared_memory = shared.SharedMemory() # Register bot's shared memory initializers inits = self._main_component._get_shared_memory_inits() maincompid = self._main_component._component_id self._shared_memory.register_inits_list(maincompid, inits) # Setup the scheduler self._scheduler = tasks.Scheduler() self._bot_id = str(uuid.uuid4()) self.use(defaults.DefaultComponent()) self.use(self._main_component, only_init=True) # Fetch the bot itself's object try: self.itself = self.api.call("getMe", expect=objects.User) except api.APIError as e: self.logger.error("Can't connect to Telegram!") if e.error_code == 401: self.logger.error("The API token seems to be invalid.") else: self.logger.error("Response from Telegram: %s" % e.description) exit(1) except requests.exceptions.ConnectionError: self.logger.error("Can't reach Telegram servers! 
Are you sure " "you're connected to the internet?") exit(1) # This regex will match all commands pointed to this bot self._commands_re = re.compile(r'^\/([a-zA-Z0-9_]+)(@' + self.itself.username+r')?( .*)?$') def __reduce__(self): # Use the standard __reduce__ return object.__reduce__(self) def __setattr__(self, name, value): # Use the standard __setattr__ return object.__setattr__(self, name, value) def before_processing(self, func): """Register a before processing hook""" return self._main_component.add_before_processing_hook(func) def process_message(self, func): """Add a message processor hook""" return self._main_component.add_process_message_hook(func) def message_equals(self, string, ignore_case=True): """Add a message equals hook""" def __(func): self._main_component.add_message_equals_hook(string, func, ignore_case) return func return __ def message_contains(self, string, ignore_case=True, multiple=False): """Add a message contains hook""" def __(func): self._main_component.add_message_contains_hook(string, func, ignore_case, multiple) return func return __ def message_matches(self, regex, flags=0, multiple=False): """Add a message matches hook""" def __(func): self._main_component.add_message_matches_hook(regex, func, flags, multiple) return func return __ def command(self, name): """Register a new command""" def __(func): self._main_component.add_command(name, func, _from_main=True) return func return __ def timer(self, interval): """Register a new timer""" def __(func): self._main_component.add_timer(interval, func) return func return __ def init_shared_memory(self, func): """Register a shared memory's initializer""" self._main_component.add_shared_memory_initializer(func) return func def use(self, *components, only_init=False): """Use the provided components in the bot""" for component in components: if not only_init: self.logger.debug("Component %s just loaded into the bot" % component.component_name) self._components.append(component) # Register initializers for the shared memory compid = component._component_id inits = component._get_shared_memory_inits() self._shared_memory.register_inits_list(compid, inits) # Register tasks self._scheduler.register_tasks_list(component._get_timers()) def process(self, update): """Process an update object""" # Updates are always processed in a frozen instance # This way there aren't inconsistencies between the runner and manual # update processing frozen = self.freeze() return frozen.process(update) def run(self, workers=2): """Run the bot with the multi-process runner""" inst = runner.BotogramRunner(self, workers=workers) inst.run() def freeze(self): """Return a frozen instance of the bot""" return frozenbot.FrozenBot(self.api, self.about, self.owner, self.hide_commands, self.before_help, self.after_help, self.process_backlog, self.lang, self.itself, self._commands_re, self._components+[self._main_component], self._scheduler, self._main_component._component_id, self._bot_id, self._shared_memory) @property def lang(self): return self._lang @lang.setter def lang(self, lang): """Update the bot's language""" if lang == self._lang: return self._lang_inst = utils.get_language(lang) self._lang = lang def _get_commands(self): """Get all the commands this bot implements""" result = {} for component in self._components: result.update(component._get_commands()) result.update(self._main_component._get_commands()) return result def create(api_key, *args, **kwargs): """Create a new bot""" conn = api.TelegramAPI(api_key) return Bot(conn, *args, 
**kwargs) def channel(name, api_key): """Get a representation of a channel""" conn = api.TelegramAPI(api_key) obj = objects.Chat({"id": 0, "type": "channel", "username": name}, conn) return obj
Python
0
@@ -2807,39 +2807,32 @@ hook%22%22%22%0A -return self._main_compo @@ -2860,32 +2860,52 @@ ssing_hook(func) +%0A return func %0A%0A def proces @@ -2974,31 +2974,24 @@ %22%22%22%0A -return self._main_c @@ -3029,16 +3029,36 @@ ok(func) +%0A return func %0A%0A de
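The fix in this diff is the standard registration-decorator contract: a registrar must return func, otherwise the decorated name is rebound to None. A minimal standalone sketch of the pattern:

hooks = []

def before_processing(func):
    hooks.append(func)
    return func              # omitting this rebinds the decorated name to None

@before_processing
def on_message(msg):
    print("got", msg)

on_message("hi")             # still callable after decoration; also queued in hooks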
f04d683d44507a53be39a2db54d545d2f2a1361b
Add example settings module
settings_example.py
settings_example.py
Python
0
@@ -0,0 +1,655 @@ +import os%0Aimport re%0A%0Afrom imap import EmailCheckError, EmailServer%0Afrom postgresql import DatabaseServer%0A%0A%0ACSV_FOLDER = os.getcwd()%0A%0A# Restrict emails by sender.%0AEMAIL_FROM = '[email protected]'%0A%0A# Restrict emails by subject.%0AEMAIL_SUBJECT_RE = re.compile(''.join(%5B%0A%09r'(?P%3Cyear%3E%5Cd%7B4%7D)',%0A%09r'(?P%3Cmonth%3E%5Cd%7B2%7D)',%0A%09r'(?P%3Cday%3E%5Cd%7B2%7D)',%0A%09r'(?P%3Chour%3E%5Cd%7B2%7D)',%0A%09r'(?P%3Cminute%3E%5Cd%7B2%7D)',%0A%09r'%5C.csv',%0A%5D))%0A%0ATABLE_NAME_FORMAT = 'data_%7Byear%7D%7Bmonth%7D'%0A%0A%0Adef get_database_client():%0A%09con = 'my_username/[email protected]:5432/my_database'%0A%09return DatabaseServer(con)%0A%0A%0Adef get_email_client():%0A%09return EmailServer('mail.example.com', 'my_username', 'my_password')%0A
29c20a662f347e720c5228f0853eaa6ac0164379
Create ScreenSocket.py
ScreenSocket.py
ScreenSocket.py
Python
0.000001
@@ -0,0 +1,1813 @@ +#!/usr/bin/env python%0A%0A##--Zachary Trette%0A##-- accepts commands for screen responses%0A##-- EV3- Remote - https://github.com/flyinactor91/EV3-Remote%0A%0A## 2013-12-1%0A%0Afrom socket import *%0Aimport sys, os%0Aimport pygame%0Afrom pygame.locals import *%0A%0Adef setup():%0A pygame.init()%0A w = 640%0A h = 480%0A size=(w,h)%0A screen = pygame.display.set_mode(size,HWSURFACE%7CDOUBLEBUF%7CRESIZABLE)%0A return screen, size%0A%0A%0Adef runCue(SorI, strOrImage):%0A if SorI == %22I%22:%0A im = pygame.image.load(strOrImage)%0A scrn.blit(pygame.transform.scale(im,size),(0,0))%0A%09pygame.display.flip()%09%0A elif SorI == %22T%22:%0A%09basicfont = pygame.font.SysFont(None, 48)%0A%09text = basicfont.render(strOrImage, True, (255, 0, 0), (0, 0, 0))%0A%09textrect = text.get_rect()%0A%09textrect.centerx = scrn.get_rect().centerx%0A%09textrect.centery = scrn.get_rect().centery%0A%09scrn.fill((0,0,0))%0A%09scrn.blit(text, textrect)%0A%09pygame.display.flip()%0A elif SorI == %22C%22:%0A%09basicfont = pygame.font.SysFont(None, 48)%0A%09text = basicfont.render(%22%22, True, (0, 0, 0), (0, 0, 0))%0A%09textrect = text.get_rect()%0A%09textrect.centerx = scrn.get_rect().centerx%0A%09textrect.centery = scrn.get_rect().centery%0A%09scrn.fill((0,0,0))%0A%09scrn.blit(text, textrect)%0A%09pygame.display.flip()%0A%0A%0ATCP_PORT = 5678%0AdefaultTimeout = 5%0Aif len(sys.argv) == 2:%0A%09TCP_IP = sys.argv%5B1%5D%0A%0ABUFFER_SIZE = 1024%0AscreenSocket = socket(AF_INET, SOCK_STREAM)%0AscreenSocket.bind(('' , TCP_PORT))%0AscreenSocket.listen(1)%0Adne = False%0Ascrn, size = setup()%0Awhile not dne:%0A%09connectionSocket , addr = screenSocket.accept()%0A%09connectionSocket.settimeout(defaultTimeout)%0A%09msg = connectionSocket.recv(BUFFER_SIZE)%0A msg = msg.strip()%0A%09if msg == 'QUIT':%0A%09%09print %22DONE%22%0A%09%09dne = True%0A%09else:%09%0A%09%09t = msg%5B0%5D%0A%09%09s = msg%5B1:%5D.strip()%0A%09%09runCue(t,s)%0A%09%0A%09#connectionSocket.send()%0A%09connectionSocket.close()%0A%0AscreenSocket.close()%0A%0A%0A%0A%0A %0A
d79ed2b4aa8315579688f4c6e9bfc8980e9717e3
Create chghost.py
merc/features/ircv32/chghost.py
merc/features/ircv32/chghost.py
Python
0.000003
@@ -0,0 +1,1351 @@ +from merc import capability%0Afrom merc import feature%0Afrom merc import message%0A%0A%0Aclass ChgHostFeature(feature.Feature):%0A NAME = __name__%0A%0A%0Ainstall = ChgHostFeature.install%0A%0A%[email protected]_user_capability%0Aclass ChgHostCapability(capability.Capability):%0A NAME = %22chghost%22%0A%0A%0Aclass _ChgHost(message.Command):%0A def handle_for(self, app, user, prefix):%0A user.check_is_irc_operator()%0A%0A target = self.get_target(app, user)%0A old_hostmask = target.hostmask%0A%0A target.username = self.username%0A target.host = self.host%0A app.network.user_broadcast(target, old_hostmask,%0A ChgHost(self.username, self.host))%0A%0A%[email protected]_user_command%0Aclass ChgHost(_ChgHost):%0A NAME = %22CHGHOST%22%0A MIN_ARITY = 2%0A%0A def __init__(self, username, host, *args):%0A self.username = username%0A self.host = host%0A%0A def as_command_params(self):%0A return %5Bself.username, self.host%5D%0A%0A def can_send_to(self, user):%0A return ChgHostCapability(user).get()%0A%0A def get_target(self, app, user):%0A return user%0A%0A%[email protected]_user_command%0Aclass SAChgHost(_ChgHost):%0A NAME = %22SACHGHOST%22%0A MIN_ARITY = 3%0A%0A def __init__(self, target, username, host, *args):%0A self.target = target%0A self.username = username%0A self.host = host%0A%0A def get_target(self, app, user):%0A return app.users.get(self.target)%0A
d4a7cdd400fe29458cc584455c7b082efed99e2b
Add files via upload
timedscripts/rollupwinlog.py
timedscripts/rollupwinlog.py
Python
0
@@ -0,0 +1,1132 @@ +import requests%0D%0Aimport json%0D%0Aimport pandas as pd%0D%0Aimport sqlite3%0D%0Afrom datetime import date%0D%0Afrom datetime import datetime%0D%0Afrom dateutil import parser%0D%0Aimport time%0D%0A%0D%0ASERVER = '10.24.25.130:8000'%0D%0Aconn = sqlite3.connect('usersrollup.db')%0D%0Ac = conn.cursor()%0D%0A%0D%0Adef makedb():%0D%0A%09c.execute('''Create Table users (username text,compname text,stat text,time text)''')%0D%0A%09conn.commit()%0D%0A%0D%0Anow = str(datetime.now())%09%0D%0Ar = requests.get('http://'+SERVER+'/get_dup')%0D%0A%0D%0Anewtxt = json.loads(r.text)%0D%0Aif (newtxt==%7B%7D): %0D%0A%09print(%22Returned nothing.%22);%0D%0Aelse:%0D%0A%09#print(newtxt,now)%0D%0A%09for x in newtxt:%0D%0A%09%09time.sleep(5)%0D%0A%09%09r = requests.get('http://'+SERVER+'/get_log?username='+x+'&compname=all')%0D%0A%09%09thisreturn = json.loads(r.text)%0D%0A%09%09#print(x,thisreturn)%0D%0A%09%09for key,value in thisreturn.items():%0D%0A%09%09%09data2 = (value%5B'username'%5D,value%5B'compname'%5D,value%5B'stat'%5D,now)%0D%0A%09%09%09try:%0D%0A%09%09%09%09c.execute(%22INSERT INTO users VALUES %22+str(data2))%0D%0A%09%09%09except sqlite3.OperationalError:%0D%0A%09%09%09%09makedb()%0D%0A%09%09%09%09c.execute(%22INSERT INTO users VALUES %22+str(data2))%0D%0A%09%09%09conn.commit()%0D%0A%0D%0A#need to request to clear the log%0D%0A#r = requests.get('http://'+SERVER+'/db?action=clearlog')%0D%0A%0D%0A
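A side note on the script above: building the INSERT by string concatenation breaks as soon as a field contains a quote; sqlite3's parameter binding avoids that (a sketch of the substitution, my suggestion rather than part of the commit):

c.execute("INSERT INTO users VALUES (?, ?, ?, ?)",
          (value['username'], value['compname'], value['stat'], now))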
98fbfe6e65c4cb32ea0f4f6ce6cba77f7fadcb7b
Add test for vendor object creation
app/api/tests/test_vendor_api.py
app/api/tests/test_vendor_api.py
Python
0
@@ -0,0 +1,2369 @@ +from django.test import Client, TestCase%0A%0Afrom .utils import obtain_api_key, create_admin_account%0A%0A%0Aclass VendorApiTest(TestCase):%0A %22%22%22Test for Vendor API.%22%22%22%0A%0A def setUp(self):%0A self.client = Client()%0A self.endpoint = '/api'%0A self.admin_test_credentials = ('admin', '[email protected]', 'qwerty123')%0A create_admin_account(*self.admin_test_credentials)%0A self.header = %7B%0A 'HTTP_X_TAVERNATOKEN': obtain_api_key(%0A self.client, *self.admin_test_credentials%0A )%0A %7D%0A self.vendors = (%0A ('vendor1', 'info1'),%0A ('vendor2', 'info2')%0A )%0A%0A def make_request(self, query, method='GET'):%0A if method == 'GET':%0A return self.client.get(self.endpoint,%0A data=%7B'query': query%7D,%0A **self.header%0A ).json()%0A%0A if method == 'POST':%0A return self.client.post(self.endpoint,%0A data=%7B'query': query%7D,%0A **self.header%0A ).json()%0A%0A def create_vendor(self, name, info):%0A query = '''%0A mutation%7B%0A createVendor(input: %7Bname: %22%25s%22, info: %22%25s%22%7D)%7B%0A vendor%7B%0A id,%0A originalId,%0A name,%0A info%0A %7D%0A %7D%0A %7D%0A ''' %25 (name, info)%0A%0A return self.make_request(query, 'POST')%0A%0A def retrieve_vendor(self, vendor_id):%0A query = 'query %7Bvendor(id: %22%25s%22) %7Bname%7D%7D' %25 (vendor_id)%0A%0A return self.make_request(query)%0A%0A def create_multiple_vendors(self):%0A return %5Bself.create_vendor(name, info) for name, info in self.vendors%5D%0A%0A def test_creation_of_vendor_object(self):%0A # For new vendor record%0A response = self.create_vendor('vendor4', 'info4')%0A created_vendor = response%5B'vendor'%5D%0A expected = %7B%0A 'vendor': %7B%0A 'id': created_vendor%5B'id'%5D,%0A 'originalId': created_vendor%5B'originalId'%5D,%0A 'name': 'vendor4',%0A 'info': 'info4'%0A %7D%0A %7D%0A self.assertEqual(expected, response)%0A
2cd2d7a20f2d19221b40aac9bfa1303dbfd97459
create metashare.wsgi
metashare/apache/metashare.wsgi
metashare/apache/metashare.wsgi
Python
0
@@ -0,0 +1,360 @@ +import os%0Aimport sys%0A%0Apath = '/var/www/CEF-ELRC'%0Aif path not in sys.path:%0A sys.path.insert(0, path)%0Asys.path.insert(0, '%7B0%7D/metashare'.format(path))%0Asys.path.append('%7B0%7D/lib/python2.7/site-packages'.format(path))%0A%0Aos.environ%5B'DJANGO_SETTINGS_MODULE'%5D = 'metashare.settings'%0Aimport django.core.handlers.wsgi%0Aapplication = django.core.handlers.wsgi.WSGIHandler()
123959bc3594299c2f1d4c54b11a996e92147347
Add missing migration
system_maintenance/migrations/0002_auto_20181214_2122.py
system_maintenance/migrations/0002_auto_20181214_2122.py
Python
0.0002
@@ -0,0 +1,1399 @@ +# Generated by Django 2.1.4 on 2018-12-14 21:22%0A%0Afrom django.db import migrations%0Aimport markupfield_helpers.helpers%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('system_maintenance', '0001_initial'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='documentationrecord',%0A name='documentation',%0A field=markupfield_helpers.helpers.MarkupField(blank=True, help_text='Document how to perform a task.', null=True, rendered_field=True),%0A ),%0A migrations.AlterField(%0A model_name='maintenancerecord',%0A name='description',%0A field=markupfield_helpers.helpers.MarkupField(blank=True, help_text='Enter a description of the system maintenance performed.', null=True, rendered_field=True),%0A ),%0A migrations.AlterField(%0A model_name='maintenancerecord',%0A name='problems',%0A field=markupfield_helpers.helpers.MarkupField(blank=True, help_text='Describe problems that arose during system maintenance.', null=True, rendered_field=True),%0A ),%0A migrations.AlterField(%0A model_name='maintenancerecord',%0A name='procedure',%0A field=markupfield_helpers.helpers.MarkupField(blank=True, help_text='Enter details of how the system maintenance was performed.', null=True, rendered_field=True),%0A ),%0A %5D%0A
880fb0c36e2adbe810e1b516cdd73ad3283ed859
use server_address, as we need auth
modules/recently_added.py
modules/recently_added.py
from flask import Flask, render_template, send_file import jsonrpclib import urllib from Maraschino import app from settings import * from maraschino.noneditable import * from maraschino.tools import * @app.route('/xhr/recently_added') @requires_auth def xhr_recently_added(): return render_recently_added_episodes() @app.route('/xhr/recently_added_movies') @requires_auth def xhr_recently_added_movies(): return render_recently_added_movies() @app.route('/xhr/recently_added_albums') @requires_auth def xhr_recently_added_albums(): return render_recently_added_albums() @app.route('/xhr/recently_added/<int:episode_offset>') @requires_auth def xhr_recently_added_episodes_offset(episode_offset): return render_recently_added_episodes(episode_offset) @app.route('/xhr/recently_added_movies/<int:movie_offset>') @requires_auth def xhr_recently_added_movies_offset(movie_offset): return render_recently_added_movies(movie_offset) @app.route('/xhr/recently_added_albums/<int:album_offset>') @requires_auth def xhr_recently_added_albums_offset(album_offset): return render_recently_added_albums(album_offset) @app.route('/xhr/vfs_proxy/<path:url>') def xhr_vfs_proxy(url): import StringIO try: vfs_url = '%s/vfs/' % (safe_server_address()) except: vfs_url = None vfs_url += urllib.unquote(url) img = StringIO.StringIO(urllib.urlopen(vfs_url).read()) return send_file(img, mimetype='image/jpeg') def render_recently_added_episodes(episode_offset=0): compact_view = get_setting_value('recently_added_compact') == '1' try: xbmc = jsonrpclib.Server(server_api_address()) recently_added_episodes = get_recently_added_episodes(xbmc, episode_offset) vfs_url = '/xhr/vfs_proxy/' except: recently_added_episodes = [] vfs_url = None return render_template('recently_added.html', recently_added_episodes = recently_added_episodes, vfs_url = vfs_url, episode_offset = episode_offset, compact_view = compact_view, ) def render_recently_added_movies(movie_offset=0): compact_view = get_setting_value('recently_added_movies_compact') == '1' try: xbmc = jsonrpclib.Server(server_api_address()) recently_added_movies = get_recently_added_movies(xbmc, movie_offset) vfs_url = '/xhr/vfs_proxy/' except: recently_added_movies = [] vfs_url = None return render_template('recently_added_movies.html', recently_added_movies = recently_added_movies, vfs_url = vfs_url, movie_offset = movie_offset, compact_view = compact_view, ) def render_recently_added_albums(album_offset=0): compact_view = get_setting_value('recently_added_albums_compact') == '1' try: xbmc = jsonrpclib.Server(server_api_address()) recently_added_albums = get_recently_added_albums(xbmc, album_offset) vfs_url = '/xhr/vfs_proxy/' except: recently_added_albums = [] vfs_url = None return render_template('recently_added_albums.html', recently_added_albums = recently_added_albums, vfs_url = vfs_url, album_offset = album_offset, compact_view = compact_view, ) def get_num_recent_episodes(): try: return int(get_setting_value('num_recent_episodes')) except: return 3 def get_num_recent_movies(): try: return int(get_setting_value('num_recent_movies')) except: return 3 def get_num_recent_albums(): try: return int(get_setting_value('num_recent_albums')) except: return 3 def get_recently_added_episodes(xbmc, episode_offset=0): num_recent_videos = get_num_recent_episodes() try: recently_added_episodes = xbmc.VideoLibrary.GetRecentlyAddedEpisodes(properties = ['title', 'season', 'episode', 'showtitle', 'playcount', 'thumbnail']) recently_added_episodes = 
recently_added_episodes['episodes'][episode_offset:num_recent_videos + episode_offset] for cur_ep in recently_added_episodes: cur_ep['safe_thumbnail'] = urllib.quote(cur_ep['thumbnail'], '') except: recently_added_episodes = [] return recently_added_episodes def get_recently_added_movies(xbmc, movie_offset=0): num_recent_videos = get_num_recent_movies() try: recently_added_movies = xbmc.VideoLibrary.GetRecentlyAddedMovies(properties = ['title', 'year', 'rating', 'playcount', 'thumbnail']) recently_added_movies = recently_added_movies['movies'][movie_offset:num_recent_videos + movie_offset] for cur_movie in recently_added_movies: cur_movie['safe_thumbnail'] = urllib.quote(cur_movie['thumbnail'], '') except: recently_added_movies = [] return recently_added_movies def get_recently_added_albums(xbmc, album_offset=0): num_recent_albums = get_num_recent_albums() try: recently_added_albums = xbmc.AudioLibrary.GetRecentlyAddedAlbums(properties = ['title', 'year', 'rating', 'artist', 'thumbnail']) recently_added_albums = recently_added_albums['albums'][album_offset:num_recent_albums + album_offset] for cur_album in recently_added_albums: cur_album['safe_thumbnail'] = urllib.quote(cur_album['thumbnail'], '') except: recently_added_albums = [] return recently_added_albums
Python
0
@@ -1261,21 +1261,16 @@ fs/' %25 ( -safe_ server_a
b080ae154cc8e948e3f4e7b79bfbde0221a31e61
Add devices detected by ping as SOURCE_TYPE_ROUTER instead of GPS (#5625)
homeassistant/components/device_tracker/ping.py
homeassistant/components/device_tracker/ping.py
""" Tracks devices by sending a ICMP ping. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/device_tracker.ping/ device_tracker: - platform: ping count: 2 hosts: host_one: pc.local host_two: 192.168.2.25 """ import logging import subprocess import sys from datetime import timedelta import voluptuous as vol from homeassistant.components.device_tracker import ( PLATFORM_SCHEMA, DEFAULT_SCAN_INTERVAL) from homeassistant.helpers.event import track_point_in_utc_time from homeassistant import util from homeassistant import const import homeassistant.helpers.config_validation as cv DEPENDENCIES = [] _LOGGER = logging.getLogger(__name__) CONF_PING_COUNT = 'count' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(const.CONF_HOSTS): {cv.string: cv.string}, vol.Optional(CONF_PING_COUNT, default=1): cv.positive_int, }) class Host: """Host object with ping detection.""" def __init__(self, ip_address, dev_id, hass, config): """Initialize the Host pinger.""" self.hass = hass self.ip_address = ip_address self.dev_id = dev_id self._count = config[CONF_PING_COUNT] if sys.platform == "win32": self._ping_cmd = ['ping', '-n 1', '-w 1000', self.ip_address] else: self._ping_cmd = ['ping', '-n', '-q', '-c1', '-W1', self.ip_address] def ping(self): """Send ICMP ping and return True if success.""" pinger = subprocess.Popen(self._ping_cmd, stdout=subprocess.PIPE) try: pinger.communicate() return pinger.returncode == 0 except subprocess.CalledProcessError: return False def update(self, see): """Update device state by sending one or more ping messages.""" failed = 0 while failed < self._count: # check more times if host in unreachable if self.ping(): see(dev_id=self.dev_id) return True failed += 1 _LOGGER.debug("ping KO on ip=%s failed=%d", self.ip_address, failed) def setup_scanner(hass, config, see): """Setup the Host objects and return the update function.""" hosts = [Host(ip, dev_id, hass, config) for (dev_id, ip) in config[const.CONF_HOSTS].items()] interval = timedelta(seconds=len(hosts) * config[CONF_PING_COUNT]) + \ DEFAULT_SCAN_INTERVAL _LOGGER.info("Started ping tracker with interval=%s on hosts: %s", interval, ",".join([host.ip_address for host in hosts])) def update(now): """Update all the hosts on every interval time.""" for host in hosts: host.update(see) track_point_in_utc_time(hass, update, now + interval) return True return update(util.dt.utcnow())
Python
0
@@ -486,16 +486,36 @@ INTERVAL +, SOURCE_TYPE_ROUTER )%0Afrom h @@ -2048,16 +2048,48 @@ f.dev_id +, source_type=SOURCE_TYPE_ROUTER )%0A
cebaba60bf12b00ce267fb84cb3029eb318041c2
Flask hello world
synthia.py
synthia.py
Python
0.999432
@@ -0,0 +1,110 @@ +from flask import Flask%0Aapp = Flask(__name__)%0A%0A%[email protected]('/')%0Adef hello_world():%0A return 'Hello, world.'%0A
7c33e8c7a386e911d835f81e637515d40dfc4e62
Add a Laplace equation solving benchmark (from Numpy)
benchmarks/bench_laplace.py
benchmarks/bench_laplace.py
Python
0.000031
@@ -0,0 +1,861 @@ +%22%22%22%0ABenchmark Laplace equation solving.%0A%0AFrom the Numpy benchmark suite, original code at%0Ahttps://github.com/yarikoptic/numpy-vbench/commit/a192bfd43043d413cc5d27526a9b28ad343b2499%0A%22%22%22%0A%0Aimport numpy as np%0A%0Afrom numba import jit%0A%0A%0Adx = 0.1%0Ady = 0.1%0Adx2 = (dx * dx)%0Ady2 = (dy * dy)%0A%0A@jit(nopython=True)%0Adef laplace(N, Niter):%0A u = np.zeros((N, N))%0A u%5B0%5D = 1%0A for i in range(Niter):%0A u%5B1:(-1), 1:(-1)%5D = ((((u%5B2:, 1:(-1)%5D + u%5B:(-2), 1:(-1)%5D) * dy2) +%0A ((u%5B1:(-1), 2:%5D + u%5B1:(-1), :(-2)%5D) * dx2))%0A / (2 * (dx2 + dy2)))%0A return u%0A%0A%0Aclass Laplace:%0A N = 150%0A Niter = 200%0A%0A def setup(self):%0A # Warm up%0A self.run_laplace(10, 10)%0A%0A def run_laplace(self, N, Niter):%0A u = laplace(N, Niter)%0A%0A def time_laplace(self):%0A self.run_laplace(self.N, self.Niter)%0A
4d8845e05cd58d507a00d9a85fbf8caf845fbb3a
implement the `can_import` (for a user) and `is_active` (global config).
oneflow/core/gr_import.py
oneflow/core/gr_import.py
# -*- coding: utf-8 -*-
"""
1flow "core" application. It's an Ember.JS based application,
which explains why we don't have many things here. Everything
takes place in the static/ and templates/ directories.
"""

import time
import redis
import logging
import datetime

import simplejson as json

from django.conf import settings

LOGGER = logging.getLogger(__name__)

REDIS = redis.StrictRedis(host=getattr(settings, 'MAIN_SERVER', 'localhost'),
                          port=6379, db=getattr(settings, 'REDIS_DB', 0))

ftstamp = datetime.datetime.fromtimestamp
boolcast = {
    'True': True,
    'False': False,
    'None': None,
    # The real None is needed in case of a non-existing key.
    None: None
}


class GoogleReaderImport(object):
    """ A small wrapper to get cached and up-to-date results from a GR import.

        We explicitly need to cast return values. See
        http://stackoverflow.com/a/13060733/654755 for details.

        It should normally not be needed (cf.
        https://github.com/andymccurdy/redis-py#response-callbacks)
        but for an unknown reason it made me crazy and I finally
        re-casted them again to make the whole thing work.
    """

    def __init__(self, user):
        self.key_base = 'gri:{0}'.format(user.id)

    @classmethod
    def __time_key(cls, key, set_time=False, time_value=None):
        if set_time:
            return REDIS.set(key, time.time()
                             if time_value is None else time_value)

        return ftstamp(float(REDIS.get(key) or 0.0))

    @classmethod
    def __int_incr_key(cls, key, increment=False):
        if increment == 'reset':  # return, else we increment to 1…
            return REDIS.delete(key)

        if increment:
            return REDIS.incr(key)

        return int(REDIS.get(key) or 0)

    @classmethod
    def __int_set_key(cls, key, set_value=None):
        if set_value is None:
            return int(REDIS.get(key) or 0)

        return REDIS.set(key, set_value)

    def user_infos(self, infos=None):
        key = self.key_base + ':uif'

        if infos is None:
            return REDIS.get(key)

        return REDIS.set(key, json.dumps(infos))

    def running(self, set_running=None):
        key = self.key_base + ':run'
        # Just to be sure we need to cast…
        # LOGGER.warning('running: set=%s, value=%s type=%s',
        #                set_running, REDIS.get(self.key_base),
        #                type(REDIS.get(self.key_base)))

        if set_running is None:
            return boolcast[REDIS.get(key)]

        return REDIS.set(key, set_running)

    def start(self, set_time=False, user_infos=None):
        if set_time:
            LOGGER.debug('start reset for %s', self)
            self.running(set_running=True)
            self.feeds('reset')
            self.reads('reset')
            self.starred('reset')
            self.articles('reset')
            self.total_feeds(0)
            self.total_reads(0)

            if user_infos is not None:
                self.user_infos(user_infos)

        return GoogleReaderImport.__time_key(self.key_base + ':start',
                                             set_time)

    def end(self, set_time=False):
        if self.running():
            self.running(set_running=False)

        return GoogleReaderImport.__time_key(self.key_base + ':end', set_time)

    def reg_date(self, set_date=None):
        return GoogleReaderImport.__time_key(self.key_base + ':rd',
                                             set_time=set_date is not None,
                                             time_value=set_date)

    def incr_feeds(self):
        return self.feeds(increment=True)

    def feeds(self, increment=False):
        return GoogleReaderImport.__int_incr_key(
            self.key_base + ':fds', increment)

    def total_feeds(self, set_total=None):
        return GoogleReaderImport.__int_set_key(
            self.key_base + ':tfs', set_total)

    def incr_reads(self):
        return self.reads(increment=True)

    def incr_starred(self):
        return self.starred(increment=True)

    def incr_articles(self):
        return self.articles(increment=True)

    def reads(self, increment=False):
        return GoogleReaderImport.__int_incr_key(
            self.key_base + ':rds', increment)

    def starred(self, increment=False):
        return GoogleReaderImport.__int_incr_key(
            self.key_base + ':str', increment)

    def articles(self, increment=False):
        return GoogleReaderImport.__int_incr_key(
            self.key_base + ':arts', increment)

    def total_reads(self, set_total=None):
        return GoogleReaderImport.__int_set_key(
            self.key_base + ':trds', set_total)
Python
0
@@ -310,35 +310,147 @@ rom -django.conf import settings +constance import config%0Afrom django.conf import settings%0Afrom django.contrib.auth import get_user_model%0A%0Afrom .models.nonrel import Article %0A%0ALO @@ -664,16 +664,41 @@ ', 0))%0A%0A +User = get_user_model()%0A%0A ftstamp @@ -731,16 +731,44 @@ mestamp%0A +today = datetime.date.today%0A boolcast @@ -1420,16 +1420,52 @@ user):%0A + self.user_id = user.id%0A @@ -1478,16 +1478,20 @@ ey_base + = 'gri:%7B @@ -1511,16 +1511,605 @@ er.id)%0A%0A + @property%0A def is_active(self):%0A return today() %3C config.GR_END_DATE %5C%0A and Article.objects().count() %3C config.GR_STORAGE_LIMIT%0A%0A @property%0A def can_import(self):%0A return User.objects.get(id=self.user_id%0A ).profile.data.get('GR_IMPORT_ALLOWED',%0A config.GR_IMPORT_ALLOWED)%0A%0A @can_import.setter%0A def can_import(self, yes_no):%0A user = User.objects.get(id=self.user_id)%0A user.profile.data%5B'GR_IMPORT_ALLOWED'%5D = bool(yes_no)%0A user.profile.save()%0A%0A @cla
9a3c5517446a2f26875925b1c42607ea6aa31b29
Implementing the first step of the Web App Flow.
getAuthenticationCode.py
getAuthenticationCode.py
Python
0.998887
@@ -0,0 +1,1607 @@ +#!/usr/bin/env python%0A#---------------------------------------------------------------------------%0A# Copyright 2013 Kitware Inc.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A#---------------------------------------------------------------------------%0A#%0A# This script implements the first step of the Web Application Flow:%0A#%0A# http://developer.github.com/v3/oauth/#web-application-flow%0A#%0A#---------------------------------------------------------------------------%0A%0Aimport requests%0Aimport json%0A%0AjsonFile=open('LocalConfiguration/accountCredentials.json')%0AaccountCredentials=json.load(jsonFile)%0AjsonFile.close()%0A%0AclientID=accountCredentials%5B'Client ID'%5D%0AredirectURI=accountCredentials%5B'Redirect URI'%5D%0Ascopes=accountCredentials%5B'Scopes'%5D%0Astate=accountCredentials%5B'State'%5D%0A%0AclientIdString='client_id='+clientID%0A%0Apayload=%7B'client_id':clientID,'redirect_uri':redirectURI,'scope':scopes,'state':state%7D%0A%0Aurl='https://github.com/login/oauth/authorize?'+clientIdString%0A%0Aheaders=%7B'Content-Type':'application/json','Accept':'application/json'%7D%0A%0Ar = requests.get(url,data=json.dumps(payload),headers=headers)%0A%0Aprint r%0A%0A
8956ee3bd89b12da20ebb1946d41c4133467ae79
Add py-pure-eval (#19180)
var/spack/repos/builtin/packages/py-pure-eval/package.py
var/spack/repos/builtin/packages/py-pure-eval/package.py
Python
0.000002
@@ -0,0 +1,754 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass PyPureEval(PythonPackage):%0A %22%22%22This is a Python package that lets you safely evaluate certain AST nodes%0A without triggering arbitrary code that may have unwanted side effects.%22%22%22%0A%0A homepage = %22https://github.com/alexmojaki/pure_eval%22%0A url = %22https://github.com/alexmojaki/pure_eval/archive/master.zip%22%0A git = %22https://github.com/alexmojaki/pure_eval.git%22%0A%0A version('master', branch='master')%0A%0A depends_on('[email protected]:3.9', type=('build', 'run'))%0A depends_on('py-setuptools@44:', type='build')%0A
6d1eda812d57c6c251fb037b005103172de886af
Update __init__.py
erpnext/__init__.py
erpnext/__init__.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import frappe

__version__ = '7.1.0-beta'

def get_default_company(user=None):
    '''Get default company for user'''
    from frappe.defaults import get_user_default_as_list

    if not user:
        user = frappe.session.user

    companies = get_user_default_as_list(user, 'company')
    if companies:
        default_company = companies[0]
    else:
        default_company = frappe.db.get_single_value('Global Defaults', 'default_company')

    return default_company

def get_default_currency():
    '''Returns the currency of the default company'''
    company = get_default_company()
    if company:
        return frappe.db.get_value('Company', company, 'default_currency')
Python
0.000072
@@ -93,16 +93,12 @@ '7. -1.0-beta +0.63 '%0A%0Ad
f0bd64992b05b0e7edd4b1ac6e99e1cd9db213d6
Create search.py
search.py
search.py
Python
0.000001
@@ -0,0 +1 @@ +%0A
c74a9943bbd9e7908ba884e0fea5b3390e8d668e
add migration
wastd/observations/migrations/0004_auto_20160905_1943.py
wastd/observations/migrations/0004_auto_20160905_1943.py
Python
0.000001
@@ -0,0 +1,3490 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.9 on 2016-09-05 11:43%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0Aimport wastd.observations.models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('observations', '0003_auto_20160902_1206'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='distinguishingfeatureobservation',%0A name='algal_growth',%0A field=models.CharField(choices=%5B('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')%5D, default='na', help_text='', max_length=300, verbose_name='Algal growth on carapace'),%0A ),%0A migrations.AlterField(%0A model_name='distinguishingfeatureobservation',%0A name='barnacles',%0A field=models.CharField(choices=%5B('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')%5D, default='na', help_text='', max_length=300, verbose_name='Barnacles'),%0A ),%0A migrations.AlterField(%0A model_name='distinguishingfeatureobservation',%0A name='damage_injury',%0A field=models.CharField(choices=%5B('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')%5D, default='na', help_text='', max_length=300, verbose_name='Obvious damage or injuries'),%0A ),%0A migrations.AlterField(%0A model_name='distinguishingfeatureobservation',%0A name='missing_limbs',%0A field=models.CharField(choices=%5B('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')%5D, default='na', help_text='', max_length=300, verbose_name='Missing limbs'),%0A ),%0A migrations.AlterField(%0A model_name='distinguishingfeatureobservation',%0A name='propeller_damage',%0A field=models.CharField(choices=%5B('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')%5D, default='na', help_text='', max_length=300, verbose_name='Propeller strike damage'),%0A ),%0A migrations.AlterField(%0A model_name='distinguishingfeatureobservation',%0A name='scanned_for_pit_tags',%0A field=models.CharField(choices=%5B('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')%5D, default='na', help_text='', max_length=300, verbose_name='Scanned for PIT tags'),%0A ),%0A migrations.AlterField(%0A model_name='distinguishingfeatureobservation',%0A name='tagging_scars',%0A field=models.CharField(choices=%5B('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')%5D, default='na', help_text='', max_length=300, verbose_name='Tagging scars'),%0A ),%0A migrations.AlterField(%0A model_name='encounter',%0A name='location_accuracy',%0A field=models.CharField(choices=%5B('10', 'GPS reading at exact location (10 m)'), ('1000', 'Site centroid or place name (1 km)'), ('10000', 'Rough estimate (10 km)')%5D, default='1000', help_text='The accuracy of the supplied location.', max_length=300, verbose_name='Location accuracy (m)'),%0A ),%0A migrations.AlterField(%0A model_name='mediaattachment',%0A name='attachment',%0A field=models.FileField(help_text='Upload the file', max_length=500, upload_to=wastd.observations.models.encounter_media, verbose_name='File attachment'),%0A ),%0A %5D%0A
9115628cf10e194f1975e01142d8ae08ab5c4b06
Add test for pandas dataframe loading
joommf/test_odtreader.py
joommf/test_odtreader.py
Python
0.000001
@@ -0,0 +1,1206 @@ +def test_odtreader_dynamics_example():%0A from joommf.sim import Sim%0A from joommf.mesh import Mesh%0A from joommf.energies.exchange import Exchange%0A from joommf.energies.demag import Demag%0A from joommf.energies.zeeman import FixedZeeman%0A from joommf.drivers import evolver%0A # Mesh specification.%0A lx = ly = lz = 50e-9 # x, y, and z dimensions (m)%0A dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)%0A%0A Ms = 8e5 # saturation magnetisation (A/m)%0A A = 1e-11 # exchange energy constant (J/m)%0A H = (1e3, 0, 0) # external magnetic field (A/m)%0A m_init = (0, 0, 1) # initial magnetisation%0A t_sim = 0.5e-9 # simulation time (s)%0A gamma = 2.21e5%0A alpha = 0.1%0A # Create a mesh.%0A mesh = Mesh((lx, ly, lz), (dx, dy, dz))%0A%0A # Create a simulation object.%0A sim = Sim(mesh, Ms, name='dynamics_example', debug=True)%0A%0A # Add energies.%0A sim.add_energy(Exchange(A))%0A sim.add_energy(Demag())%0A sim.add_energy(FixedZeeman(H))%0A sim.set_evolver(%0A evolver.LLG(t_sim, m_init, Ms, alpha, gamma, name='evolver'))%0A # Set initial magnetisation.%0A sim.set_m(m_init)%0A # Run simulation.%0A sim.run()%0A%0A assert sim.df.time.values%5B-1%5D == 0.5e-09%0A
64d8f45e1868fd73415e8f1fe6acc21868d45fa7
Add rfreceiver mode selector
catkin/src/appctl/scripts/rfreceiver_mode_select.py
catkin/src/appctl/scripts/rfreceiver_mode_select.py
Python
0
@@ -0,0 +1,943 @@ +#!/usr/bin/env python%0A%0A%22%22%22%0AThis node listens for keyfob button presses and changes the mode accordingly.%0A%22%22%22%0A%0Aimport rospy%0Afrom appctl.msg import Mode%0Afrom std_msgs.msg import Byte%0A%0A%0Aclass ButtonHandler:%0A def __init__(self, modes, mode_pub):%0A self.modes = modes%0A self.mode_pub = mode_pub%0A%0A def handle_msg(self, msg):%0A if msg.data in self.modes:%0A self.mode_pub.publish(mode=self.modes%5Bmsg.data%5D)%0A%0A%0Adef main():%0A rospy.init_node('rfreceiver_mode_select')%0A%0A modes = %7B%0A 1: 'tactile',%0A 2: 'attended'%0A %7D%0A%0A mode_pub = rospy.Publisher(%0A '/appctl/mode',%0A Mode,%0A queue_size = 1%0A )%0A%0A button_handler = ButtonHandler(modes, mode_pub)%0A%0A mode_sub = rospy.Subscriber(%0A '/rfreceiver/buttondown',%0A Byte,%0A button_handler.handle_msg%0A )%0A%0A rospy.spin()%0A%0Aif __name__=='__main__':%0A main()%0A%0A# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4%0A
9e58f5507ba0a128c696bdec218d244df27feb87
add list_graspability script
jsk_arc2017_common/scripts/list_graspability.py
jsk_arc2017_common/scripts/list_graspability.py
Python
0.000001
@@ -0,0 +1,303 @@ +#!/usr/bin/env python%0A%0Aimport jsk_arc2017_common%0A%0Agraspability = jsk_arc2017_common.get_object_graspability()%0Afor obj_id, obj in enumerate(graspability):%0A print('%7B:02%7D: %7B%7D'.format(obj_id+1, obj))%0A for style in graspability%5Bobj%5D:%0A print(' %7B%7D: %7B%7D'.format(style, graspability%5Bobj%5D%5Bstyle%5D))%0A
bdc04453938366e28ff91b6e16c536eca84d8bef
add summary generator
summary.py
summary.py
Python
0.000001
@@ -0,0 +1,2994 @@ +#!/usr/bin/env python%0A%0Aimport os%0Aimport json%0Aimport argparse%0Afrom argparse import ArgumentDefaultsHelpFormatter%0A%0Afrom time import gmtime, strftime, mktime%0Aimport datetime%0A%0A%0Aclass DatetimeConverter(object):%0A TIME_STR_FORMAT = '%25Y-%25m-%25dT%25H:%25M:%25S'%0A%0A @staticmethod%0A def get_UTC():%0A return gmtime()%0A%0A @staticmethod%0A def get_string_UTC():%0A return strftime(DatetimeConverter.TIME_STR_FORMAT, gmtime())%0A%0A @staticmethod%0A def get_datetime_from_string(input_time_string):%0A return datetime.strptime(input_time_string, DatetimeConverter.TIME_STR_FORMAT)%0A%0A @staticmethod%0A def get_timestamp_from_string(input_time_string):%0A return mktime(DatetimeConverter.get_datetime_from_string(input_time_string).timetuple())%0A%0A%0Aclass SummaryGenerator(object):%0A%0A def __init__(self, root_folder):%0A self.root_folder = root_folder%0A%0A def list_to_hierarchy_dict(self, dict_root, input_list):%0A if input_list:%0A node = input_list%5B0%5D%0A if type(input_list%5B0%5D) is not str:%0A node = str(input_list%5B0%5D)%0A current_node = dict_root.setdefault(node, %7B%7D)%0A self.list_to_hierarchy_dict(current_node, input_list%5B1:%5D)%0A%0A def generate_summary_dict(self):%0A ret_dict = %7B%7D%0A for root, dirs, files in os.walk(self.root_folder):%0A has_time = False%0A time_list = %5B%5D%0A time_sum = 0%0A time_counter = 0%0A for f in files:%0A if f.endswith('time'):%0A has_time = True%0A try:%0A t = int(f.replace('.time', ''))%0A time_list.append(t)%0A time_sum += t%0A time_counter += 1%0A except Exception:%0A pass%0A if has_time:%0A # generate hierarchy dir dict from list%0A dir_structure = root.split(os.sep)%0A self.list_to_hierarchy_dict(ret_dict, dir_structure)%0A%0A # go to the inner dir%0A cur_dir = ret_dict%0A for next_dir in dir_structure:%0A cur_dir = cur_dir%5Bnext_dir%5D%0A cur_dir%5Bstr(time_sum / time_counter)%5D = time_list%0A return ret_dict%0A%0A def run(self):%0A summary_dict = self.generate_summary_dict()%0A utc_time = DatetimeConverter.get_string_UTC()%0A ret_dict = %7B%0A 'summary': summary_dict,%0A 'UTC': utc_time%0A %7D%0A print(json.dumps(ret_dict, indent=4))%0A%0A%0Adef main():%0A arg_parser = argparse.ArgumentParser(description='Summary Generator',%0A formatter_class=ArgumentDefaultsHelpFormatter)%0A arg_parser.add_argument('-d', '--dir', dest='root_folder', action='store', default='.',%0A help='the root folder', required=True)%0A args = arg_parser.parse_args()%0A sg = SummaryGenerator(args.root_folder)%0A sg.run()%0A%0Aif __name__ == '__main__':%0A main()%0A
d75c519eb4c3b276f04ba58277d03801c8568ff0
Create 4.py
solutions/4.py
solutions/4.py
Python
0.000001
@@ -0,0 +1,448 @@ +# A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers is 9009 = 91 %C3%97 99.%0A# Find the largest palindrome made from the product of two 3-digit numbers.%0A%0Adef main():%0A max =0%0A for i in range(999,900,-1):%0A for j in range(999,900,-1):%0A product = str(i*j)%0A if ((product == product%5B::-1%5D) and ((i*j)%3Emax)):%0A max = product%0A print max%0A%0Amain()%0A
21ddecb7804501476d35290b0b0cb2b7311728ab
add hello world tornado
server.py
server.py
Python
0.999999
@@ -0,0 +1,400 @@ +import tornado.ioloop%0Aimport tornado.web%0A%0A%0Aclass MainHandler(tornado.web.RequestHandler):%0A def data_received(self, chunk):%0A pass%0A%0A def get(self):%0A self.write(%22Hello, world%22)%0A%0A%0Adef make_app():%0A return tornado.web.Application(%5B%0A (r%22/%22, MainHandler),%0A %5D)%0A%0A%0Aif __name__ == %22__main__%22:%0A app = make_app()%0A app.listen(8888)%0A tornado.ioloop.IOLoop.current().start()%0A
19cfaf8534626e5c6b5193da40a17cc092b24758
Use tuple instead of a list for DEFAULT_VIEWPORT_VIRTUAL_TAGS
taskwiki/constants.py
taskwiki/constants.py
DEFAULT_VIEWPORT_VIRTUAL_TAGS = ["-DELETED", "-PARENT"]
DEFAULT_SORT_ORDER = "due+,pri-,project+"
Python
0.000366
@@ -29,9 +29,9 @@ S = -%5B +( %22-DE @@ -51,9 +51,9 @@ ENT%22 -%5D +) %0ADEF
d93dada0fe434cd736d11b9cfb1635146130f24a
Add 031
031/main.py
031/main.py
Python
0.00051
@@ -0,0 +1,718 @@ +# Integers avoid having to rely on decimal.Decimal%0A# to handle rounding errors%0ACOINS = 1, 2, 5, 10, 20, 50, 100, 200%0ATARGET = 200%0A%0A%0Avisited = set()%0Asolutions = %5B%5D%0Astack = %5B(0, (0,) * len(COINS))%5D%0A%0Awhile stack:%0A total, state = stack.pop()%0A for cn, coin in enumerate(COINS):%0A new_total = total + coin%0A if new_total %3E TARGET:%0A continue%0A new_state = list(state)%0A new_state%5Bcn%5D += 1%0A new_state = tuple(new_state)%0A if new_state not in visited:%0A visited.add(new_state)%0A if new_total == TARGET:%0A solutions.append(new_state)%0A else: # %3C TARGET%0A stack.append((new_total, new_state))%0A%0Aprint(len(solutions))%0A
eaace54d5e7d8d2ebad42cf31cf071a9cf9d3e50
test case for creating a new story
umklapp/test.py
umklapp/test.py
Python
0.00035
@@ -0,0 +1,584 @@ +from django.test import TestCase%0Afrom django.test.utils import override_settings%0A%0Afrom umklapp.models import *%0A%0Aclass UmklappTestCase(TestCase):%0A def addUsers(self):%0A self.users = %5B%5D%0A for i in range(0,7):%0A u = User.objects.create_user(%0A %22user%25d%22 %25 i,%0A %[email protected]%22,%0A %22p455w0rd%22%0A )%0A self.users.append(u)%0A%0Aclass NewStoryTest(UmklappTestCase):%0A def setUp(self):%0A self.addUsers()%0A%0A def testNewStory(self):%0A Story.create_new_story(self.users%5B0%5D, self.users, %22first%22)%0A%0A
d20e468a32d1f476196525848688ae64845c4dce
Add Python solution
sg-ski.py
sg-ski.py
Python
0.000444
@@ -0,0 +1,2694 @@ +#!/usr/bin/env python%0A%0Aimport sys%0A%0Adef parse_map_file(path):%0A map_grid = %5B%5D%0A with open(path, 'r') as f:%0A width, height = map(int, f.readline().split())%0A for line in f:%0A row = map(int, line.split())%0A map_grid.append(row)%0A assert height == len(map_grid)%0A assert width == len(map_grid%5B0%5D)%0A return width, height, map_grid%0A%0A%0Adef make_grid(width, height, initial_value):%0A return %5Bwidth*%5Binitial_value%5D for i in range(height)%5D%0A%0A%0Adef get_length_and_elevation(x, y, map_grid, path_lengths, final_elevations):%0A path_length = path_lengths%5By%5D%5Bx%5D%0A if path_length != -1:%0A return path_length, final_elevations%5By%5D%5Bx%5D%0A%0A current_elevation = map_grid%5By%5D%5Bx%5D%0A longest_path = 0%0A lowest_elevation = current_elevation%0A%0A neighbors = %5B%0A (x, y - 1), # up%0A (x, y + 1), # down%0A (x - 1, y), # left%0A (x + 1, y), # right%0A %5D%0A for xn, yn in neighbors:%0A try:%0A neighbor = map_grid%5Byn%5D%5Bxn%5D%0A except IndexError:%0A continue%0A if neighbor %3C current_elevation:%0A path_length, final_elevation = get_length_and_elevation(xn, yn, map_grid, path_lengths, final_elevations)%0A if path_length %3E longest_path or (path_length == longest_path and final_elevation %3C lowest_elevation):%0A longest_path = path_length%0A lowest_elevation = final_elevation%0A%0A path_length = longest_path + 1%0A path_lengths%5By%5D%5Bx%5D = path_length%0A final_elevations%5By%5D%5Bx%5D = lowest_elevation%0A return path_length, lowest_elevation%0A%0A%0Adef main():%0A sys.stdout.write('Processing...')%0A sys.stdout.flush()%0A try:%0A width, height, map_grid = parse_map_file(sys.argv%5B1%5D)%0A except IOError as e:%0A sys.exit('Unable to read map file: %7B%7D'.format(e))%0A except ValueError as e:%0A sys.exit('Invalid map file: %7B%7D'.format(sys.argv%5B1%5D))%0A%0A # Initialize corresponding grids for path lengths and final elevations%0A path_lengths = make_grid(width, height, -1)%0A final_elevations = make_grid(width, height, -1)%0A%0A longest_path = -1%0A steepest_drop = -1%0A%0A for y, row in enumerate(map_grid):%0A for x, initial_elevation in enumerate(row):%0A path_length, final_elevation = get_length_and_elevation(x, y, map_grid, path_lengths, final_elevations)%0A drop = initial_elevation - final_elevation%0A if path_length %3E longest_path or (path_length == longest_path and drop %3E steepest_drop):%0A longest_path = path_length%0A steepest_drop = drop%0A%0A print '%5CrProcessing... DONE.'%0A print '%5Cnlength = %7B%7D, drop = %7B%7D%5Cn'.format(longest_path, steepest_drop)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
78e0135169d2c53b0b99c7811109eb1da040f14d
add bin2h.py
tools/bin2h.py
tools/bin2h.py
Python
0
@@ -0,0 +1,814 @@ +#!/usr/bin/env python%0A# vim: set expandtab ts=4 sw=4 tw=100:%0A%0Aimport sys%0Afrom optparse import OptionParser%0A%0Aparser = OptionParser()%0Aparser.add_option(%22-b%22, %22--before%22, dest=%22before%22, action=%22append%22,%0A help=%22text to put before, may be specified more than once%22)%0Aparser.add_option(%22-a%22, %22--after%22, dest=%22after%22, action=%22append%22,%0A help=%22text to put after, may be specified more than once%22)%0A(options, args) = parser.parse_args()%0A%0Aif options.before and len(options.before) %3E 0:%0A for b in options.before:%0A print b%0A%0Aoffset = 0%0Af = bytearray(sys.stdin.read())%0Afor c in f:%0A if offset != 0 and offset %25 16 == 0:%0A print %22%22%0A print %22%25#04x,%22 %25 c,%0A offset = offset + 1%0Aprint %22%22%0A%0Aif options.after and len(options.after) %3E 0:%0A for a in options.after:%0A print a%0A%0A
a9fb2000638a2fb5d9944254c0d3583f53d2f44c
Exclude MatrixIndexSelectionImageFilter from the filter coverage test - the image test it requires is not available in WrapITK.
Wrapping/WrapITK/Python/Tests/filterCoverage.py
Wrapping/WrapITK/Python/Tests/filterCoverage.py
#!/usr/bin/env python

import sys, re, itk, os
from sys import argv

# python 2.3 compatibility
if sys.version < '2.4' :
  # set compatibility
  import sets
  set = sets.Set

  def sorted(iterable, cmp=None, key=None, reverse=False) :
    i = list(iterable)
    if key :
      d = {}
      for v in iterable :
        k = key(v)
        if not d.has_key(k) :
          d[k] = []
        d[k].append(v)
      keys = d.keys()
      keys.sort(cmp)
      i = []
      for k in keys :
        i += d[k]
    else :
      i.sort(cmp)
    if reverse :
      i.reverse()
    return i

# declares filters which will not be wrapped
excluded = set([
  "UnaryFunctorImageFilter",
  "ReconstructionImageFilter",
  "PadImageFilter",
  "ObjectMorphologyImageFilter",
  "MovingHistogramDilateImageFilter",
  "MovingHistogramErodeImageFilter",
  "MovingHistogramImageFilter",
  "MovingHistogramMorphologicalGradientImageFilter",
  "MovingHistogramMorphologyImageFilter",
  "MorphologyImageFilter",
  "FFTWRealToComplexConjugateImageFilter",
  "FFTWComplexConjugateToRealImageFilter",
  "FFTRealToComplexConjugateImageFilter",
  "FFTComplexConjugateToRealImageFilter",
  "SCSLComplexConjugateToRealImageFilter",
  "SCSLRealToComplexConjugateImageFilter",
  "BinaryMorphologyImageFilter",
  "BinaryFunctorImageFilter",
  "TernaryFunctorImageFilter",
  "ShiftScaleInPlaceImageFilter",
  "FastIncrementalBinaryDilateImageFilter",
  "BasicMorphologicalGradientImageFilter",
  "TwoOutputExampleImageFilter",
  "NaryFunctorImageFilter",
  "NonThreadedShrinkImageFilter",
  "RegionGrowImageFilter",
  "ConnectedComponentFunctorImageFilter",
  "BasicDilateImageFilter",
  "BasicErodeImageFilter",
  "BasicErodeImageFilter",
  "AdaptImageFilter",
  "DeformationFieldJacobianDeterminantFilter",
  "WarpJacobianDeterminantFilter",
  "GetAverageSliceImageFilter",
  "ValuedRegionalExtremaImageFilter",
  "ProjectionImageFilter",
  "AnchorCloseImageFilter",
  "AnchorDilateImageFilter",
  "AnchorErodeDilateImageFilter",
  "AnchorErodeImageFilter",
  "AnchorOpenCloseImageFilter",
  "AnchorOpenImageFilter",
  "MiniPipelineSeparableImageFilter",
  "OptAdaptiveHistogramEqualizationImageFilter",
  "OptBinaryMorphologyImageFilter",
  "OptBlackTopHatImageFilter",
  "OptGrayscaleDilateImageFilter",
  "OptGrayscaleErodeImageFilter",
  "OptGrayscaleMorphologicalClosingImageFilter",
  "OptGrayscaleMorphologicalOpeningImageFilter",
  "OptMeanImageFilter",
  "OptMedianImageFilter",
  "OptMorphologicalGradientImageFilter",
  "OptMorphologyImageFilter",
  "OptNoiseImageFilter",
  "OptResampleImageFilter",
  "OptSimpleContourExtractorImageFilter",
  "OptWhiteTopHatImageFilter",
  "VanHerkGilWermanDilateImageFilter",
  "VanHerkGilWermanErodeDilateImageFilter",
  "VanHerkGilWermanErodeImageFilter",
  "GradientImageToBloxBoundaryPointImageFilter",
  "JoinImageFilter",
  "MaskedMovingHistogramImageFilter",
  "SimpleFuzzyConnectednessRGBImageFilter",
  "SimpleFuzzyConnectednessScalarImageFilter",
  "MeshToMeshFilter",
  "VectorFuzzyConnectednessImageFilter",
  "WarpJacobianDeterminantFilter",
])


# get filters from sources
headers = []
for d in argv[1:]:
  headers += sum([ f for p,d,f in os.walk(d) ], [])
filters = set([f[len('itk'):-len('.h')] for f in headers if f.endswith("Filter.h")]) - excluded

# get filter from wrapper files
# remove filters which are not in the toolkit (external projects, PyImageFilter, ...)
wrapped = set([a for a in dir(itk) if a.endswith("Filter")]).intersection(filters)

nonWrapped = filters - wrapped

# print non wrapped filters without much text to stdout, so they can be easily reused
for f in sorted(nonWrapped) :
  print f

# and print stats in stderr to avoid polluting the list above
print >>sys.stderr
print >>sys.stderr, '%i filters' % len(filters)
print >>sys.stderr, '%i wrapped filters' % len(wrapped)
print >>sys.stderr, '%i non wrapped filters' % len(nonWrapped)
print >>sys.stderr, '%f%% covered' % (len(wrapped) / float(len(filters)) * 100)
print >>sys.stderr

# the goal is to return a non zero value if coverage is not 100%
# but we are not yet at this stage !
#
# return len(nonWrapped)
Python
0
@@ -3036,16 +3036,53 @@ ilter%22,%0A + %22MatrixIndexSelectionImageFilter%22,%0A %5D)%0A%0A%0A# g
cf8a0105e0c4fc6af04ede6c7ae4fe4f4dac048e
add migrations
accelerator/migrations/0103_update_startupupdate_model.py
accelerator/migrations/0103_update_startupupdate_model.py
Python
0
@@ -0,0 +1,3522 @@ +# Generated by Django 2.2.28 on 2022-05-09 11:20%0A%0Afrom django.db import (%0A migrations,%0A models,%0A)%0A%0A%0Aclass Migration(migrations.Migration):%0A dependencies = %5B%0A ('accelerator', '0102_update_program_model'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='startupupdate',%0A name='acquired_valuation_usd',%0A field=models.DecimalField(%0A blank=True,%0A decimal_places=2,%0A max_digits=13,%0A null=True,%0A verbose_name='Valuation (in US dollars)'),%0A ),%0A migrations.AddField(%0A model_name='startupupdate',%0A name='active_annualized_revenue_usd',%0A field=models.DecimalField(%0A blank=True,%0A decimal_places=2,%0A max_digits=13,%0A null=True,%0A verbose_name='Annualized revenue (in US dollars)'),%0A ),%0A migrations.AddField(%0A model_name='startupupdate',%0A name='active_total_funding_usd',%0A field=models.DecimalField(%0A blank=True,%0A decimal_places=2,%0A max_digits=13,%0A null=True,%0A verbose_name='Total Funding Raised (in US dollars)'),%0A ),%0A migrations.AddField(%0A model_name='startupupdate',%0A name='active_valuation_usd',%0A field=models.DecimalField(%0A blank=True,%0A decimal_places=2,%0A max_digits=13,%0A null=True,%0A verbose_name='Valuation (in US dollars)'),%0A ),%0A migrations.AddField(%0A model_name='startupupdate',%0A name='currency_type',%0A field=models.CharField(%0A choices=%5B%0A ('USD', 'USD'), ('GBP', 'GBP'),%0A ('EUR', 'EUR'), ('JPY', 'JPY'),%0A ('AUD', 'AUD'), ('CAD', 'CAD'),%0A ('CHF', 'CHF'), ('NZD', 'NZD'),%0A ('NGN', 'NGN'), ('MXN', 'MXN')%5D,%0A default='USD',%0A max_length=5,%0A verbose_name='Status Currency'),%0A ),%0A migrations.AddField(%0A model_name='startupupdate',%0A name='ipo_valuation_usd',%0A field=models.DecimalField(%0A blank=True,%0A decimal_places=2,%0A max_digits=13,%0A null=True,%0A verbose_name='Valuation (in US dollars)'),%0A ),%0A migrations.AlterField(%0A model_name='startupupdate',%0A name='active_annualized_revenue',%0A field=models.DecimalField(%0A blank=True,%0A decimal_places=2,%0A max_digits=13,%0A null=True,%0A verbose_name='Annualized revenue'),%0A ),%0A migrations.AlterField(%0A model_name='startupupdate',%0A name='active_total_funding',%0A field=models.DecimalField(%0A blank=True,%0A decimal_places=2,%0A max_digits=13,%0A null=True,%0A verbose_name='Total Funding Raised'),%0A ),%0A migrations.AlterField(%0A model_name='startupupdate',%0A name='active_valuation',%0A field=models.DecimalField(%0A blank=True,%0A decimal_places=2,%0A max_digits=13,%0A null=True,%0A verbose_name='Valuation'),%0A ),%0A %5D%0A
7014c5affa780044fd46911287d883024bae3fae
Create ipy_custom_hbox.py
basics/layout/ipy_custom_hbox.py
basics/layout/ipy_custom_hbox.py
Python
0.000003
@@ -0,0 +1,947 @@ +%0Afrom PySide import QtCore%0Afrom PySide import QtGui%0A%0Aclass MyHBoxLayout(QtGui.QHBoxLayout):%0A def __init__(self, *args, **kwargs):%0A super(MyHBoxLayout, self).__init__(*args, **kwargs)%0A %0A @property%0A def margins(self):%0A return self.contentsMargins()%0A %0A @margins.setter%0A def margins(self, margins):%0A self.setContentsMargins(*margins)%0A%0Aclass MyWidget(QtGui.QWidget):%0A def __init__(self, parent=None):%0A super(MyWidget, self).__init__(parent)%0A self.setLayout(MyHBoxLayout())%0A self.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)%0A %0A @property%0A def lay(self):%0A return self.layout()%0A%0Aself = MyWidget()%0A%0Aself.show()%0A%0A##%0A%0Aself.lay.addWidget(QtGui.QPushButton('1'))%0A%0Aself.lay.addWidget(QtGui.QPushButton('2'))%0A%0Aself.lay.margins = %5B0%5D * 4%0A%0Aself.lay.setSpacing(15)%0A%0Aself.lay.addStretch()%0A%0Aself.lay.addWidget(QtGui.QPushButton('3'))%0A%0Aself.lay.setSpacing(0)%0A%0Aself.lay.setSpacing(10)%0A
17067d5d25c5ce755ba86505ffcf1dd6fd572deb
Initialize version.py
version.py
version.py
Python
0.000004
@@ -0,0 +1,54 @@ +%22%22%22python-tutorials version.%22%22%22%0A__version__ = '1.0.1'%0A
c95ef4dc0771098e3589721851865f012b652136
add requirements unit test
awx/main/tests/unit/test_python_requirements.py
awx/main/tests/unit/test_python_requirements.py
Python
0
@@ -0,0 +1,1448 @@ +from pip.operations import freeze%0Afrom django.conf import settings%0A%0Adef test_req():%0A def check_is_in(src, dests):%0A if src not in dests:%0A src2 = %5Bsrc%5B0%5D.replace('_', '-'), src%5B1%5D%5D%0A if src2 not in dests:%0A print(%22%25s not in%22 %25 src2) %0A return False%0A else:%0A print(%22%25s not in%22 %25 src)%0A return False%0A return True%0A%0A base_dir = settings.BASE_DIR%0A%0A reqs_actual = %5B%5D%0A xs = freeze.freeze(local_only=True, requirement=base_dir + %22/../requirements/requirements.txt%22)%0A for x in xs:%0A if '## The following requirements were added by pip freeze' in x:%0A break%0A reqs_actual.append(x.split('=='))%0A%0A reqs_expected = %5B%5D%0A with open(base_dir + %22/../requirements/requirements.txt%22) as f:%0A for line in f:%0A line.rstrip()%0A # TODO: process git requiremenst and use egg%0A if line.strip().startswith('#') or line.strip().startswith('git'):%0A continue%0A if line.startswith('-e'):%0A continue%0A line.rstrip()%0A reqs_expected.append(line.rstrip().split('=='))%0A %0A for r in reqs_actual:%0A print(r)%0A%0A not_found = %5B%5D%0A for r in reqs_expected:%0A res = check_is_in(r, reqs_actual)%0A if res is False:%0A not_found.append(r)%0A%0A raise RuntimeError(%22%25s not found in %5Cn%5Cn%25s%22 %25 (not_found, reqs_expected))%0A%0A%0A
1c27ff1533b48bd06debc438369f20f2d86cab0d
Include `Accept: application/json` to satisfy openshift API requirements
ofcourse/cli/openshift_utils.py
ofcourse/cli/openshift_utils.py
""" Author: Ryan Brown <[email protected]> License: Apache 2.0 """ import logging import os import pkg_resources import re import six import socket import uuid import requests import time import oshift from six.moves import StringIO import dulwich.porcelain as git class NotFound(BaseException): pass openshift_files = { "setup.py": { "contents": """from setuptools import setup setup(name='thecourse', version='1.0', description='courseware on openshift', author='Dr. Professor', author_email='[email protected]', url='http://www.python.org/sigs/distutils-sig/', install_requires=['ofcourse>={version}'], )""".format(version=pkg_resources.get_distribution('ofcourse').version), }, "wsgi.py": { "contents": """#!/usr/bin/python # IMPORTANT: Please do not make changes to this file unless you know what # you're doing. Thank you. import os virtenv = os.environ['OPENSHIFT_PYTHON_DIR'] + '/virtenv/' virtualenv = os.path.join(virtenv, 'bin/activate_this.py') try: execfile(virtualenv, dict(__file__=virtualenv)) except IOError: pass import ofcourse.site application = ofcourse.site.app""", }, } class TempBranch(object): def __init__(self, name, repo, delete=True): self.branch = 'refs/heads/{}'.format(name) self.delete = delete self.repo = repo # save the starting branch so we know where to go back to self.start = self.repo.refs.read_ref('HEAD').replace('ref: ', '') def __enter__(self): self.repo.refs.add_if_new(self.branch, self.repo.head()) self.repo.refs.set_symbolic_ref('HEAD', self.branch) def __exit__(self, exc_type, value, tb): if value is None: self.repo.refs.set_symbolic_ref('HEAD', self.start) # lol, only reset --hard is supported if self.delete: self.repo.refs.remove_if_equals(self.branch, None) else: six.reraise(exc_type, value, tb) git.reset(self.repo, "hard") def push(name, api, domain): repo = git.Repo(os.getcwd()) branch = "temp-{}".format(str(uuid.uuid4())[:8]) set_deploy_branch(name, branch, api, domain) remote = git_url(name, api, domain) if is_dirty(): print("Nuking changes.") git.reset(repo, "hard") with TempBranch(branch, repo, delete=True): for fname, file_info in openshift_files.items(): with open(fname, 'w') as f: f.write(file_info.get("contents", "")) repo.stage(fname) repo.do_commit("Commit openshift files") push_out = StringIO() push_err = StringIO() print("Pushing to openshift (may take a few minutes)") git.push(repo, remote, "refs/heads/{}".format(branch), outstream=push_out, errstream=push_err) push_out.seek(0) out = push_out.read() if not re.match(r'^Push to .* successful.', out): print("There was a failure while pushing") print("---BEGIN STDERR---") push_err.seek(0) print(push_err.read()) print("---BEGIN STDOUT---") print(out) print("There was a failure while pushing") git.rm(repo, openshift_files.keys()) map(os.remove, openshift_files.keys()) return get_app(name, api, domain)['app_url'] def is_clean(): return not is_dirty() def is_dirty(): """Check for uncommitted changes. 
True if dirty.""" repo = git.Repo(os.getcwd()) s = git.status(repo) return any(s.staged.values() + [s.unstaged]) def get_api(token): oshift.log.setLevel(logging.FATAL) return oshift.Openshift("openshift.redhat.com", token=token) def generate_token(uname, passwd): session = requests.post( "https://openshift.redhat.com/broker/rest/user/authorizations", auth=requests.auth.HTTPBasicAuth(uname, passwd), params={ 'scope': 'session', 'note': 'ofCourse CLI auth token', }, ) if session.status_code != 201: raise Exception("Uhoh {} response={}".format(session.status_code, session.text)) return session.json().get("data", {}).get("token", "") def new_app(name, api, domain, wait_until_available=True): try: get_app(name, api, domain) return except: pass # Ok, the app doesn't exist api.app_create(name, ['python-2.7'], domain_name=domain) if not wait_until_available: return while True: try: app = get_app(name, api, domain) socket.getaddrinfo(requests.utils.urlparse( app['app_url']).netloc, 80) break except NotFound: print("Waiting for new app...") time.sleep(5) except socket.gaierror as e: if e.errno != -2: raise e print("Waiting for new app...") time.sleep(5) def get_app(name, api, domain): apps = [a for a in api.app_list(domain_name=domain)[1] if a.get("name", "") == name] if apps: return apps[0] raise NotFound("Could not find app {}".format(name)) def git_url(name, api, domain): app = get_app(name, api, domain) remote = app['git_url'] # change SSH URL # from "ssh://user@host/dir/repo.git" # to "user@host:dir/repo.git" return remote.replace("ssh://", "").replace("/", ":", 1) def set_deploy_branch(name, branch, api, domain): app = get_app(name, api, domain) if app['deployment_branch'] != branch: api.app_action('UPDATE', name, domain_name=domain, deployment_branch=branch)
Python
0
@@ -4027,24 +4027,72 @@ %0A %7D,%0A + headers=%7B'Accept': 'application/json'%7D,%0A )%0A if
5f47cf46c82d9a48a9efe5ad11c6c3a55896da12
Implement abstract class for csc and csr matrix
cupy/sparse/compressed.py
cupy/sparse/compressed.py
Python
0.000047
@@ -0,0 +1,2488 @@ +from cupy import cusparse%0Afrom cupy.sparse import base%0Afrom cupy.sparse import data as sparse_data%0A%0A%0Aclass _compressed_sparse_matrix(sparse_data._data_matrix):%0A%0A def __init__(self, arg1, shape=None, dtype=None, copy=False):%0A if isinstance(arg1, tuple) and len(arg1) == 3:%0A data, indices, indptr = arg1%0A if shape is not None and len(shape) != 2:%0A raise ValueError(%0A 'Only two-dimensional sparse arrays are supported.')%0A%0A if not(base.isdense(data) and data.ndim == 1 and%0A base.isdense(indices) and indices.ndim == 1 and%0A base.isdense(indptr) and indptr.ndim == 1):%0A raise ValueError(%0A 'data, indices, and indptr should be 1-D')%0A%0A if len(data) != len(indices):%0A raise ValueError('indices and data should have the same size')%0A%0A if dtype is None:%0A dtype = data.dtype%0A%0A if dtype != 'f' and dtype != 'd':%0A raise ValueError('Only float32 and float64 are supported')%0A%0A sparse_data._data_matrix.__init__(self, data)%0A%0A self.indices = indices.astype('i', copy=copy)%0A self.indptr = indptr.astype('i', copy=copy)%0A%0A if shape is None:%0A shape = self._swap(len(indptr) - 1, int(indices.max()) + 1)%0A else:%0A raise ValueError(%0A 'Only (data, indices, indptr) format is supported')%0A%0A major, minor = self._swap(*shape)%0A if len(indptr) != major + 1:%0A raise ValueError('index pointer size (%25d) should be (%25d)'%0A %25 (len(indptr), major + 1))%0A%0A self._descr = cusparse.MatDescriptor.create()%0A self._shape = shape%0A%0A def _with_data(self, data):%0A return self.__class__(%0A (data, self.indices.copy(), self.indptr.copy()), shape=self.shape)%0A%0A def _swap(self, x, y):%0A raise NotImplementedError%0A%0A def get_shape(self):%0A %22%22%22Shape of the matrix.%0A%0A Returns:%0A tuple: Shape of the matrix.%0A %22%22%22%0A return self._shape%0A%0A def getnnz(self, axis=None):%0A %22%22%22Number of stored values, including explicit zeros.%22%22%22%0A if axis is None:%0A return self.data.size%0A else:%0A raise ValueError%0A%0A def sorted_indices(self):%0A %22%22%22Returns a copy of the matrix with sorted indices.%22%22%22%0A x = self.copy()%0A x.sort_indices()%0A return x%0A
36333c275f4d3a66c8f14383c3ada5a42a197bea
Add module for displaying RAM usage
bumblebee/modules/memory.py
bumblebee/modules/memory.py
Python
0
@@ -0,0 +1,854 @@ +import bumblebee.module%0Aimport psutil%0A%0Adef fmt(num, suffix='B'):%0A for unit in %5B %22%22, %22Ki%22, %22Mi%22, %22Gi%22 %5D:%0A if num %3C 1024.0:%0A return %22%7B:.2f%7D%7B%7D%7B%7D%22.format(num, unit, suffix)%0A num /= 1024.0%0A return %22%7B:05.2f%25%7D%7B%7D%7B%7D%22.format(num, %22Gi%22, suffix)%0A%0Aclass Module(bumblebee.module.Module):%0A def __init__(self, args):%0A super(Module, self).__init__(args)%0A self._mem = psutil.virtual_memory()%0A%0A def data(self):%0A self._mem = psutil.virtual_memory()%0A%0A free = self._mem.available%0A total = self._mem.total%0A%0A return %22%7B%7D/%7B%7D (%7B:05.02f%7D%25)%22.format(fmt(self._mem.available), fmt(self._mem.total), 100.0 - self._mem.percent)%0A%0A def warning(self):%0A return self._mem.percent %3C 20%0A%0A def critical(self):%0A return self._mem.percent %3C 10%0A%0A# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4%0A