column        dtype          min  max
commit        stringlengths  40   40
subject       stringlengths  1    3.25k
old_file      stringlengths  4    311
new_file      stringlengths  4    311
old_contents  stringlengths  0    26.3k
lang          stringclasses  3 values
proba         float64        0    1
diff          stringlengths  0    7.82k
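Each row pairs a commit's metadata with the single file it touched. The diff field appears to follow Google diff-match-patch patch syntax: character-offset "@@ -m,n +m,n @@" hunks whose raw serialization (patch_toText) URI-encodes control characters. Assuming that format, a row's new file contents can be rebuilt from old_contents; the row dict below is a hypothetical stand-in for however the dataset is actually loaded, and the example values are taken from the server/__main__.py row further down:

# Sketch: rebuild a row's new file contents from this dataset.
# Assumes the raw `diff` field is diff-match-patch patch text
# (pip install diff-match-patch); `row` is a hypothetical example row.
from diff_match_patch import diff_match_patch

def apply_row_diff(row):
    dmp = diff_match_patch()
    patches = dmp.patch_fromText(row["diff"])  # parse the @@ -m,n +m,n @@ hunks
    new_contents, ok = dmp.patch_apply(patches, row["old_contents"])
    if not all(ok):
        raise ValueError("some hunks failed to apply")
    return new_contents

# New-file rows start from empty old_contents:
row = {"old_contents": "", "diff": "@@ -0,0 +1,13 @@\n+import server\n"}
print(apply_row_diff(row))  # -> "import server"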
d34b2a13b454ca2c08bd5e8bc3b38d80fb5367c6
add initial mockup of curses UI
pyfs/ui.py
pyfs/ui.py
Python
0.000001
@@ -0,0 +1,2006 @@
+import curses
+import os
+import sys
+
+import pyfs
+
+class SimplePager(object):
+    def __init__(self):
+        self._old_stdout = sys.__stdout__
+        self._old_stdout_fd = os.dup(sys.stdout.fileno())
+        os.dup2(sys.stderr.fileno(), sys.stdout.fileno())
+
+        tty = open('/dev/tty')
+        os.dup2(tty.fileno(), 0)
+
+        self._scr = curses.initscr()
+
+        curses.noecho()
+        curses.cbreak()
+        curses.raw()
+
+        self._scr.keypad(1)
+
+        self._config = {
+            'default': 'find ./'
+        }
+
+    def cleanup(self):
+        self._scr.keypad(0)
+        curses.nocbreak()
+        curses.echo()
+        curses.endwin()
+
+        os.dup2(self._old_stdout_fd, sys.stdout.fileno())
+        sys.stdout = self._old_stdout
+
+    def run(self):
+        scanner = pyfs.Scanner(self._config)
+        scorer = pyfs.WeightedDistanceScore()
+
+        self._scr.addstr("Scanning ...")
+        self._scr.refresh()
+        files = scanner.scan()
+
+        max_y, _ = self._scr.getmaxyx()
+        max_y -= 1
+
+        self._scr.clear()
+        for line, match in enumerate(files[:max_y]):
+            self._scr.addstr(line, 0, match)
+        self._scr.refresh()
+
+
+        fm = pyfs.FuzzyMatch(files=files, scorer=scorer)
+
+        search = ''
+        while True:
+            c = self._scr.getch()
+
+            if c in (curses.KEY_ENTER, ord('\n')):
+                break
+            elif c in (curses.KEY_DC, curses.KEY_BACKSPACE):
+                if len(search):
+                    search = search[:-1]
+            else:
+                search += chr(c)
+
+            fm.update_scores(search)
+
+            self._scr.clear()
+            for line, match in enumerate(fm.top_matches(max_y)):
+                self._scr.addstr(line, 0, match)
+            self._scr.refresh()
+
+        self._scr.refresh()
+        self.cleanup()
+        return fm.top_matches(1)[0]
+
+
+def main():
+    ui = SimplePager()
+    result = ui.run()
+    sys.stdout.write(result.strip())
+
+if __name__ == '__main__':
+    main()
01b9d4a491e2d732e9684d0782dcbf38df5eeec9
Add adapters.py to new channelworm directory
channelworm/adapters.py
channelworm/adapters.py
Python
0
@@ -0,0 +1,1634 @@
+# configure django to use default settings
+# note that this can also be done using an environment variable
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+
+if hasattr(settings, 'DEBUG'):
+    # settings are configured already
+    pass
+else:
+    # load default settings if they're not set
+    from web_app import settings as defaults
+    settings.configure(default_settings=defaults, DEBUG=True)
+
+import ion_channel.models as C
+import PyOpenWorm as P
+from django.forms.models import model_to_dict
+
+
+class PatchClampAdapter(object):
+    """Map a channelworm model to a pyopenworm model"""
+
+    def __init__(self, cw_obj):
+        # initialize PyOpenWorm connection so we can access its API
+        P.connect()
+
+        self.channelworm_object = cw_obj
+        cw_dict = model_to_dict(self.channelworm_object)
+
+        experiment_id = cw_dict.pop('experiment')
+        patch_clamp_id = cw_dict.pop('id')
+
+        self.pyopenworm_object = P.Experiment()
+
+        # get the CW model's experiment
+        cw_evidence = C.Experiment.objects.get(id=experiment_id)
+
+        # make a PyOW evidence object with it
+        pow_evidence = P.Evidence(doi=cw_evidence.doi)
+
+        # add it to the PyOW experiment model
+        self.pyopenworm_object.reference(pow_evidence)
+
+        for key, value in cw_dict.iteritems():
+            self.pyopenworm_object.conditions.set(key, value)
+
+        # we not longer need PyOW API so we can kill the connection
+        P.disconnect()
+
+    def get_pow(self):
+        return self.pyopenworm_object
+
+    def get_cw(self):
+        return self.channelworm_object
f4689709f55a5e3209de7221853cb35a36699dcd
add file
check_sphinx.py
check_sphinx.py
Python
0.000001
@@ -0,0 +1,500 @@
+import py
+import subprocess
+
+def test_linkcheck(tmpdir):
+    doctrees = tmpdir.join("_build/doctrees")
+    htmldir = tmpdir.join("_build/html")
+    subprocess.check_call(["sphinx-build", "-W", "-blinkcheck", "-d",
+                           str(doctrees), "source", str(htmldir)])
+
+
+def test_build_docs(tmpdir):
+    doctrees = tmpdir.join("_build/doctrees")
+    htmldir = tmpdir.join("_build/html")
+    subprocess.check_call([ "sphinx-build", "-n", "-W", "-bhtml", "-d",
+                            str(doctrees), "source", str(htmldir)])
9207041afb78f8d36442b7ee19b95055ebbc99cd
add test forms
app/tests/test_form.py
app/tests/test_form.py
Python
0
@@ -0,0 +1,982 @@
+from django.test import TestCase
+from app.forms import FormAppOne
+
+
+class TestForm(TestCase):
+
+    def test_invalid_name_form(self):
+        form = FormAppOne({'name': '1234', 'description': 'validate name'})
+        self.assertFalse(form.is_valid())
+        self.assertEquals(form.errors, {'name': [u'Name must be only text']})
+
+    def test_invalid_description_form(self):
+        form = FormAppOne({'name': 'asd'})
+        self.assertFalse(form.is_valid())
+        self.assertEquals(form.errors, {'description':
+                                        [u'This field is required.']})
+
+    def test_required_fields(self):
+        form = FormAppOne({})
+        self.assertFalse(form.is_valid())
+        self.assertEquals(form.errors, {'name': [u'This field is required.'],
+                                        'description': [u'This field is required.']})
+
+    def test_valid_form(self):
+        form = FormAppOne({'name': 'valder', 'description': 'validate name'})
+        self.assertTrue(form.is_valid())
a460b73861d406b14519b3e391190d1b8d7e57a9
Add maximal margin classifier.
max_margin_classifier.py
max_margin_classifier.py
Python
0.000007
@@ -0,0 +1,1124 @@
+import numpy as np
+import matplotlib.pylab as plt
+from sklearn.svm import SVC
+
+X = np.array([[3, 4], [2, 2], [4, 4], [1, 4], [2, 1], [4, 3], [4, 1]])
+y = np.array(['Red', 'Red', 'Red', 'Red', 'Blue', 'Blue', 'Blue'])
+
+linear_svm = SVC(kernel = 'linear', C = 2 ** 15)
+linear_svm.fit(X, y)
+## w0 * X_1 + w1 * X_2 + b = 0 <=> X_2 = -w0 / w1 * X_1 - b / w1
+w = linear_svm.coef_[0]
+print('Margin: %s'%(1.0 / np.linalg.norm(w)))
+b = linear_svm.intercept_
+slope = -w[0] / w[1]
+## points in the separating line
+xx = np.linspace(np.amin(X[:, 0]), np.amax(X[:, 0]))
+yy = slope * xx - b / w[1]
+## points in the two gutters
+yy_top = yy + 1.0 / w[1]
+yy_bottom = yy - 1.0 / w[1]
+## canvas
+fig, ax = plt.subplots(1, 1)
+ax.set_title('Maximal margin classifier')
+# draw points
+ax.scatter(X[:, 0], X[:, 1], c = y)
+# draw separating line
+ax.plot(xx, yy, 'k-')
+# draw gutters
+ax.plot(xx, yy_top, 'k--')
+ax.plot(xx, yy_bottom, 'k--')
+# draw support vectors
+ax.scatter(linear_svm.support_vectors_[:, 0], linear_svm.support_vectors_[:, 1],
+           s = 100, facecolors = 'none')
+# set labels
+ax.set_xlabel('X_1')
+ax.set_ylabel('X_2')
+
+plt.show()
2e821ab48542c89ac41ebc17036bddc164506a22
Backup of some unused code
combine_data/cartesianProductOfIDs.py
combine_data/cartesianProductOfIDs.py
Python
0.000001
@@ -0,0 +1,806 @@
+import argparse
+import itertools
+
+if __name__ == '__main__':
+	parser = argparse.ArgumentParser(description='Generate the cartesian product of two ID files')
+	parser.add_argument('--idFileA',required=True,type=str,help='First file of IDs')
+	parser.add_argument('--idFileB',required=True,type=str,help='Second file of IDS')
+	parser.add_argument('--outFile',required=True,type=str,help='Output file')
+	args = parser.parse_args()
+
+	with open(args.idFileA) as f:
+		idsA = [ int(line.strip()) for line in f ]
+	with open(args.idFileB) as f:
+		idsB = [ int(line.strip()) for line in f ]
+
+	idsA = sorted(list(set(idsA)))
+	idsB = sorted(list(set(idsB)))
+
+	with open(args.outFile,'w') as outF:
+		for a,b in itertools.product(idsA,idsB):
+			outF.write("%d\t%d\n" % (min(a,b),max(a,b)))
+
+	print "Processing complete."
d3210b3d25a2eef7c4d066878d444b9b381243eb
add roulette
modules/ruletti.py
modules/ruletti.py
Python
0.000387
@@ -0,0 +1,833 @@
+# -*- coding: ISO-8859-15 -*-
+
+from twisted.internet import reactor
+from core.Uusipuu import UusipuuModule
+import random
+
+class Module(UusipuuModule):
+
+    def startup(self):
+        self.scheduled['unban'] = []
+
+    def cmd_ruletti(self, user, target, params):
+        nick = user.split('!', 1)[0]
+        if random.choice(range(0, 6)) < 3:
+            self.bot.mode(self.channel, True, 'b %s!*@*' % nick)
+            self.bot.kick(self.channel, nick, 'naps!')
+            self.log('%s - Nyt napsahti!' % nick)
+            d = reactor.callLater(5, self.unban, nick)
+            self.scheduled['unban'].append(d)
+        else:
+            self.chanmsg('%s: klik!' % nick)
+            self.log('%s - Klik!' % nick)
+
+    def unban(self, nick):
+        self.bot.mode(self.channel, False, 'b %s!*@*' % nick)
+
+# vim: set et sw=4:
432cbfc65ea1e6c1b9079915cce20769b88502fe
add wrapper script to run dbify module
scripts/run_dbify.py
scripts/run_dbify.py
Python
0
@@ -0,0 +1,1264 @@
+import logging
+import os
+import sys
+
+import argparse
+
+import _mypath
+from bripipetools import genlims
+from bripipetools import dbify
+
+def parse_input_args(parser=None):
+    parser.add_argument('-p', '--import_path',
+                        required=True,
+                        default=None,
+                        help=("path to flowcell run folder - e.g., "
+                              "/mnt/genomics/Illumina/"
+                              "150218_D00565_0081_BC5UF5ANXX/ - "
+                              "or workflow batch file"))
+    parser.add_argument('-d', '--debug',
+                        action='store_true',
+                        help=("Set logging level to debug"))
+
+    # Parse and collect input arguments
+    args = parser.parse_args()
+
+    return parser.parse_args()
+
+def main(argv):
+    parser = argparse.ArgumentParser()
+    args = parse_input_args(parser)
+
+    if args.debug:
+        logging.basicConfig(level=logging.DEBUG)
+    else:
+        logging.basicConfig(level=logging.INFO)
+    logger = logging.getLogger(__name__)
+
+    logger.info("importing data based on path {}"
+                .format(args.import_path))
+    dbify.ImportManager(path=args.import_path, db=genlims.db).run()
+
+if __name__ == "__main__":
+    main(sys.argv[1:])
d5b3bce22aec3e84b59fad379859aa636f2d3f1a
Create ColorConvolution.py
ColorConvolution.py
ColorConvolution.py
Python
0
@@ -0,0 +1,1333 @@
+import numpy
+
+def ColorConvolution ( I, W ):
+    '''
+    Reconstructs a color image from the stain matrix "W" and the stain intensity
+    images generated by ColorDeconvolution.
+    *Inputs:
+        I (rgbimage) - an RGB image of type unsigned char.
+        W (matrix) - a 3x3 matrix containing the color vectors in columns.
+                     For two stain images the third column is zero and will be
+                     complemented using cross-product. Minumum two nonzero
+                     columns required.
+    *Outputs:
+        RGB (rgbimage) - a reconstructed RGB image with values ranging from
+                         [0, 255], suitable for display.
+    *Related functions:
+        ComplementStainMatrix, OpticalDensityFwd, OpticalDensityInv, ColorDeconvolution
+    '''
+
+    #transform 3D input stain image to 2D stain matrix format
+    m = I.shape[0]
+    n = I.shape[1]
+    I = numpy.reshape(I, (m*n,3))
+
+    #transform input stains to optical density values and convolve, tfm back to stain
+    I = I.astype(dtype=numpy.float32)
+    ODfwd = OpticalDensityFwd(I)
+    ODdeconv = numpy.dot(ODfwd, numpy.transpose(W))
+    ODinv = OpticalDensityInv(ODdeconv)
+
+    #reshape output, transform type
+    RGB = numpy.reshape(ODinv, (m,n,3))
+    RGB[RGB > 255] = 255
+    RGB = RGB.astype(numpy.uint8)
+
+    return(RGB)
8f718c536897711663051a613e7f50d564fb4cbc
Call repair as part of upgrade
src/sentry/management/commands/upgrade.py
src/sentry/management/commands/upgrade.py
""" sentry.management.commands.upgrade ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import, print_function from django.core.management import call_command from django.core.management.base import BaseCommand from optparse import make_option class Command(BaseCommand): help = 'Performs any pending database migrations and upgrades' option_list = BaseCommand.option_list + ( make_option('--noinput', action='store_true', dest='noinput', default=False, help='Tells Django to NOT prompt the user for input of any kind.', ), ) def handle(self, **options): call_command( 'syncdb', migrate=True, interactive=(not options['noinput']), traceback=options['traceback'], verbosity=options['verbosity'], )
Python
0
@@ -987,12 +987,205 @@
 ,
         )
+
+        call_command(
+            'repair',
+            interactive=(not options['noinput']),
+            traceback=options['traceback'],
+            verbosity=options['verbosity'],
+        )
6569d7e36693512fdaadfb22a5aaf6f11fe0e084
migrate dataregistry repeater
corehq/motech/repeaters/management/commands/migrate_dataregistrycaseupdaterepeater.py
corehq/motech/repeaters/management/commands/migrate_dataregistrycaseupdaterepeater.py
Python
0
@@ -0,0 +1,411 @@
+from corehq.motech.repeaters.management.commands.migrate_caserepeater import Command as MigrateCaseRepeaters
+from corehq.motech.repeaters.models import SQLDataRegistryCaseUpdateRepeater
+
+
+class Command(MigrateCaseRepeaters):
+
+    @classmethod
+    def couch_doc_type(cls):
+        return 'DataRegistryCaseUpdateRepeater'
+
+    @classmethod
+    def sql_class(cls):
+        return SQLDataRegistryCaseUpdateRepeater
5aa5ac33d2b841fa1d9c707681a9d024168672c4
Create cdbtabledef.py
cdbtabledef.py
cdbtabledef.py
Python
0
@@ -0,0 +1,2252 @@
+"""cdbtabledef.py
+
+Developer: Noelle Todd
+Last Updated: June 5, 2014
+
+This module will create 4 tables for the client database, using the
+sqlalchemy module, and the sqlite database. This module is still in
+early testing stages, and as such, is subject to many changes, and
+probably contains bugs.
+"""
+
+from sqlalchemy import Column, DateTime, String, Integer, ForeignKey, func
+from sqlalchemy.orm import relationship, backref
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+
+engine = create_engine('sqlite:///test_db.sqlite')
+session = sessionmaker()
+session.configure(bind=engine)
+
+base = declarative_base()
+
+class Household(base):
+	"""
+	This class creates a table with columns for household data.
+	"""
+	__tablename__ = 'household'
+	id = Column(Integer, primary_key = True)
+	street_address = Column(String)
+	apt = Column(String)
+	city = Column(String, default = 'Troy')
+	state = Column(String, default = 'NY')
+	zip = Column(Integer, default = '12180')
+	#contact_ID = Column(Integer, ForeignKey('person.id'))
+	date_verified = Column(DateTime)
+
+class Person(base):
+	"""
+	This class creates a table with columns for individual's data.
+	"""
+	__tablename__ = 'person'
+	id = Column(Integer, primary_key = True)
+	first_name = Column(String)
+	last_name = Column(String)
+	DOB = Column(DateTime)
+	age = Column(Integer)
+	phone = Column(Integer)
+	date_joined = Column(DateTime)
+	HH_ID = Column(Integer, ForeignKey('household.id'))
+	household = relationship(Household, backref=backref('members',
+														uselist = True))
+
+class Volunteer(base):
+	"""
+	This class creates a table with columns for volunteer data.
+	"""
+	__tablename__ = 'volunteer'
+	id = Column(Integer, primary_key = True)
+	first_name = Column(String)
+	last_name = Column(String)
+	phone = Column(Integer)
+
+class Visit(base):
+	"""
+	This class creates a table with columns tracking visit history.
+	"""
+	__tablename__ = 'visit'
+	id = Column(Integer, primary_key = True)
+	I_ID = Column(Integer, ForeignKey('person.id'))
+	HH_ID = Column(Integer, ForeignKey('household.id'))
+	Vol_ID = Column(Integer, ForeignKey('volunteer.id'))
+	date = Column(DateTime, default = func.now())
+
+base.metadata.create_all(engine)
1172811d073e544d249aeba64f2b6828ee75bd5d
test geometry
tests/test_geometry.py
tests/test_geometry.py
Python
0.000002
@@ -0,0 +1,759 @@
+import numpy as np
+from numpy.testing import assert_allclose
+
+from geonet.geometry import unit_vector, angle_between
+
+def test_unit_vector():
+    v1 = np.array([1, 0, 0])
+    assert_allclose(unit_vector(v1), v1)
+
+    v2 = np.array([1, 1, 0])
+    u2 = unit_vector(v2)
+    assert_allclose(np.linalg.norm(u2), 1.0)
+
+def test_angle_between():
+    v1 = np.array([1, 1])
+    v2 = np.array([1, 0])
+    v3 = np.array([0, 1])
+
+    for v in (v1, v2, v3):
+        assert_allclose(angle_between(v, v), 0.0, atol=1e-6)
+
+    assert_allclose(angle_between(v1, v2), np.pi/4, atol=1e-6)
+    assert_allclose(angle_between(v2, v1), np.pi/4, atol=1e-6)
+    assert_allclose(angle_between(v2, v3), np.pi/2, atol=1e-6)
+    assert_allclose(angle_between(v3, v2), np.pi/2, atol=1e-6)
f5aab57e443e5b5a7c2507f10c0c6f608d677500
Add simple unparser test
tests/test_unparser.py
tests/test_unparser.py
Python
0.00003
@@ -0,0 +1,434 @@
+# -*- coding: utf-8 -*-
+
+from pyrql.parser import parser
+from pyrql.unparser import unparser
+
+import pytest
+
+
[email protected]('func', ['eq', 'lt', 'le', 'gt', 'ge', 'ne'])
+def test_cmp_functions(func):
+    parsed = {'name': func, 'args': ['a', 1]}
+    assert unparser.unparse(parsed) == '%s(a,1)' % func
+
+    parsed = {'name': func, 'args': [('a', 'b', 'c'), 1]}
+    assert unparser.unparse(parsed) == '%s((a,b,c),1)' % func
5fb7d1912eda9d6381af3e0cfa7655ed2d6795f2
Create Xclipboard.py
Xclipboard.py
Xclipboard.py
Python
0.000002
@@ -0,0 +1,894 @@
+from tkinter import Tk
+__all__=['copy','paste','clear']
+
+__author__='Calvin(Martin)Adyezik [email protected]'
+
+__doc__="""simple Module to work with clipboard based on tkinter -Python 3"""
+
+__name__='Xclipboard'
+
+
+def copy(text):
+    """copy text to clipboard """
+    try:
+        root=Tk()
+        root.withdraw()
+        root.clipboard_append(text)
+    except Exception as e:
+        print ('Error: ',e)
+    finally:
+        root.destroy()
+
+
+def paste():
+    """paste text from clipboad"""
+    try:
+        root=Tk()
+        root.withdraw()
+        return root.clipboard_get()
+    except Exception as e:
+        print ('Error: ',e)
+    finally:
+        root.destroy()
+
+
+def clear():
+    """clear clipboard"""
+    try:
+        root=Tk()
+        root.withdraw()
+        root.clipboard_clear()
+    except Exception as e:
+        print ('Error: ',e)
+    finally:
+        root.destroy()
bbf73c8db9a2af114beb29766d0ca2e16818175b
fix 192: failure in test_disk on linux
test/_linux.py
test/_linux.py
#!/usr/bin/env python
#
# $Id$
#

import unittest
import subprocess
import sys

from test_psutil import sh
import psutil


class LinuxSpecificTestCase(unittest.TestCase):

    def test_cached_phymem(self):
        # test psutil.cached_phymem against "cached" column of free
        # command line utility
        p = subprocess.Popen("free", shell=1, stdout=subprocess.PIPE)
        output = p.communicate()[0].strip()
        if sys.version_info >= (3,):
            output = str(output, sys.stdout.encoding)
        free_cmem = int(output.split('\n')[1].split()[6])
        psutil_cmem = psutil.cached_phymem() / 1024
        self.assertEqual(free_cmem, psutil_cmem)

    def test_phymem_buffers(self):
        # test psutil.phymem_buffers against "buffers" column of free
        # command line utility
        p = subprocess.Popen("free", shell=1, stdout=subprocess.PIPE)
        output = p.communicate()[0].strip()
        if sys.version_info >= (3,):
            output = str(output, sys.stdout.encoding)
        free_cmem = int(output.split('\n')[1].split()[5])
        psutil_cmem = psutil.phymem_buffers() / 1024
        self.assertEqual(free_cmem, psutil_cmem)

    def test_disks(self):
        # test psutil.disk_usage() and psutil.disk_partitions()
        # against "df -a"
        def df(path):
            out = sh('df -B 1 "%s"' % path).strip()
            lines = out.split('\n')
            lines.pop(0)
            line = lines.pop(0)
            dev, total, used, free = line.split()[:4]
            if dev == 'none':
                dev = ''
            total, used, free = int(total), int(used), int(free)
            return dev, total, used, free

        for part in psutil.disk_partitions(all=False):
            usage = psutil.disk_usage(part.mountpoint)
            dev, total, used, free = df(part.mountpoint)
            self.assertEqual(part.device, dev)
            self.assertEqual(usage.total, total)
            # 10 MB tollerance
            if abs(usage.free - free) > 10 * 1024 * 1024:
                self.fail("psutil=%s, df=%s" % usage.free, free)
            if abs(usage.used - used) > 10 * 1024 * 1024:
                self.fail("psutil=%s, df=%s" % usage.used, used)


if __name__ == '__main__':
    test_suite = unittest.TestSuite()
    test_suite.addTest(unittest.makeSuite(LinuxSpecificTestCase))
    unittest.TextTestRunner(verbosity=2).run(test_suite)
Python
0.000006
@@ -1328,16 +1328,19 @@
 sh('df 
+-P 
 -B 1 "%s
e095eeb084b11bba857daf51b9d0eb0ef22ab5cc
Update bank_reconciliation_statement.py
erpnext/accounts/report/bank_reconciliation_statement/bank_reconciliation_statement.py
erpnext/accounts/report/bank_reconciliation_statement/bank_reconciliation_statement.py
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt

from __future__ import unicode_literals
import frappe
from frappe.utils import flt
from frappe import _

def execute(filters=None):
	if not filters: filters = {}

	columns = get_columns()

	if not filters.get("account"): return columns, []

	account_currency = frappe.db.get_value("Account", filters.account, "account_currency")

	data = get_entries(filters)

	from erpnext.accounts.utils import get_balance_on
	balance_as_per_system = get_balance_on(filters["account"], filters["report_date"])

	total_debit, total_credit = 0,0
	for d in data:
		total_debit += flt(d.debit)
		total_credit += flt(d.credit)

	amounts_not_reflected_in_system = frappe.db.sql("""
		select sum(jvd.debit_in_account_currency - jvd.credit_in_account_currency)
		from `tabJournal Entry Account` jvd, `tabJournal Entry` jv
		where jvd.parent = jv.name and jv.docstatus=1 and jvd.account=%s
		and jv.posting_date > %s and jv.clearance_date <= %s
		and ifnull(jv.is_opening, 'No') = 'No'
		""", (filters["account"], filters["report_date"], filters["report_date"]))

	amounts_not_reflected_in_system = flt(amounts_not_reflected_in_system[0][0]) \
		if amounts_not_reflected_in_system else 0.0

	bank_bal = flt(balance_as_per_system) - flt(total_debit) + flt(total_credit) \
		+ amounts_not_reflected_in_system

	data += [
		get_balance_row(_("System Balance"), balance_as_per_system, account_currency),
		{},
		{
			"journal_entry": '"' + _("Amounts not reflected in bank") + '"',
			"debit": total_debit,
			"credit": total_credit,
			"account_currency": account_currency
		},
		get_balance_row(_("Amounts not reflected in system"),
			amounts_not_reflected_in_system, account_currency),
		{},
		get_balance_row(_("Expected balance as per bank"), bank_bal, account_currency)
	]

	return columns, data

def get_columns():
	return [
		{
			"fieldname": "posting_date",
			"label": _("Posting Date"),
			"fieldtype": "Date",
			"width": 100
		},
		{
			"fieldname": "journal_entry",
			"label": _("Journal Entry"),
			"fieldtype": "Link",
			"options": "Journal Entry",
			"width": 220
		},
		{
			"fieldname": "debit",
			"label": _("Debit"),
			"fieldtype": "Currency",
			"options": "account_currency",
			"width": 120
		},
		{
			"fieldname": "credit",
			"label": _("Credit"),
			"fieldtype": "Currency",
			"options": "account_currency",
			"width": 120
		},
		{
			"fieldname": "against_account",
			"label": _("Against Account"),
			"fieldtype": "Link",
			"options": "Account",
			"width": 200
		},
		{
			"fieldname": "reference",
			"label": _("Reference"),
			"fieldtype": "Data",
			"width": 100
		},
		{
			"fieldname": "ref_date",
			"label": _("Ref Date"),
			"fieldtype": "Date",
			"width": 110
		},
		{
			"fieldname": "clearance_date",
			"label": _("Clearance Date"),
			"fieldtype": "Date",
			"width": 110
		},
		{
			"fieldname": "account_currency",
			"label": _("Currency"),
			"fieldtype": "Link",
			"options": "Currency",
			"width": 100
		}
	]

def get_entries(filters):
	entries = frappe.db.sql("""select jv.posting_date, jv.name as journal_entry,
		jvd.debit_in_account_currency as debit, jvd.credit_in_account_currency as credit,
		jvd.against_account, jv.cheque_no as reference, jv.cheque_date as ref_date,
		jv.clearance_date, jvd.account_currency
		from `tabJournal Entry Account` jvd, `tabJournal Entry` jv
		where jvd.parent = jv.name and jv.docstatus=1 and jvd.account = %(account)s
		and jv.posting_date <= %(report_date)s
		and ifnull(jv.clearance_date, '4000-01-01') > %(report_date)s
		and ifnull(jv.is_opening, 'No') = 'No'
		order by jv.name DESC""", filters, as_dict=1)

	return entries

def get_balance_row(label, amount, account_currency):
	if amount > 0:
		return {
			"journal_entry": '"' + label + '"',
			"debit": amount,
			"credit": 0,
			"account_currency": account_currency
		}
	else:
		return {
			"journal_entry": '"' + label + '"',
			"debit": 0,
			"credit": abs(amount),
			"account_currency": account_currency
		}
Python
0.000002
@@ -1441,22 +1441,52 @@
 (_("
-System Balance
+Bank Statement balance as per General Ledger
 "),
@@ -1561,54 +1561,54 @@
 y":
-'"' + _("Amounts not reflected in bank") + '"'
+_("Outstanding Cheques and Deposits to clear")
 ,
@@ -1727,39 +1727,48 @@
 (_("
-Amounts not reflected in system
+Cheques and Deposits incorrectly cleared
 "),
@@ -1854,36 +1854,41 @@
 (_("
-Expected balance as per bank
+Calculated Bank Statement balance
 "),
@@ -3729,16 +3729,37 @@
 rder by 
+jv.posting_date DESC, 
 jv.name
@@ -3787,16 +3787,16 @@
 dict=1)
- 
 	return
@@ -3906,33 +3906,21 @@
 entry":
-'"' + label + '"'
+label
 ,
@@ -4035,25 +4035,13 @@
 y":
-'"' + label + '"'
+label
 ,
@@ -4099,31 +4099,32 @@
 currency": account_currency
- 
 		}
+
501454e30a93b6ec706add520a6b106940b537d9
Create card_pick.py
FiveThirtyEightRiddler/2017-04-21/card_pick.py
FiveThirtyEightRiddler/2017-04-21/card_pick.py
Python
0.000001
@@ -0,0 +1,2393 @@
+import random
+from collections import Counter
+import matplotlib.pyplot as plt
+from multiprocessing import Pool
+import numpy as np
+import itertools
+from mpl_toolkits.mplot3d import Axes3D
+from matplotlib import cm
+
+
+def simulate_single_run(num_cards, hand_perc, stop_percentage):
+    hand_size = int(num_cards * hand_perc)
+    remaining_cards = list(range(1, num_cards + 1))
+    hand = random.sample(remaining_cards, hand_size)
+    seen_cards = []
+    # print(hand, max(hand))
+    for num_card, card in enumerate(hand, start=1):
+        seen_cards.append(card)
+        remaining_cards.remove(card)
+        high_card_so_far = max(seen_cards)
+        prob_draw_higher_than_highest = len([c for c in remaining_cards if c > high_card_so_far]) / len(remaining_cards)
+        prob_any_remaining_higher = 1 - ((1 - prob_draw_higher_than_highest) ** (hand_size - num_card))
+        # print(seen_cards, high_card_so_far, prob_draw_higher_than_highest, prob_any_remaining_higher)
+        if prob_any_remaining_higher <= stop_percentage:
+            return card == max(hand)
+
+
+def simulate_single_percentage(num_cards, hand_perc, stop_percentage, trials):
+    return Counter(simulate_single_run(num_cards, hand_perc, stop_percentage) for _ in range(trials))[True] / trials
+
+
+def trail_multiple_percentages(num_cards, hand_perc, stop_percentages, trials):
+    result = 0
+    for pct in stop_percentages:
+        result = max(result, simulate_single_percentage(num_cards, hand_perc, pct, trials))
+
+    print(num_cards, hand_perc, result)
+    return result
+
+
+if __name__ == '__main__':
+    #NUM_CARDS = np.logspace(2, 5, num=4, dtype=int)
+    NUM_CARDS = np.linspace(100, 1000, num=4, dtype=int)
+    HAND_PERC = np.linspace(.2, .7, num=6, dtype=float)
+    PERCENTAGES = np.linspace(0, 1, num=10, dtype=float)
+    SAMPLE_SIZE = 1000
+
+    with Pool(4) as p:
+        results = p.starmap(trail_multiple_percentages,
+                            [(num_cards, hand_size, PERCENTAGES, SAMPLE_SIZE) for num_cards, hand_size in
+                             itertools.product(NUM_CARDS, HAND_PERC)])
+
+    results = np.array(results).reshape((len(NUM_CARDS), len(HAND_PERC))).T
+    NUM_CARDS, HAND_PERC = np.meshgrid(NUM_CARDS, HAND_PERC)
+
+    fig = plt.figure()
+    ax = fig.gca(projection='3d')
+    surf = ax.plot_surface(NUM_CARDS, HAND_PERC, results, linewidth=0, antialiased=False, cmap=cm.coolwarm)
+    plt.show()
4d44d58c91e6a4fdf9ab16acac6320dd5d1f6bb9
Add senlin-manage service list/clean for engine status
senlin/cmd/manage.py
senlin/cmd/manage.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
CLI interface for senlin management.
"""

import sys

from oslo_config import cfg
from oslo_log import log as logging

from senlin.db import api
from senlin import version

CONF = cfg.CONF


def do_db_version():
    '''Print database's current migration level.'''
    print(api.db_version(api.get_engine()))


def do_db_sync():
    '''Place a database under migration control and upgrade.

    DB is created first if necessary.
    '''
    api.db_sync(api.get_engine(), CONF.command.version)


def add_command_parsers(subparsers):
    parser = subparsers.add_parser('db_version')
    parser.set_defaults(func=do_db_version)

    parser = subparsers.add_parser('db_sync')
    parser.set_defaults(func=do_db_sync)
    parser.add_argument('version', nargs='?')
    parser.add_argument('current_version', nargs='?')


command_opt = cfg.SubCommandOpt('command',
                                title='Commands',
                                help='Show available commands.',
                                handler=add_command_parsers)


def main():
    logging.register_options(CONF)
    logging.setup(CONF, 'senlin-manage')
    CONF.register_cli_opt(command_opt)

    try:
        default_config_files = cfg.find_config_files('senlin', 'senlin-engine')
        CONF(sys.argv[1:], project='senlin', prog='senlin-manage',
             version=version.version_info.version_string(),
             default_config_files=default_config_files)
    except RuntimeError as e:
        sys.exit("ERROR: %s" % e)

    try:
        CONF.command.func()
    except Exception as e:
        sys.exit("ERROR: %s" % e)
Python
0.000001
@@ -664,16 +664,82 @@
 logging
+from oslo_utils import timeutils
+
+from senlin.common.i18n import _
 from se
@@ -1102,24 +1102,2367 @@
 .version)
+
+
+class ServiceManageCommand(object):
+    def _format_service(self, service):
+        if service is None:
+            return
+
+        status = 'down'
+        if ((timeutils.utcnow() - service.updated_at).total_seconds() <=
+                CONF.periodic_interval):
+            status = 'up'
+
+        result = {
+            'service_id': service.id,
+            'binary': service.binary,
+            'host': service.host,
+            'topic': service.topic,
+            'created_at': service.created_at,
+            'updated_at': service.updated_at,
+            'status': status
+        }
+        return result
+
+    def service_list(self):
+        services = [self._format_service(service)
+                    for service in api.service_get_all()]
+
+        print_format = "%-36s %-24s %-16s %-16s %-10s %-24s %-24s"
+        print(print_format % (_('Service ID'),
+                              _('Host'),
+                              _('Binary'),
+                              _('Topic'),
+                              _('Status'),
+                              _('Created At'),
+                              _('Updated At')))
+
+        for svc in services:
+            print(print_format % (svc['service_id'],
+                                  svc['host'],
+                                  svc['binary'],
+                                  svc['topic'],
+                                  svc['status'],
+                                  svc['created_at'],
+                                  svc['updated_at']))
+
+    def service_clean(self):
+        for service in api.service_get_all():
+            svc = self._format_service(service)
+            if svc['status'] == 'down':
+                print(_('Dead service %s is removed.') % svc['service_id'])
+                api.service_delete(svc['service_id'])
+
+    @staticmethod
+    def add_service_parsers(subparsers):
+        service_parser = subparsers.add_parser('service')
+        service_parser.set_defaults(command_object=ServiceManageCommand)
+        service_subparsers = service_parser.add_subparsers(dest='action')
+        list_parser = service_subparsers.add_parser('list')
+        list_parser.set_defaults(func=ServiceManageCommand().service_list)
+        remove_parser = service_subparsers.add_parser('clean')
+        remove_parser.set_defaults(func=ServiceManageCommand().service_clean)
+
+
 def add_comm
@@ -3667,16 +3667,73 @@
 b_sync)
+    ServiceManageCommand.add_service_parsers(subparsers)
 pars
ab31fea8a0d30bc4b68813ce635880d4682cfc2f
Write base classes for collection profiles.
cohydra/profile.py
cohydra/profile.py
Python
0
@@ -0,0 +1,1897 @@
+import abc
+import logging
+
+import six
+
+
+class Profile(six.with_metaclass(abc.ABCMeta)):
+    """Base class for all collection profiles.
+
+    Attributes:
+        top_dir: Where this profile's files will be stored.
+        parent: The profile from which this profile is derived, or
+            None for a root profile.
+        children: List of child profiles.
+    """
+
+    def __init__(self, top_dir, parent):
+        """Create a profile.
+        """
+
+        self.top_dir = top_dir
+
+        self.parent = parent
+
+        self.children = []
+
+        if self.parent is not None:
+            self.parent.children.append(self)
+
+    def __str__(self):
+        return '%s.%s(top_dir=%r, parent=%r)' % (
+            self.__class__.__module__,
+            self.__class__.__name__,
+            self.top_dir,
+            None if self.parent is None else self.parent.top_dir,
+        )
+
+    def generate_all(self, depth=0):
+        """Generate this profile and all of its children.
+        """
+
+        logging.info('%sGenerating %s', ' ' * depth, self)
+        self.generate()
+
+        # TODO: parallelize?
+        for child in self.children:
+            child.generate_all(depth + 1)
+
+    def print_all(self, depth=0):
+        """List all profiles, for debugging.
+        """
+
+        print(' ' * depth + str(self))
+
+        for child in self.children:
+            child.print_all(depth + 1)
+
+    def log(self, level, msg, *args, **kwargs):
+        """Log, with additional info about the profile.
+        """
+
+        logging.log(
+            level,
+            '%s: %s' % (self, msg),
+            *args,
+            **kwargs)
+
+    @abc.abstractmethod
+    def generate(self):
+        """Generate this profile from its parent.
+
+        This method assumes that the parent is up-to-date.
+        """
+
+        pass
+
+
+class RootProfile(Profile):
+    """Root profile.
+
+    This is a profile that consists of a directory with original files,
+    instead of a profile derived from another profile's files.
+    """
+
+    def __init__(self, top_dir):
+        Profile.__init__(self, top_dir, None)
+
+    def generate(self):
+        pass
8fec4b6eef7f1f4ef5840504f6abcdf0d2f9f80d
Adding the concept of a target platform.
tools/cr/cr/base/platform.py
tools/cr/cr/base/platform.py
Python
0.999998
@@ -0,0 +1,1810 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for the target platform support."""
+
+from importlib import import_module
+import os
+
+import cr
+
+DEFAULT = cr.Config.From(
+    DEPOT_TOOLS=os.path.join('{GOOGLE_CODE}', 'depot_tools'),
+)
+
+
+class Platform(cr.Plugin, cr.Plugin.Type):
+  """Base class for implementing cr platforms.
+
+  A platform is the target operating system being compiled for (linux android).
+  """
+
+  _platform_module = import_module('platform', None)
+  SELECTOR = 'CR_PLATFORM'
+
+  @classmethod
+  def AddArguments(cls, parser):
+    parser.add_argument(
+        '--platform', dest=cls.SELECTOR,
+        choices=cls.Choices(),
+        default=None,
+        help='Sets the target platform to use. Overrides ' + cls.SELECTOR
+    )
+
+  @classmethod
+  def System(cls):
+    return cls._platform_module.system()
+
+  def __init__(self):
+    super(Platform, self).__init__()
+
+  def Activate(self, context):
+    super(Platform, self).Activate(context)
+    if _PathFixup not in context.fixup_hooks:
+      context.fixup_hooks.append(_PathFixup)
+
+  @cr.Plugin.activemethod
+  def Prepare(self, context):
+    pass
+
+  @property
+  def paths(self):
+    return []
+
+
+def _PathFixup(context, key, value):
+  """A context fixup that does platform specific modifications to the PATH."""
+  if key == 'PATH':
+    paths = []
+    for entry in Platform.GetActivePlugin(context).paths:
+      entry = context.Substitute(entry)
+      if entry not in paths:
+        paths.append(entry)
+    for entry in value.split(os.path.pathsep):
+      if entry.endswith(os.path.sep + 'goma'):
+        pass
+      elif entry not in paths:
+        paths.append(entry)
+    value = os.path.pathsep.join(paths)
+  return value
8cac10350cdbc33d243a561ba06c25f5d01e9a04
fix for lists
Scripts/SearchIncidents_5.0/SearchIncidents.py
Scripts/SearchIncidents_5.0/SearchIncidents.py
from typing import Dict, List

import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *

special = ['n', 't', '\\', '"', '\'', '7', 'r']


def check_if_found_incident(res: List):
    if res and isinstance(res, list) and isinstance(res[0].get('Contents'), dict):
        if 'data' not in res[0]['Contents']:
            raise DemistoException(res[0].get('Contents'))
        elif res[0]['Contents']['data'] is None:
            raise DemistoException("Incidents not found.")
    else:
        raise DemistoException(f'failed to get incidents from demisto.\nGot: {res}')


def is_valid_args(args: Dict):
    error_msg: List[str] = []
    for _key, value in args.items():
        i = 0
        while i < len(value):
            if value[i] == '\\':
                if value[i + 1] not in special:
                    error_msg.append(f'Error while parsing the argument: "{_key}" '
                                     f'\nSucceeded parsing untill:\n- "{value[0:i]}"')
                else:
                    i += 1
            i += 1

    if len(error_msg) != 0:
        raise DemistoException('\n'.join(error_msg))

    return True


def search_incidents(args: Dict):
    if is_valid_args(args):
        res: List = demisto.executeCommand('getIncidents', args)
        check_if_found_incident(res)
        data: Dict = res[0]['Contents']['data']
        context_entry: Dict = {'foundIncidents': data}
        headers: List[str] = ['id', 'name', 'severity', 'status', 'owner', 'created', 'closed']
        md: str = tableToMarkdown(name="Incidents found", t=data, headers=headers)
        return_outputs(md, context_entry, res)


def main():
    args: Dict = demisto.args()
    try:
        search_incidents(args)
    except DemistoException as error:
        return_error(str(error), error)


if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
Python
0.000001
@@ -647,70 +647,248 @@
-    error_msg: List[str] = []
-    for _key, value in args.items():
+    array_args: List[str] = ['id', 'name', 'status', 'notstatus', 'reason', 'level', 'owner', 'type', 'query']
+    error_msg: List[str] = []
+    for _key, value in args.items():
+        if _key in array_args:
+            value = ','.join(value)
37bd6459bff3f9b079897b1392c04681c65fa24e
Fix #8 chat.reply doesn't work in groups
aiotg/chat.py
aiotg/chat.py
import json
import logging
from functools import partialmethod

logger = logging.getLogger("aiotg")


class Chat:
    """
    Wrapper for telegram chats, passed to most callbacks
    """

    def send_text(self, text, **options):
        """
        Send a text message to the chat, for available options see
        https://core.telegram.org/bots/api#sendmessage
        """
        return self.bot.send_message(self.id, text, **options)

    def reply(self, text, markup=None, parse_mode=None):
        return self.send_text(
            text,
            reply_to_message_id=self.message["message_id"],
            disable_web_page_preview='true',
            reply_markup=json.dumps(markup),
            parse_mode=parse_mode
        )

    def _send_to_chat(self, method, **options):
        return self.bot.api_call(
            method,
            chat_id=str(self.id),
            **options
        )

    send_audio = partialmethod(_send_to_chat, "sendAudio")
    send_photo = partialmethod(_send_to_chat, "sendPhoto")
    send_video = partialmethod(_send_to_chat, "sendVideo")
    send_document = partialmethod(_send_to_chat, "sendDocument")
    send_sticker = partialmethod(_send_to_chat, "sendSticker")
    send_voice = partialmethod(_send_to_chat, "sendVoice")
    send_locaton = partialmethod(_send_to_chat, "sendLocation")
    send_chat_action = partialmethod(_send_to_chat, "sendChatAction")

    def forward_message(self, from_chat_id, message_id):
        return self.bot.api_call(
            "forwardMessage",
            chat_id=self.id,
            from_chat_id=from_chat_id,
            message_id=message_id
        )

    def is_group(self):
        return self.type == "group"

    def __init__(self, bot, chat_id, chat_type="private", src_message=None):
        self.bot = bot
        self.message = src_message
        sender = src_message['from'] if src_message else {"first_name": "N/A"}
        self.sender = Sender(sender)
        self.id = chat_id
        self.type = chat_type

    @staticmethod
    def from_message(bot, message):
        chat = message["chat"]
        return Chat(bot, chat["id"], chat["type"], message)


class TgChat(Chat):
    def __init__(self, *args, **kwargs):
        logger.warning("TgChat is depricated, use Chat instead")
        super().__init__(*args, **kwargs)


class Sender(dict):
    """A small wrapper for sender info, mostly used for logging"""

    def __repr__(self):
        uname = " (%s)" % self["username"] if "username" in self else ""
        return self['first_name'] + uname


class TgSender(Sender):
    def __init__(self, *args, **kwargs):
        logger.warning("TgSender is depricated, use Sender instead")
        super().__init__(*args, **kwargs)
Python
0
@@ -467,20 +467,18 @@
 markup=
-None
+{}
 , parse_
1675ecd5ea2d4aaf8d8b6aa76d007d081f92eba6
add context processor for static
nurseconnect/context_processors.py
nurseconnect/context_processors.py
Python
0.000002
@@ -0,0 +1,157 @@
+from django.conf import settings
+
+
+def compress_settings(request):
+    return {
+        'STATIC_URL': settings.STATIC_URL,
+        'ENV': settings.ENV
+    }
a1820a0e5f9bd891b20f70ab68dfd4bb385047a0
Add utils to allow multiclass classification.
utils/multiclassification.py
utils/multiclassification.py
Python
0
@@ -0,0 +1,1877 @@
+from __future__ import division
+
+import numpy as np
+
+from sklearn.multiclass import OneVsOneClassifier
+from sklearn.multiclass import _fit_binary
+from sklearn.externals.joblib import Parallel, delayed
+from unbalanced_dataset import SMOTE
+
+
+def _fit_ovo_binary(estimator, X, y, i, j, sampling=None):
+    """Fit a single binary estimator (one-vs-one)."""
+    cond = np.logical_or(y == i, y == j)
+    y = y[cond]
+    y_binary = np.empty(y.shape, np.int)
+    y_binary[y == i] = 0
+    y_binary[y == j] = 1
+    ind = np.arange(X.shape[0])
+
+    X_values = X[ind[cond]]
+    y_values = y_binary
+
+    if sampling == 'SMOTE':
+        print 'SMOTE'
+        ratio = 1
+        smote = SMOTE(ratio=ratio)
+        X_values, y_values = smote.fit_transform(X_values, y_values)
+
+    return _fit_binary(estimator, X_values, y_values, classes=[i, j])
+
+
+class CustomOneVsOneClassifier(OneVsOneClassifier):
+
+    def __init__(self, estimator, n_jobs=1, sampling=None):
+        self.estimator = estimator
+        self.n_jobs = n_jobs
+        self.sampling = sampling
+
+    def predict_proba(self, X):
+        return super(CustomOneVsOneClassifier, self).decision_function(X)
+
+    def fit(self, X, y):
+        """Fit underlying estimators.
+        Parameters
+        ----------
+        X : (sparse) array-like, shape = [n_samples, n_features]
+            Data.
+        y : array-like, shape = [n_samples]
+            Multi-class targets.
+        Returns
+        -------
+        self
+        """
+        y = np.asarray(y)
+
+        self.classes_ = np.unique(y)
+        n_classes = self.classes_.shape[0]
+        self.estimators_ = Parallel(n_jobs=self.n_jobs)(
+            delayed(_fit_ovo_binary)(
+                self.estimator, X, y,
+                self.classes_[i], self.classes_[j], sampling=self.sampling)
+            for i in range(n_classes) for j in range(i + 1, n_classes))
+
+        return self
18d11a06e1e84ece32277e0860229555aae321f0
Add a snippet (Python).
python/cross-platform_home_directory.py
python/cross-platform_home_directory.py
Python
0.000035
@@ -0,0 +1,91 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import os
+
+print(os.path.expanduser("~"))
cd62dabc7696205fad3d955381d59df48fdeb2b2
Fix profiler after module structure has been changed in pydevd, take 2 (PY-18442)
python/helpers/profiler/run_profiler.py
python/helpers/profiler/run_profiler.py
import os
import pydev_imports
import sys
import time
import traceback

from _pydevd_bundle.pydevd_utils import save_main_module
from socket import AF_INET
from socket import SOCK_STREAM
from socket import socket

from _prof_imports import ProfilerResponse
from prof_io import ProfWriter, ProfReader
from prof_util import generate_snapshot_filepath, statsToResponse

base_snapshot_path = os.getenv('PYCHARM_SNAPSHOT_PATH')
remote_run = bool(os.getenv('PYCHARM_REMOTE_RUN', ''))


def StartClient(host, port):
    """ connects to a host/port """

    s = socket(AF_INET, SOCK_STREAM)

    MAX_TRIES = 100
    i = 0
    while i < MAX_TRIES:
        try:
            s.connect((host, port))
        except:
            i += 1
            time.sleep(0.2)
            continue
        return s

    sys.stderr.write("Could not connect to %s: %s\n" % (host, port))
    sys.stderr.flush()
    traceback.print_exc()
    sys.exit(1)  # TODO: is it safe?


class Profiler(object):
    def __init__(self):
        try:
            import yappi_profiler
            self.profiling_backend = yappi_profiler.YappiProfile()
            print('Starting yappi profiler\n')
        except ImportError:
            import cProfile
            self.profiling_backend = cProfile.Profile()
            print('Starting cProfile profiler\n')

    def connect(self, host, port):
        s = StartClient(host, port)

        self.initializeNetwork(s)

    def initializeNetwork(self, sock):
        try:
            sock.settimeout(None)  # infinite, no timeouts from now on - jython does not have it
        except:
            pass
        self.writer = ProfWriter(sock)
        self.reader = ProfReader(sock, self)
        self.reader.start()

        time.sleep(0.1)  # give threads time to start

    def process(self, message):
        if hasattr(message, 'save_snapshot'):
            self.save_snapshot(message.id, generate_snapshot_filepath(message.save_snapshot.filepath, remote_run), remote_run)
        else:
            raise AssertionError("Unknown request %s" % dir(message))

    def run(self, file):
        m = save_main_module(file, 'run_profiler')
        globals = m.__dict__
        try:
            globals['__builtins__'] = __builtins__
        except NameError:
            pass  # Not there on Jython...

        self.start_profiling()

        try:
            pydev_imports.execfile(file, globals, globals)  # execute the script
        finally:
            self.stop_profiling()
            self.save_snapshot(0, generate_snapshot_filepath(base_snapshot_path, remote_run), remote_run)

    def start_profiling(self):
        self.profiling_backend.enable()

    def stop_profiling(self):
        self.profiling_backend.disable()

    def get_snapshot(self):
        self.profiling_backend.create_stats()
        return self.profiling_backend.stats

    def dump_snapshot(self, filename):
        dir = os.path.dirname(filename)
        if not os.path.exists(dir):
            os.makedirs(dir)

        self.profiling_backend.dump_stats(filename)
        return filename

    def save_snapshot(self, id, filename, send_stat=False):
        self.stop_profiling()

        if filename is not None:
            filename = self.dump_snapshot(filename)
            print('Snapshot saved to %s' % filename)

        if not send_stat:
            response = ProfilerResponse(id=id, snapshot_filepath=filename)
        else:
            response = ProfilerResponse(id=id)
            statsToResponse(self.get_snapshot(), response)

        self.writer.addCommand(response)
        self.start_profiling()


if __name__ == '__main__':
    host = sys.argv[1]
    port = int(sys.argv[2])
    file = sys.argv[3]

    del sys.argv[0]
    del sys.argv[0]
    del sys.argv[0]

    profiler = Profiler()

    try:
        profiler.connect(host, port)
    except:
        sys.stderr.write("Could not connect to %s: %s\n" % (host, port))
        traceback.print_exc()
        sys.exit(1)

    # add file path to sys.path
    sys.path.insert(0, os.path.split(file)[0])

    profiler.run(file)
Python
0
@@ -7,29 +7,8 @@
 os
-import pydev_imports
 impo
@@ -43,16 +43,56 @@
 aceback
+from _pydev_bundle import pydev_imports
 from _py
d9b991390c11d726c364e09f8010b9beb4cf0fb6
Add "pa" to languages_by_size
pywikibot/families/wikisource_family.py
pywikibot/families/wikisource_family.py
# -*- coding: utf-8 -*-
"""Family module for Wikisource."""
#
# (C) Pywikibot team, 2004-2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals

from pywikibot import family

__version__ = '$Id$'


# The Wikimedia family that is known as Wikisource
class Family(family.SubdomainFamily, family.WikimediaFamily):

    """Family class for Wikisource."""

    name = 'wikisource'

    closed_wikis = [
        # https://noc.wikimedia.org/conf/highlight.php?file=closed.dblist
        'ang', 'ht',
    ]
    removed_wikis = [
        # https://noc.wikimedia.org/conf/highlight.php?file=deleted.dblist
        'tokipona',
    ]

    def __init__(self):
        """Constructor."""
        self.languages_by_size = [
            'en', 'pl', 'de', 'ru', 'fr', 'zh', 'he', 'es', 'it', 'ar', 'cs',
            'pt', 'fa', 'hu', 'www', 'ml', 'ko', 'sv', 'gu', 'sl', 'bn', 'te',
            'sa', 'sr', 'ro', 'fi', 'vi', 'el', 'ja', 'uk', 'th', 'ca', 'hy',
            'az', 'hr', 'ta', 'nl', 'br', 'is', 'la', 'no', 'vec', 'eo', 'be',
            'tr', 'mk', 'et', 'yi', 'id', 'da', 'as', 'bg', 'li', 'mr', 'kn',
            'or', 'bs', 'sah', 'lt', 'gl', 'cy', 'sk', 'zh-min-nan', 'fo',
        ]

        super(Family, self).__init__()

        # All requests to 'mul.wikisource.org/*' are redirected to
        # the main page, so using 'wikisource.org'
        self.langs['mul'] = self.domain
        self.languages_by_size.append('mul')

        # Global bot allowed languages on
        # https://meta.wikimedia.org/wiki/BPI#Current_implementation
        self.cross_allowed = [
            'ca', 'el', 'fa', 'it', 'ko', 'no', 'pl', 'vi', 'zh',
        ]

        self.authornamespaces = {
            '_default': [0],
            'ar': [102],
            'be': [102],
            'bg': [100],
            'ca': [106],
            'cs': [100],
            'da': [102],
            'en': [102],
            'eo': [102],
            'et': [106],
            'fa': [102],
            'fr': [102],
            'he': [108],
            'hr': [100],
            'hu': [100],
            'hy': [100],
            'it': [102],
            'ko': [100],
            'la': [102],
            'nl': [102],
            'no': [102],
            'pl': [104],
            'pt': [102],
            'ro': [102],
            'sv': [106],
            'tr': [100],
            'vi': [102],
            'zh': [102],
        }

        # Subpages for documentation.
        # TODO: List is incomplete, to be completed for missing languages.
        # TODO: Remove comments for appropriate pages
        self.doc_subpages = {
            '_default': ((u'/doc', ),
                         ['ar', 'as', 'az', 'bn', 'en', 'es', 'et', 'gu', 'hu',
                          'it', 'ja', 'kn', 'ml', 'mk', 'mr', 'pt', 'ro', 'sa',
                          'sah', 'ta', 'te', 'th', 'vi']
                         ),
            'be': (u'/Дакументацыя', ),
            'bn': (u'/নথি', ),
            'br': (u'/diellerezh', ),
            'de': (u'/Doku', u'/Meta'),
            'el': (u'/τεκμηρίωση', ),
            'eo': ('u/dokumentado', ),
            # 'fa': (u'/صفحه الگو', ),
            # 'fa': (u'/فضای‌نام توضیحات', ),
            # 'fa': (u'/آغاز جعبه', ),
            # 'fa': (u'/پایان جعبه۲', ),
            # 'fa': (u'/آغاز جعبه۲', ),
            # 'fa': (u'/پایان جعبه', ),
            # 'fa': (u'/توضیحات', ),
            'fr': (u'/documentation', ),
            'id': (u'/dok', ),
            'ko': (u'/설명문서', ),
            'no': (u'/dok', ),
            'ru': (u'/Документация', ),
            'sl': (u'/dok', ),
            'sv': (u'/dok', ),
            'uk': (u'/документація', ),
        }
Python
0.000008
@@ -1230,16 +1230,34 @@
 , 'fo',
+            'pa',
c8b78cab7a32a300e418033185595fd79a290823
add bmf study
qlcoder/image_processing/bmp_python3.py
qlcoder/image_processing/bmp_python3.py
Python
0.000001
@@ -0,0 +1,500 @@
+import binascii
+fi = open("aaa.bmp", "rb")
+header = fi.read(1078)
+line = fi.read()
+fi.close()
+
+binline = ''
+
+
+for i in range(0,len(line)):
+    binline += bin(line[i])[2:].zfill(8)
+newbinline = ''
+for i in range(len(binline)):
+    if(i%7 == 0):
+        newbinline+='0'
+    newbinline+=binline[i]
+newhexline = hex(int(newbinline, 2))[2:]
+newhexline = '0' + newhexline
+newbyteline = bytes().fromhex(newhexline)
+fo = open("out.bmp", "wb")
+outbmp = header + newbyteline
+line = fo.write(outbmp)
+fo.close()
c9b3bd8309d3d1448823787160021a8688e8f3c1
Add python to make vv h5 file
vv_h5_setup.py
vv_h5_setup.py
Python
0.000001
@@ -0,0 +1,862 @@
+import tables
+
+vv_desc = dict(
+obsid=tables.IntCol(pos=0),
+revision=tables.IntCol(pos=1),
+most_recent=tables.IntCol(pos=2),
+slot=tables.IntCol(pos=3),
+type=tables.StringCol(10,pos=4),
+n_pts=tables.IntCol(pos=5),
+rad_off=tables.FloatCol(pos=6),
+frac_dy_big=tables.FloatCol(pos=7),
+frac_dz_big=tables.FloatCol(pos=8),
+frac_mag_big=tables.FloatCol(pos=9),
+mean_y =tables.FloatCol(pos=10),
+mean_z =tables.FloatCol(pos=11),
+dy_mean=tables.FloatCol(pos=12),
+dy_med =tables.FloatCol(pos=13),
+dy_rms =tables.FloatCol(pos=14),
+dz_mean=tables.FloatCol(pos=15),
+dz_med =tables.FloatCol(pos=16),
+dz_rms =tables.FloatCol(pos=17),
+mag_mean=tables.FloatCol(pos=18),
+mag_med =tables.FloatCol(pos=19),
+mag_rms =tables.FloatCol(pos=20),
+)
+
+h5f = tables.openFile('/data/aca/archive/vv/vv.h5', 'a')
+tbl = h5f.createTable('/', 'vv', vv_desc)
+tbl.cols.obsid.createIndex()
+h5f.close()
836c7aa92cd9d35e7d54046e835f285410780b84
Create nodejs.py
wigs/nodejs.py
wigs/nodejs.py
Python
0.000032
@@ -0,0 +1,181 @@
+class nodejs(Wig):
+    git_uri = 'https://github.com/nodejs/node'
+    tarball_uri = 'https://github.com/nodejs/node/archive/v$RELEASE_VERSION$.tar.gz'
+    last_release_version = 'v4.7.2'
5c4a199c7c7f457131c38b85b1e42abd315b2d2a
Use system_id from event
ryu/services/protocols/ovsdb/manager.py
ryu/services/protocols/ovsdb/manager.py
# Copyright (c) 2014 Rackspace Hosting
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import ssl
import socket

from ryu import cfg
from ryu.base import app_manager
from ryu.lib import hub
from ryu.services.protocols.ovsdb import client
from ryu.services.protocols.ovsdb import event
from ryu.controller import handler


opts = (cfg.StrOpt('address', default='0.0.0.0', help='OVSDB address'),
        cfg.IntOpt('port', default=6640, help='OVSDB port'),
        cfg.StrOpt('mngr-privkey', default=None, help='manager private key'),
        cfg.StrOpt('mngr-cert', default=None, help='manager certificate'),
        cfg.ListOpt('whitelist', default=[],
                    help='Whitelist of address to allow to connect'))

cfg.CONF.register_opts(opts, 'ovsdb')


class OVSDB(app_manager.RyuApp):
    _EVENTS = [event.EventNewOVSDBConnection,
               event.EventModifyRequest,
               event.EventReadRequest]

    def __init__(self, *args, **kwargs):
        super(OVSDB, self).__init__(*args, **kwargs)
        self._address = self.CONF.ovsdb.address
        self._port = self.CONF.ovsdb.port
        self._clients = {}

    def _accept(self, server):
        if self.CONF.ovsdb.whitelist:
            def check(address):
                if address in self.CONF.ovsdb.whitelist:
                    return True

                self.logger.debug('Connection from non-whitelist client '
                                  '(%s:%s)' % address)
                return False

        else:
            def check(address):
                return True

        while True:
            # TODO(jkoelker) SSL Certificate Fingerprint check
            sock, client_address = server.accept()

            if not check(client_address[0]):
                sock.shutdown(socket.SHUT_RDWR)
                sock.close()
                continue

            self.logger.debug('New connection from %s:%s' % client_address)
            t = hub.spawn(self._start_remote, sock, client_address)
            self.threads.append(t)

    def _proxy_event(self, ev):
        system_id = ev.system_id
        client_name = client.RemoteOvsdb.instance_name(system_id)

        if client_name not in self._clients:
            self.logger.info('Unknown remote system_id %s' % system_id)
            return

        return self.send_event(client_name, ev)

    def _start_remote(self, sock, client_address):
        app = client.RemoteOvsdb.factory(sock, client_address)

        if app:
            self._clients[app.name] = app
            app.start()
            ev = event.EventNewOVSDBConnection(app.system_id)
            self.send_event_to_observers(ev)

        else:
            sock.shutdown(socket.SHUT_RDWR)
            sock.close()

    def start(self):
        server = hub.listen((self._address, self._port))
        key = self.CONF.ovsdb.mngr_privkey or self.CONF.ctl_privkey
        cert = self.CONF.ovsdb.mngr_cert or self.CONF.ctl_cert

        if key is not None and cert is not None:
            ssl_kwargs = dict(keyfile=key, certfile=cert, server_side=True)

            if self.CONF.ca_certs is not None:
                ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED
                ssl_kwargs['ca_certs'] = self.CONF.ca_certs

            server = ssl.wrap_socket(server, **ssl_kwargs)

        self._server = server

        self.logger.info('Listening on %s:%s for clients' % (self._address,
                                                             self._port))
        t = hub.spawn(self._accept, self._server)
        super(OVSDB, self).start()
        return t

    def stop(self):
        for client in self._clients.values():
            client.stop()

        super(OVSDB, self).stop()

    @handler.set_ev_cls(event.EventModifyRequest)
    def modify_request_handler(self, ev):
        system_id = ev.system_id
        client_name = client.RemoteOvsdb.instance_name(system_id)
        remote = self._clients.get(client_name)

        if not remote:
            msg = 'Unknown remote system_id %s' % system_id
            self.logger.info(msg)
            rep = event.EventModifyReply(system_id, None, None, msg)
            return self.reply_to_request(ev, rep)

        return remote.modify_request_handler(ev)

    @handler.set_ev_cls(event.EventReadRequest)
    def read_request_handler(self, ev):
        system_id = ev.system_id
        client_name = client.RemoteOvsdb.instance_name(system_id)
        remote = self._clients.get(client_name)

        if not remote:
            msg = 'Unknown remote system_id %s' % system_id
            self.logger.info(msg)
            rep = event.EventReadReply(self.system_id, None, msg)
            return self.reply_to_request(ev, rep)

        return remote.read_request_handler(ev)
Python
0
@@ -5120,21 +5120,16 @@ adReply( -self. system_i
1edf0898422c74173a9b6526c789b140938664b5
add main module
server/__main__.py
server/__main__.py
Python
0.000001
@@ -0,0 +1,13 @@ +import server
d48035b06b952b9ac4d95897d08de50d5977bf9f
Add basic test for OrderedDict.
tests/basics/ordereddict1.py
tests/basics/ordereddict1.py
Python
0
@@ -0,0 +1,355 @@ +try:%0A from collections import OrderedDict%0Aexcept ImportError:%0A try:%0A from _collections import OrderedDict%0A except ImportError:%0A print(%22SKIP%22)%0A import sys%0A sys.exit()%0A%0Ad = OrderedDict(%5B(10, 20), (%22b%22, 100), (1, 2)%5D)%0Aprint(list(d.keys()))%0Aprint(list(d.values()))%0Adel d%5B%22b%22%5D%0Aprint(list(d.keys()))%0Aprint(list(d.values()))%0A
8c13531751552de53de483ec1aad17bd16d4d74d
Configure MEDIA_ROOT and MEDIA_URL properly
mysite/settings.py
mysite/settings.py
# Django settings for mysite project. DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( ('Asheesh Laroia', '[email protected]'), ('Raphael Krut-Landau', '[email protected]') ) MANAGERS = ADMINS DATABASE_OPTIONS = { 'read_default_file': './my.cnf', } TEST_DATABASE_OPTIONS = { 'read_default_file': './my.cnf', } DATABASE_CHARSET = 'utf8' # omg I hate you MySQL DATABASE_ENGINE = 'mysql' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'. DATABASE_NAME = 'oh_milestone_a' # Or path to database file if using sqlite3. DATABASE_USER = 'oh_milestone_a' # Not used with sqlite3. DATABASE_PASSWORD = 'ahmaC0Th' # Not used with sqlite3. DATABASE_HOST = 'renaissance.local' # Set to empty string for localhost. Not used with sqlite3. DATABASE_HOST = 'localhost' # Set to empty string for localhost. Not used with sqlite3. DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3. # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # If running in a Windows environment this must be set to the same as your # system time zone. TIME_ZONE = 'America/New_York' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # Absolute path to the directory that holds media. # Example: "/home/media/media.lawrence.com/" MEDIA_ROOT = '' # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash if there is a path component (optional in other cases). # Examples: "http://media.lawrence.com", "http://example.com/media/" MEDIA_URL = '' # URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a # trailing slash. # Examples: "http://foo.com/media/", "/media/". ADMIN_MEDIA_PREFIX = '/media/' # Make this unique, and don't share it with anybody. SECRET_KEY = 'k%&pic%c5%6$%(h&eynhgwhibe9-h!_iq&(@ktx#@1-5g2+he)' # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.load_template_source', 'django.template.loaders.app_directories.load_template_source', # 'django.template.loaders.eggs.load_template_source', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', ) ROOT_URLCONF = 'mysite.urls' TEMPLATE_DIRS = ( # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. 
) STATIC_DOC_ROOT = 'static/' # Sessions in /tmp SESSION_ENGINE="django.contrib.sessions.backends.file" INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.webdesign', 'django.contrib.admin', 'windmill', 'south', 'celery', 'search', 'profile', 'customs', 'consulting', 'account', 'base', ) ### HACK from socket import gethostname if gethostname() in ('renaissance', 'yggdrasil', 'builder', 'vellum') and DEBUG: DEBUG_PROPAGATE_EXCEPTIONS=True # file: settings.py # TEST_RUNNER = '_profiling.profile_tests' TEST_PROFILE = '/tmp/profile' ## AMQP, Rabbit Queue, Celery AMQP_SERVER = "localhost" AMQP_PORT = 5672 AMQP_USER = "rabbiter" AMQP_PASSWORD = "johT4qui" AMQP_VHOST = "localhost" cooked_data_password = 'AXQaTjp3' AUTH_PROFILE_MODULE = "profile.Person" LOGIN_URL = '/account/login/' OHLOH_API_KEY='JeXHeaQhjXewhdktn4nUw' # "Oman testing"
Python
0
@@ -1,8 +1,18 @@ +import os%0A # Django @@ -1753,18 +1753,65 @@ _ROOT = -'' +os.path.join(os.path.dirname(__file__), 'static') %0A%0A# URL @@ -2031,16 +2031,24 @@ _URL = ' +/static/ '%0A%0A# URL
45ccdce362694f50c43828e3923fc9e3fa32c8bb
Add list_callbacks.py
scripts/list_callbacks.py
scripts/list_callbacks.py
Python
0.000003
@@ -0,0 +1,202 @@ +#!/usr/bin/env python2%0A%0Aimport sys%0A%0Afrom parse_header import *%0A%0Adef main(argv):%0A%09for type, name, args, attrs in get_callbacks(sys.stdin.read()):%0A%09%09print name%0A%0Aif __name__ == %22__main__%22:%0A%09main(sys.argv)%0A
f075f21b53e13d53fc26e38bcf995d55ea44df67
Patch bump for pypi
exa/__init__.py
exa/__init__.py
# -*- coding: utf-8 -*- # Copyright (c) 2015-2016, Exa Analytics Development Team # Distributed under the terms of the Apache License 2.0 ''' Exa ######### This package creates a systematic infrastructure for an ecosystem of packages, tailored to specific industry or academic displines, for organizing, processing, analyzing, and visualizing data. It is built with minimal dependencies, leverages established open-source packages, is itself extensible, and is targeted at both industry and academic applications. At a high level, data objects such as series or dataframes (i.e. `pandas`_ like objects) are organized into containers which track relationships between these objects and provide methods for computation, conversion to other formats, analysis, and visualization within the `Jupyter notebook`_ environment. .. _pandas: http://pandas.pydata.org/pandas-docs/stable/index.html .. _Jupyter notebook: http://jupyter.org/ ''' __exa_version__ = (0, 2, 4) __version__ = '.'.join((str(v) for v in __exa_version__)) from exa import _config from exa import log from exa import test from exa import relational from exa import widget from exa import math from exa import distributed from exa import mpl, tex from exa import error # User API from exa.numerical import Series, DataFrame, Field3D, SparseSeries, SparseDataFrame from exa.container import Container from exa.editor import Editor from exa.filetypes import CSV
Python
0
@@ -976,9 +976,9 @@ 2, -4 +5 )%0D%0A_
ec91d5106bfab93e0540e5dc4a0bbd9b7cea151b
add script to update old .h5 files to support normalization
scripts/update_data_h5.py
scripts/update_data_h5.py
Python
0
@@ -0,0 +1,719 @@ +# update h5 files created by old versions of pyannote-speaker-embedding%0A# estimate mu/sigma and save it back to the file%0A# usage: update_data_h5.py /path/to/file.h5%0A%0Aimport sys%0Aimport h5py%0Aimport numpy as np%0Afrom tqdm import tqdm%0A%0Adata_h5 = sys.argv%5B1%5D%0A%0Awith h5py.File(data_h5, mode='r') as fp:%0A X = fp%5B'X'%5D%0A weights, means, squared_means = zip(*(%0A (len(x), np.mean(x, axis=0), np.mean(x**2, axis=0))%0A for x in tqdm(X)))%0A mu = np.average(means, weights=weights, axis=0)%0A squared_mean = np.average(squared_means, weights=weights, axis=0)%0A sigma = np.sqrt(squared_mean - mu ** 2)%0A%0A%0Awith h5py.File(data_h5, mode='r+') as fp:%0A X = fp%5B'X'%5D%0A X.attrs%5B'mu'%5D = mu%0A X.attrs%5B'sigma'%5D = sigma%0A
50ba17b46c7fcc7eb42a48a5ec82e295fdbeae13
Add missing migration
migrations/versions/25ecf1c9b3fb_introduce_deploykey_entity.py
migrations/versions/25ecf1c9b3fb_introduce_deploykey_entity.py
Python
0.0002
@@ -0,0 +1,1858 @@ +%22%22%22Introduce DeployKey entity%0A%0ARevision ID: 25ecf1c9b3fb%0ARevises: 1c314d48261a%0ACreate Date: 2014-02-08 02:56:34.174597%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '25ecf1c9b3fb'%0Adown_revision = '1c314d48261a'%0A%0A%0Aimport sqlalchemy as sa%0Afrom alembic import op%0Afrom sqlalchemy.dialects import mysql%0A%0Afrom kozmic.models import db, DeployKey, Project%0A%0A%0Adef upgrade():%0A op.create_table('deploy_key',%0A sa.Column('id', sa.Integer(), nullable=False),%0A sa.Column('project_id', sa.Integer(), nullable=False),%0A sa.Column('gh_id', sa.Integer(), nullable=False),%0A sa.Column('rsa_private_key', sa.Text(), nullable=False),%0A sa.Column('rsa_public_key', sa.Text(), nullable=False),%0A sa.ForeignKeyConstraint(%5B'project_id'%5D, %5B'project.id'%5D, ),%0A sa.PrimaryKeyConstraint('id')%0A )%0A select = db.select(%5B'id', 'is_public', 'rsa_public_key', 'rsa_private_key', 'gh_key_id'%5D,%0A from_obj=Project.__tablename__)%0A for id, is_public, rsa_public_key, rsa_private_key, gh_key_id %5C%0A in db.session.execute(select).fetchall():%0A if is_public:%0A continue%0A insert = DeployKey.__table__.insert().values(%0A project_id=id,%0A rsa_public_key=rsa_public_key,%0A rsa_private_key=rsa_private_key,%0A gh_id=gh_key_id)%0A db.session.execute(insert)%0A db.session.commit()%0A op.drop_column(u'project', 'rsa_public_key')%0A op.drop_column(u'project', 'rsa_private_key')%0A op.drop_column(u'project', 'gh_key_id')%0A%0A%0Adef downgrade():%0A op.add_column(u'project', sa.Column('gh_key_id', mysql.INTEGER(display_width=11), nullable=False))%0A op.add_column(u'project', sa.Column('rsa_private_key', mysql.MEDIUMTEXT(), nullable=False))%0A op.add_column(u'project', sa.Column('rsa_public_key', mysql.MEDIUMTEXT(), nullable=False))%0A op.drop_table('deploy_key')%0A
0e833de83903c26fb3ca04c10b140c712350a12f
Create tests.py
unit-3-mixed-reading-and-assignment-lessons/lesson-3-assignment-one-code-block/tests.py
unit-3-mixed-reading-and-assignment-lessons/lesson-3-assignment-one-code-block/tests.py
Python
0.000001
@@ -0,0 +1,398 @@ +import unittest%0A%0A%0Aclass ConvertTemperatureTestCase(unittest.TestCase):%0A def test_fahrenheit_to_celsius(self):%0A self.assertEqual(convert_temperature(32, to='celsius'), 0)%0A%0A def test_celsius_to_fahrenheit(self):%0A self.assertEqual(convert_temperature(40, to='fahrenheit'), 104)%0A%0A def test_default_parameter_is_celsius(self):%0A self.assertEqual(convert_temperature(32), 0)%0A
6c6d3d365e021918fe88450136a75bbac7a21d5c
add .percol.d
.percol.d/rc.py
.percol.d/rc.py
Python
0.000005
@@ -0,0 +1,1321 @@ +# Emacs like%0Apercol.import_keymap(%7B%0A %22C-h%22 : lambda percol: percol.command.delete_backward_char(),%0A %22C-d%22 : lambda percol: percol.command.delete_forward_char(),%0A %22C-k%22 : lambda percol: percol.command.kill_end_of_line(),%0A %22C-y%22 : lambda percol: percol.command.yank(),%0A %22C-t%22 : lambda percol: percol.command.transpose_chars(),%0A %22C-a%22 : lambda percol: percol.command.beginning_of_line(),%0A %22C-e%22 : lambda percol: percol.command.end_of_line(),%0A %22C-b%22 : lambda percol: percol.command.backward_char(),%0A %22C-f%22 : lambda percol: percol.command.forward_char(),%0A %22M-f%22 : lambda percol: percol.command.forward_word(),%0A %22M-b%22 : lambda percol: percol.command.backward_word(),%0A %22M-d%22 : lambda percol: percol.command.delete_forward_word(),%0A %22M-h%22 : lambda percol: percol.command.delete_backward_word(),%0A %22C-n%22 : lambda percol: percol.command.select_next(),%0A %22C-p%22 : lambda percol: percol.command.select_previous(),%0A %22C-v%22 : lambda percol: percol.command.select_next_page(),%0A %22M-v%22 : lambda percol: percol.command.select_previous_page(),%0A %22M-%3C%22 : lambda percol: percol.command.select_top(),%0A %22M-%3E%22 : lambda percol: percol.command.select_bottom(),%0A %22C-m%22 : lambda percol: percol.finish(),%0A %22C-j%22 : lambda percol: percol.finish(),%0A %22C-g%22 : lambda percol: percol.cancel(),%0A%7D)%0A
d7595d6d80468ec5f0e4bde86db8a431c4384ad3
Solve 41.
041/solution.py
041/solution.py
Python
0.998706
@@ -0,0 +1,1221 @@ +# coding: utf-8%0A%0A%22%22%22 Project Euler problem #41. %22%22%22%0A%0Aimport math as mt%0A%0A%0Adef problem():%0A u%22%22%22 Solve the problem.%0A%0A We shall say that an n-digit number is pandigital if it makes use of all%0A the digits 1 to n exactly once. For example, 2143 is a 4-digit pandigital%0A and is also prime.%0A%0A What is the largest n-digit pandigital prime that exists?%0A%0A Solution: any integer divisible by 3 or 9 when sum of digits is divisible%0A by 3 or 9. So it's mean we could check only range (4321, 7654321), because%0A 1+2+3+4+5+6+7+8=36%0A%0A Answer: 7652413%0A%0A %22%22%22%0A for x in xrange(7654321, 4321, -2):%0A if is_pandigital(x) and is_prime(x):%0A return x%0A%0A%0Adef is_pandigital(*args):%0A %22%22%22 Check numbers is pandigital through 9. %22%22%22%0A return '123456789'.startswith(%0A ''.join(sorted(x for arg in args for x in str(arg))))%0A%0A%0Adef is_prime(num):%0A %22%22%22 Check number is prime. %22%22%22%0A if is_even(num) and num != 2 or num == 1:%0A return False%0A%0A for dd in range(3, int(mt.sqrt(num)) + 1):%0A if num %25 dd == 0:%0A return False%0A%0A return True%0A%0A%0Adef is_even(num):%0A %22%22%22 Check for number is even. %22%22%22%0A return num %25 2 == 0%0A%0A%0Aif __name__ == '__main__':%0A print problem()%0A
b395239526a4246193aa9ce0e541538e5690f408
Return change
python/reddit/least_bills.py
python/reddit/least_bills.py
Python
0.000015
@@ -0,0 +1,974 @@ +def bills_needed(money):%0A %22%22%22Determine optimal numbers of each bill denomination for amount.%0A%0A Args:%0A money, int: Amount of money to figure bills for%0A Returns:%0A cash, dict: Count of each type of bill needed for sum%0A %22%22%22%0A denominations = %5B1, 2, 5, 10, 20, 50, 100%5D%0A cash = %7B%7D%0A balance = money%0A bill_count = 0%0A if money %3E 0:%0A for denomination in sorted(denominations, reverse=True):%0A bills = balance // denomination%0A if bills %3E 0:%0A cash%5Bdenomination%5D = bills%0A bill_count += bills%0A balance = balance %25 denomination%0A return bill_count, cash%0A%0Adef test_bills_needed():%0A tests = %5B%0A 1,%0A 2,%0A 42,%0A 51,%0A 123,%0A 222,%0A 500,%0A %5D%0A for test in tests:%0A bill_count, cash = bills_needed(test)%0A print('Money: %7B%7D, Bills: %7B%7D - %7B%7D'.format(test, bill_count, cash))%0A%0Adef main():%0A test_bills_needed()%0A%0Amain()%0A
9bcb2566afa8191e24fb4f66b3fb882724ba4083
Test ragged getitem
thinc/tests/test_indexing.py
thinc/tests/test_indexing.py
Python
0
@@ -0,0 +1,1119 @@ +import pytest%0Aimport numpy%0Afrom numpy.testing import assert_allclose%0Afrom thinc.types import Ragged%0A%0A%[email protected]%0Adef ragged():%0A data = numpy.zeros((20, 4), dtype=%22f%22)%0A lengths = numpy.array(%5B4, 2, 8, 1, 4%5D, dtype=%22i%22)%0A data%5B0%5D = 0%0A data%5B1%5D = 1%0A data%5B2%5D = 2%0A data%5B3%5D = 3%0A data%5B4%5D = 4%0A data%5B5%5D = 5%0A return Ragged(data, lengths)%0A%0Adef test_ragged_starts_ends(ragged):%0A starts = ragged._get_starts()%0A ends = ragged._get_ends()%0A assert list(starts) == %5B0, 4, 6, 14, 15%5D%0A assert list(ends) == %5B4, 6, 14, 15, 19%5D%0A%0A%0Adef test_ragged_simple_index(ragged, i=1):%0A r = ragged%5Bi%5D%0A assert_allclose(r.data, ragged.data%5B4:6%5D)%0A assert_allclose(r.lengths, ragged.lengths%5Bi:i+1%5D)%0A%0A%0Adef test_ragged_slice_index(ragged, start=0, end=2):%0A r = ragged%5Bstart:end%5D%0A size = ragged.lengths%5Bstart:end%5D.sum()%0A assert r.data.shape == (size, r.data.shape%5B1%5D)%0A assert_allclose(r.lengths, ragged.lengths%5Bstart:end%5D)%0A%0A%0Adef test_ragged_array_index(ragged):%0A arr = numpy.array(%5B2, 1, 4%5D, dtype=%22i%22)%0A print(arr)%0A r = ragged%5Barr%5D%0A assert r.data.shape%5B0%5D == ragged.lengths%5Barr%5D.sum()%0A
f433cdb41f33a7b9daeaf276bf19d2617534e781
Add Tensor Flow
python/src/fft/fourier_nd.py
python/src/fft/fourier_nd.py
Python
0.000019
@@ -0,0 +1,1172 @@ +import numpy as np%0Aimport pylab as plt%0Aimport pandas as pd%0Afrom numpy import fft%0Aimport src.mylib.mfile as mfile%0A%0Adef bandpass_filter(x, freq, frequency_of_signal=0, band=0.1):%0A if (frequency_of_signal - band) %3C abs(freq) %3C (frequency_of_signal + band):%0A return x%0A else:%0A return 0%0A%0Ad1 = mfile.loadClose('JPY=X', '../db/forex.db')%0Ad2 = mfile.loadClose('GBP=X', '../db/forex.db')%0Ads = pd.concat(%5Bd1, d2%5D, axis=1, join='inner')%0Ax = ds.values%0Afor i in range(1):%0A x = x%5Bi:%5D%0A N = len(x)%0A spectrum = fft.fftn(x)%0A feq = fft.fftfreq(N) # frequencies%0A ampli = np.absolute(spectrum) # amplitude%0A phase = np.angle(spectrum) # phase%0A #print(phase)%0A index = np.argsort(-ampli, axis = 0)%0A sfreq = feq%5Bindex%5D%0A sampl = ampli%5Bindex%5D%0A #print(sampl%5B1:10%5D)%0A #sfreq = np.where(sfreq %3E 0)%0A #big = list(zip(*sfreq))%0A print(sfreq%5B1:10%5D * N)%0A plt.plot(sfreq * N, 'o')%0A#F_filtered = np.asanyarray(%5Bbandpass_filter(x, freq) for x, freq in zip(spectrum, feq)%5D)%0A#filtered_signal = np.fft.ifft(F_filtered)%0A%0A#plt.semilogy(feq%5B1:%5D, ampli%5B1:%5D), 'o') #zero feq is very large%0A#plt.semilogy(ampli%5B1:%5D)%0Aplt.legend()%0Aplt.show()%0A
470063b8d468394432e729e7417c88263614b5f0
Create msub_cluster.py
snakemake_ChIPseq_pipeline/msub_cluster.py
snakemake_ChIPseq_pipeline/msub_cluster.py
Python
0.000046
@@ -0,0 +1 @@ +%0A
a7b0fc1effd0e68018bc3c33f1dc0b952b23003b
update nav access restrictions
accelerator/migrations/0095_update_nav_tree_access_restrictions.py
accelerator/migrations/0095_update_nav_tree_access_restrictions.py
Python
0
@@ -0,0 +1,778 @@ +# Generated by Django 2.2.24 on 2022-03-29 16:24%0A%0Afrom django.db import migrations%0A%0A%0Adef update_nav_access_restrictions(apps, schema_editor):%0A NavTreeItem = apps.get_model('accelerator', 'NavTreeItem')%0A urls = %5B'/judging/panel/', '/judging/commitments/', '/nav/judging'%5D%0A nav_items = NavTreeItem.objects.filter(url__in=urls)%0A # remove user_type restrictions%0A nav_items.update(user_type='')%0A # remove user_role restrictions%0A %5Bnav_item.user_role.clear() for nav_item in nav_items%5D%0A%0A%0Aclass Migration(migrations.Migration):%0A dependencies = %5B%0A ('accelerator', '0094_alter_startup_organization_fields'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(update_nav_access_restrictions,%0A migrations.RunPython.noop)%0A %5D%0A
645efb8ffcc3c9a3e41db2619430ffcb7a6d570f
Migrate Req/Ass to have verified/finished date
src/ggrc/migrations/versions/20160314140338_4fd36860d196_add_finished_date_to_request_and_.py
src/ggrc/migrations/versions/20160314140338_4fd36860d196_add_finished_date_to_request_and_.py
Python
0
@@ -0,0 +1,1519 @@ +# Copyright (C) 2016 Google Inc., authors, and contributors %3Csee AUTHORS file%3E%0A# Licensed under http://www.apache.org/licenses/LICENSE-2.0 %3Csee LICENSE file%3E%0A# Created By: [email protected]%0A# Maintained By: [email protected]%0A%0A%22%22%22%0Aadd finished date to request and assessment%0A%0ACreate Date: 2016-03-14 14:03:38.026877%0A%22%22%22%0A# disable Invalid constant name pylint warning for mandatory Alembic variables.%0A# pylint: disable=invalid-name%0A%0Aimport sqlalchemy as sa%0A%0Afrom alembic import op%0A%0A%0A# revision identifiers, used by Alembic.%0Arevision = '4fd36860d196'%0Adown_revision = '39aec99639d5'%0A%0A%0Adef upgrade_table(table):%0A %22%22%22Add columns finished_date and verified_date and populate them.%22%22%22%0A op.add_column(table,%0A sa.Column('finished_date', sa.DateTime(), nullable=True))%0A op.add_column(table,%0A sa.Column('verified_date', sa.DateTime(), nullable=True))%0A op.execute(%22%22%22%0A UPDATE %7B%7D%0A SET finished_date = updated_at%0A WHERE status in (%22Finished%22, %22Verified%22, %22Final%22)%0A %22%22%22.format(table))%0A op.execute(%22%22%22%0A UPDATE %7B%7D%0A SET verified_date = updated_at, status = %22Final%22%0A WHERE status = %22Verified%22%0A %22%22%22.format(table))%0A%0A%0Adef upgrade():%0A upgrade_table('requests')%0A upgrade_table('assessments')%0A%0A%0Adef downgrade():%0A %22%22%22Remove verified_date and finished_date columns.%22%22%22%0A op.drop_column('assessments', 'verified_date')%0A op.drop_column('assessments', 'finished_date')%0A op.drop_column('requests', 'verified_date')%0A op.drop_column('requests', 'finished_date')%0A
bad82f3c77dbeebdc2332d193f0c8290c5186862
add rudimentary api
frontend/app.py
frontend/app.py
Python
0.000007
@@ -0,0 +1,1541 @@ +import os%0Aimport sys%0Aimport inspect%0Aimport json%0Afrom flask import Flask%0A%0A# move to top level%0Acurrentdir = os.path.dirname(%0A os.path.abspath(inspect.getfile(inspect.currentframe())))%0Aparentdir = os.path.dirname(currentdir)%0Asys.path.insert(0, parentdir)%0A%0Afrom api.reporting import Report%0Afrom api.apiutils import Scope, Relation%0A%0Afrom modelstore.elasticstore import StoreHandler%0Afrom knowledgerepr import fieldnetwork%0Afrom algebra import API%0A%0Apath_to_serialized_model = parentdir + %22/test/testmodel/%22%0Anetwork = fieldnetwork.deserialize_network(path_to_serialized_model)%0Astore_client = StoreHandler()%0A%0Aapi = API(network, store_client)%0A%0Akeyword_search = api.keyword_search%0Aneighbor_search = api.neighbor_search%0Aunion = api.union%0Aintersection = api.intersection%0Adifference = api.difference%0A%0Adb = Scope.DB%0Asource = Scope.SOURCE%0Afeld = Scope.FIELD%0Acontent = Scope.CONTENT%0A%0Aschema = Relation.SCHEMA%0Aschema_sim = Relation.SCHEMA_SIM%0Acontent_sim = Relation.CONTENT_SIM%0Aentity_sim = Relation.ENTITY_SIM%0Apkfk = Relation.PKFK%0A%0Aapp = Flask(__name__)%0A%[email protected]('/query/%3Cquery%3E')%0Adef query(query):%0A try:%0A res = eval(query)%0A res = json.dumps(res.data)%0A except Exception as e:%0A res = %22error: %22 + str(e)%0A return res%0A%0A%[email protected]('/convert/%3Cnid%3E')%0Adef convert(nid):%0A try:%0A import pdb; pdb.set_trace()%0A nid = int(nid)%0A res = api._general_to_drs(nid)%0A res = json.dumps(res.data)%0A except Exception as e:%0A res = %22error: %22 + str(e)%0A return res%0A%0A%0Aif __name__ == '__main__':%0A app.run()%0A
37baa669ed1e00fabddd33478fa75f4047075ce3
Create Python object detection script.
cs473vision/ObjectDetector.py
cs473vision/ObjectDetector.py
Python
0
@@ -0,0 +1,1092 @@ +'''%0ACreated on Feb 28, 2014%0A%0A@author: Vance Zuo%0A'''%0A%0Aimport numpy%0Aimport cv2%0A%0Aclass ObjectDetector(object):%0A '''%0A classdocs%0A '''%0A%0A%0A def __init__(self, params):%0A '''%0A Constructor%0A '''%0A self.bg_img = None%0A self.fg_img = None%0A return%0A %0A def load_image(self, bg_path, fg_path):%0A self.bg_img = cv2.imread(bg_path)%0A self.fg_img = cv2.imread(fg_path)%0A return True%0A %0A def subtract_background(self):%0A # Take simple difference%0A naive = cv2.absdiff(self.bg_img, self.bg_img)%0A cv2.imwrite(%22naive.png%22, naive)%0A %0A # MOG Subtraction%0A bg_subtractor = cv2.BackgroundSubtractorMOG()%0A bg_mask = bg_subtractor.apply(self.bg_img)%0A fg_mask = bg_subtractor.apply(self.fg_img)%0A cv2.imwrite(%22MOG.png%22, fg_mask)%0A %0A # MOG2 Subtraction%0A bg_subtractor = cv2.BackgroundSubtractorMOG2()%0A bg_mask = bg_subtractor.apply(self.bg_img)%0A fg_mask = bg_subtractor.apply(self.fg_img)%0A cv2.imwrite(%22MOG2.png%22, fg_mask)%0A return
4aacc8d55c138c405d561bbf9ddfd9ddab483e62
add wxPython example.
trypython/extlib/gui/wx01.py
trypython/extlib/gui/wx01.py
Python
0
@@ -0,0 +1,443 @@ +%22%22%22%0AwxPython %E3%81%AB%E3%81%A4%E3%81%84%E3%81%A6%E3%81%AE%E3%82%B5%E3%83%B3%E3%83%97%E3%83%AB%E3%81%A7%E3%81%99%0A%0A%E3%81%8A%E6%B1%BA%E3%81%BE%E3%82%8A%E3%81%AE Hello world %E3%81%AB%E3%81%A4%E3%81%84%E3%81%A6%0A%0AREFERENCES:: http://bit.ly/2OcHRh7%0A%22%22%22%0A# noinspection PyPackageRequirements%0Aimport wx%0A%0Afrom trypython.common.commoncls import SampleBase%0A%0A%0Aclass Sample(SampleBase):%0A def exec(self):%0A app = wx.App()%0A%0A frm = wx.Frame(parent=None, title='Hello World')%0A frm.Show()%0A%0A app.MainLoop()%0A%0A%0Adef go():%0A obj = Sample()%0A obj.exec()%0A%0A%0Aif __name__ == '__main__':%0A go()%0A
9566d71a267015005ad937cd92bd105d2ffff274
print users with multiple matching urls
twnews/print_urls_by_user.py
twnews/print_urls_by_user.py
Python
0.000006
@@ -0,0 +1,876 @@ +%22%22%22%0APrint users who were found for multiple urls.%0A%0Auser url1 score1 url2 score2 ...%0A%22%22%22%0Aimport codecs%0Afrom collections import defaultdict%0Aimport json%0Aimport sys%0A%0Afrom . import __data__%0A%0Asys.stdout = codecs.getwriter('utf8')(sys.stdout)%0A%0A%0Adef print_urls_by_user(tweets_file=__data__ + '/tweets.json'):%0A user2urls = defaultdict(lambda: set())%0A url2score = defaultdict(lambda: 0.)%0A inf = codecs.open(tweets_file, 'rt', 'utf-8')%0A for line in inf:%0A js = json.loads(line)%0A if 'url_query' in js: # valid line%0A user2urls%5Bjs%5B'user'%5D%5B'screen_name'%5D%5D.add(js%5B'url_query'%5D)%0A url2score%5Bjs%5B'url_query'%5D%5D = float(js%5B'url_score'%5D)%0A for user, urls in user2urls.iteritems():%0A if len(urls) %3E 1:%0A print user + '%5Ct' + '%5Ct'.join('%25s%5Ct%25.3f' %25 (u, url2score%5Bu%5D) for u in urls)%0A%0A%0Aif __name__ == '__main__':%0A print_urls_by_user()%0A
bc9c057d57d4dbc2e3c70eaf3ac182df2b334107
fix bluffing again
player.py
player.py
import logging import card as c class Player(object): """ This class represents a player. It is basically a doubly-linked ring list with the option to reverse the direction. On initialization, it will connect itself to a game and its other players by placing itself behind the current player. """ def __init__(self, game, user): self.cards = list() self.game = game self.user = user self.logger = logging.getLogger(__name__) # Check if this player is the first player in this game. if game.current_player: self.next = game.current_player self.prev = game.current_player.prev game.current_player.prev.next = self game.current_player.prev = self else: self._next = self self._prev = self game.current_player = self for i in range(7): self.cards.append(self.game.deck.draw()) self.bluffing = False self.drew = False def leave(self): """ Leave the current game """ self.next.prev = self.prev self.prev.next = self.next self.next = None self.prev = None def __repr__(self): return repr(self.user) def __str__(self): return str(self.user) @property def next(self): return self._next if not self.game.reversed else self._prev @next.setter def next(self, player): if not self.game.reversed: self._next = player else: self._prev = player @property def prev(self): return self._prev if not self.game.reversed else self._next @prev.setter def prev(self, player): if not self.game.reversed: self._prev = player else: self._next = player def playable_cards(self): """ Returns a list of the cards this player can play right now """ playable = list() last = self.game.last_card self.logger.debug("Last card was " + str(last)) cards = self.cards if self.drew: cards = self.cards[-1:] for card in cards: if self.card_playable(card, playable): self.logger.debug("Matching!") playable.append(card) # You may only play a +4 if it's the only card you can play self.bluffing = bool(len(playable) - 1) # You may not play a chooser or +4 as your last card if len(self.cards) == 1 and (self.cards[0].special == c.DRAW_FOUR or self.cards[0].special == c.CHOOSE): return list() return playable def card_playable(self, card, playable): """ Check a single card if it can be played """ is_playable = True last = self.game.last_card self.logger.debug("Checking card " + str(card)) if (card.color != last.color and card.value != last.value and not card.special): self.logger.debug("Card's color or value doesn't match") is_playable = False if last.value == c.DRAW_TWO and not \ card.value == c.DRAW_TWO and self.game.draw_counter: self.logger.debug("Player has to draw and can't counter") is_playable = False if last.special == c.DRAW_FOUR and self.game.draw_counter: self.logger.debug("Player has to draw and can't counter") is_playable = False if (last.special == c.CHOOSE or last.special == c.DRAW_FOUR) and \ (card.special == c.CHOOSE or card.special == c.DRAW_FOUR): self.logger.debug("Can't play colorchooser on another one") is_playable = False if not last.color or card in playable: self.logger.debug("Last card has no color or the card was " "already added to the list") is_playable = False return is_playable
Python
0.000009
@@ -2348,87 +2348,206 @@ if -it's the only card you can play%0A self.bluffing = bool(len(playable) - 1) +you have no cards of the correct color%0A self.bluffing = False%0A for card in playable:%0A if card.color == last.color:%0A self.bluffing = True%0A break %0A%0A @@ -2678,16 +2678,19 @@ RAW_FOUR + or %0A @@ -2719,19 +2719,16 @@ -or self.car
e5627134d9a2c052a523f66a2ec9867b3432fae2
Test Issue #461: ent_iob tag incorrect after setting entities.
spacy/tests/tokens/test_add_entities.py
spacy/tests/tokens/test_add_entities.py
Python
0
@@ -0,0 +1,1115 @@ +from __future__ import unicode_literals%0Aimport spacy%0Afrom spacy.vocab import Vocab%0Afrom spacy.matcher import Matcher%0Afrom spacy.tokens.doc import Doc%0Afrom spacy.attrs import *%0Afrom spacy.pipeline import EntityRecognizer%0A%0Aimport pytest%0A%0A%[email protected](scope=%22module%22)%0Adef en_vocab():%0A return spacy.get_lang_class('en').Defaults.create_vocab()%0A%0A%[email protected](scope=%22module%22)%0Adef entity_recognizer(en_vocab):%0A return EntityRecognizer(en_vocab, features=%5B(2,), (3,)%5D)%0A%[email protected]%0Adef animal(en_vocab):%0A return nlp.vocab.strings%5Bu%22ANIMAL%22%5D%0A%0A%[email protected]%0Adef doc(en_vocab, entity_recognizer):%0A doc = Doc(en_vocab, words=%5Bu%22this%22, u%22is%22, u%22a%22, u%22lion%22%5D)%0A entity_recognizer(doc)%0A return doc%0A%0A%0Adef test_set_ents_iob(doc):%0A assert len(list(doc.ents)) == 0%0A tags = %5Bw.ent_iob_ for w in doc%5D%0A assert tags == (%5B'O'%5D * len(doc))%0A doc.ents = %5B(doc.vocab.strings%5B'ANIMAL'%5D, 3, 4)%5D%0A tags = %5Bw.ent_iob_ for w in doc%5D%0A assert tags == %5B'O', 'O', 'O', 'B'%5D%0A doc.ents = %5B(doc.vocab.strings%5B'WORD'%5D, 0, 2)%5D%0A tags = %5Bw.ent_iob_ for w in doc%5D%0A assert tags == %5B'B', 'I', 'O', 'O'%5D%0A
011c01b1d81f7eed22eaf5418c60016597309789
Update fetching CA definitions for assessments
src/ggrc/converters/handlers/custom_attribute.py
src/ggrc/converters/handlers/custom_attribute.py
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: [email protected] # Maintained By: [email protected] """Handlers used for custom attribute columns.""" from dateutil.parser import parse from ggrc import db from ggrc import models from ggrc.converters import errors from ggrc.converters.handlers import handlers _types = models.CustomAttributeDefinition.ValidTypes class CustomAttributeColumHandler(handlers.TextColumnHandler): """Custom attribute column handler This is a handler for all types of custom attribute column. It works with any custom attribute definition and with mondatory flag on or off. """ _type_handlers = { _types.TEXT: lambda self: self.get_text_value(), _types.DATE: lambda self: self.get_date_value(), _types.DROPDOWN: lambda self: self.get_dropdown_value(), _types.CHECKBOX: lambda self: self.get_checkbox_value(), _types.RICH_TEXT: lambda self: self.get_rich_text_value(), _types.MAP: lambda self: self.get_person_value(), } def parse_item(self): """Parse raw value from csv file Returns: CustomAttributeValue with the correct definition type and value. """ self.definition = self.get_ca_definition() value = models.CustomAttributeValue(custom_attribute_id=self.definition.id) typ = self.definition.attribute_type.split(":")[0] value_handler = self._type_handlers[typ] value.attribute_value = value_handler(self) if isinstance(value.attribute_value, models.mixins.Identifiable): obj = value.attribute_value value.attribute_value = obj.__class__.__name__ value.attribute_object_id = obj.id if value.attribute_value is None: return None return value def get_value(self): """Return the value of the custom attrbute field. Returns: Text representation if the custom attribute value if it exists, otherwise None. 
""" definition = self.get_ca_definition() if not definition: return "" for value in self.row_converter.obj.custom_attribute_values: if value.custom_attribute_id == definition.id: if value.custom_attribute.attribute_type.startswith("Map:"): obj = value.attribute_object return getattr(obj, "email", getattr(obj, "slug", None)) return value.attribute_value return None def set_obj_attr(self): if self.value: self.row_converter.obj.custom_attribute_values.append(self.value) def insert_object(self): """Add custom attribute objects to db session.""" if self.dry_run or self.value is None: return self.value.attributable_type = self.row_converter.obj.__class__.__name__ self.value.attributable_id = self.row_converter.obj.id db.session.add(self.value) self.dry_run = True def get_date_value(self): """Get date value from input string date.""" if not self.mandatory and self.raw_value == "": return None # ignore empty fields value = None try: value = parse(self.raw_value) except (TypeError, ValueError): self.add_warning(errors.WRONG_VALUE, column_name=self.display_name) if self.mandatory and value is None: self.add_error(errors.MISSING_VALUE_ERROR, column_name=self.display_name) return value def get_checkbox_value(self): if not self.mandatory and self.raw_value == "": return None # ignore empty fields value = self.raw_value.lower() in ("yes", "true") if self.raw_value.lower() not in ("yes", "true", "no", "false"): self.add_warning(errors.WRONG_VALUE, column_name=self.display_name) value = None if self.mandatory and value is None: self.add_error(errors.MISSING_VALUE_ERROR, column_name=self.display_name) return value def get_dropdown_value(self): choices_list = self.definition.multi_choice_options.split(",") valid_choices = [val.strip() for val in choices_list] choice_map = {choice.lower(): choice for choice in valid_choices} value = choice_map.get(self.raw_value.lower()) if value is None and self.raw_value != "": self.add_warning(errors.WRONG_VALUE, column_name=self.display_name) if self.mandatory and value is None: self.add_error(errors.MISSING_VALUE_ERROR, column_name=self.display_name) return value def get_text_value(self): if not self.mandatory and self.raw_value == "": return None # ignore empty fields value = self.clean_whitespaces(self.raw_value) if self.mandatory and not value: self.add_error(errors.MISSING_VALUE_ERROR, column_name=self.display_name) return value def get_rich_text_value(self): if not self.mandatory and self.raw_value == "": return None # ignore empty fields if self.mandatory and not self.raw_value: self.add_error(errors.MISSING_VALUE_ERROR, column_name=self.display_name) return self.raw_value def get_person_value(self): """Fetch a person based on the email text in column. 
Returns: Person model instance """ if not self.mandatory and self.raw_value == "": return None # ignore empty fields if self.mandatory and not self.raw_value: self.add_error(errors.MISSING_VALUE_ERROR, column_name=self.display_name) return value = models.Person.query.filter_by(email=self.raw_value).first() if self.mandatory and not value: self.add_error(errors.WRONG_VALUE, column_name=self.display_name) return value def get_ca_definition(self): """Get custom attribute definition.""" return self.row_converter.block_converter.ca_definitions_cache.get( (self.row_converter.obj.id, self.display_name)) class ObjectCaColumnHandler(CustomAttributeColumHandler): """Handler for object level custom attributes.""" def set_value(self): pass def set_obj_attr(self): if self.dry_run: return self.value = self.parse_item() if self.value: self.row_converter.obj.custom_attribute_values.append(self.value) def get_ca_definition(self): """Get custom attribute definition for a specific object.""" if self.row_converter.obj.id is None: return None return self.row_converter.block_converter.ca_definitions_cache.get( (self.row_converter.obj.id, self.display_name))
Python
0
@@ -319,16 +319,45 @@ parse%0A%0A +from sqlalchemy import and_%0A%0A from ggr @@ -6280,30 +6280,79 @@ rn None%0A -return +cad = models.CustomAttributeDefinition%0A definition = self.row_co @@ -6437,28 +6437,231 @@ obj.id, self.display_name))%0A + if not definition:%0A definition = cad.query.filter(and_(%0A cad.definition_id == self.row_converter.obj.id,%0A cad.title == self.display_name%0A )).first()%0A return definition%0A
7cee0980a67b827e4cf06c15e0f1c3b412f68c22
Create main.py to actually perform the test
main.py
main.py
Python
0
@@ -0,0 +1,426 @@ +import io%0Aimport argparse%0A%0Adef initialize_argument_parser():%0A parser = argparse.ArgumentParser(description='Simulate Indian health solutions')%0A parser.add_argument('-s', '--solution', dest='solution', %0A help='the solution to test', default='health kiosk')%0A return vars(parser.parse_args())%0A%0Aif __name__ == %22__main__%22:%0A args = initialize_argument_parser()%0A print 'Solution to test:', args%5B'solution'%5D%0A
361c3496274a960c5e927899a39618f8fee9db0a
Add Basic Trellis Histogram to Examples
altair/vegalite/v2/examples/trellis_histogram.py
altair/vegalite/v2/examples/trellis_histogram.py
Python
0
@@ -0,0 +1,444 @@ +%22%22%22%0ATrellis Histogram%0A-----------------%0AThis example shows how to make a basic trellis histogram.%0Ahttps://vega.github.io/vega-lite/examples/trellis_bar_histogram.html%0A%22%22%22%0Aimport altair as alt%0A%0Acars = alt.load_dataset('cars')%0A%0Achart = alt.Chart(cars).mark_bar().encode(%0A x=alt.X(%22Horsepower%22,%0A type=%22quantitative%22,%0A bin=alt.BinTransform(%0A maxbins=15%0A )),%0A y='count(*):Q',%0A row='Origin'%0A)%0A
333453fe6a74d7bada941ee7aeed3660452efcaf
add tests
tests/install_tests/test_cupy_builder/test_command.py
tests/install_tests/test_cupy_builder/test_command.py
Python
0
@@ -0,0 +1,646 @@ +from cupy_builder._command import filter_files_by_extension%0A%0A%0Adef test_filter_files_by_extension():%0A sources_cpp = %5B'a.cpp', 'b.cpp'%5D%0A sources_pyx = %5B'c.pyx'%5D%0A sources = sources_cpp + sources_pyx%0A assert filter_files_by_extension(%0A sources, '.cpp') == (sources_cpp, sources_pyx)%0A assert filter_files_by_extension(%0A sources, '.pyx') == (sources_pyx, sources_cpp)%0A assert filter_files_by_extension(%0A sources, '.cu') == (%5B%5D, sources)%0A assert filter_files_by_extension(%0A sources_cpp, '.cpp') == (sources_cpp, %5B%5D)%0A assert filter_files_by_extension(%0A sources_cpp, '.pyx') == (%5B%5D, sources_cpp)%0A
76f5a60a9b9217519be132ad7fc86eb439ae9a3f
Fix `TypeError` in Python-3.2
powerline/bindings/zsh/__init__.py
powerline/bindings/zsh/__init__.py
# vim:fileencoding=utf-8:noet from __future__ import (unicode_literals, division, absolute_import, print_function) import atexit from weakref import WeakValueDictionary, ref import zsh from powerline.shell import ShellPowerline from powerline.lib.overrides import parsedotval, parse_override_var from powerline.lib.unicode import unicode, u from powerline.lib.encoding import (get_preferred_output_encoding, get_preferred_environment_encoding) from powerline.lib.dict import mergeargs used_powerlines = WeakValueDictionary() def shutdown(): for powerline in tuple(used_powerlines.values()): powerline.shutdown() def get_var_config(var): try: val = zsh.getvalue(var) if isinstance(val, dict): return mergeargs([parsedotval((u(k), u(v))) for k, v in val.items()]) elif isinstance(val, (unicode, str, bytes)): return mergeargs(parse_override_var(u(val))) else: return None except: return None class Args(object): __slots__ = ('last_pipe_status', 'last_exit_code') ext = ['shell'] renderer_module = '.zsh' @property def config_override(self): return get_var_config('POWERLINE_CONFIG_OVERRIDES') @property def theme_override(self): return get_var_config('POWERLINE_THEME_OVERRIDES') @property def config_path(self): try: ret = zsh.getvalue('POWERLINE_CONFIG_PATHS') except IndexError: return None else: if isinstance(ret, (unicode, str, bytes)): return ret.split(type(ret)(':')) else: return ret @property def jobnum(self): return zsh.getvalue('_POWERLINE_JOBNUM') def string(s): if type(s) is bytes: return s.decode(get_preferred_environment_encoding(), 'replace') else: return str(s) class Environment(object): @staticmethod def __getitem__(key): try: return string(zsh.getvalue(key)) except IndexError as e: raise KeyError(*e.args) @staticmethod def get(key, default=None): try: return string(zsh.getvalue(key)) except IndexError: return default @staticmethod def __contains__(key): try: zsh.getvalue(key) return True except IndexError: return False environ = getattr(zsh, 'environ', Environment()) class ZshPowerline(ShellPowerline): def init(self, **kwargs): super(ZshPowerline, self).init(Args(), **kwargs) def precmd(self): self.args.last_pipe_status = zsh.pipestatus() self.args.last_exit_code = zsh.last_exit_code() def do_setup(self, zsh_globals): set_prompt(self, 'PS1', 'left', None, above=True) set_prompt(self, 'RPS1', 'right', None) set_prompt(self, 'PS2', 'left', 'continuation') set_prompt(self, 'RPS2', 'right', 'continuation') set_prompt(self, 'PS3', 'left', 'select') used_powerlines[id(self)] = self zsh_globals['_powerline'] = self class Prompt(object): __slots__ = ('powerline', 'side', 'savedpsvar', 'savedps', 'args', 'theme', 'above', '__weakref__') def __init__(self, powerline, side, theme, savedpsvar=None, savedps=None, above=False): self.powerline = powerline self.side = side self.above = above self.savedpsvar = savedpsvar self.savedps = savedps self.args = powerline.args self.theme = theme def __str__(self): zsh.eval('_POWERLINE_PARSER_STATE="${(%):-%_}"') zsh.eval('_POWERLINE_SHORTENED_PATH="${(%):-%~}"') try: mode = u(zsh.getvalue('_POWERLINE_MODE')) except IndexError: mode = None segment_info = { 'args': self.args, 'environ': environ, 'client_id': 1, 'local_theme': self.theme, 'parser_state': zsh.getvalue('_POWERLINE_PARSER_STATE'), 'shortened_path': zsh.getvalue('_POWERLINE_SHORTENED_PATH'), 'mode': mode, } zsh.setvalue('_POWERLINE_PARSER_STATE', None) zsh.setvalue('_POWERLINE_SHORTENED_PATH', None) try: zle_rprompt_indent = zsh.getvalue('ZLE_RPROMPT_INDENT') except IndexError: zle_rprompt_indent = 1 r = '' 
if self.above: for line in self.powerline.render_above_lines( width=zsh.columns() - zle_rprompt_indent, segment_info=segment_info, ): r += line + '\n' r += self.powerline.render( width=zsh.columns(), side=self.side, segment_info=segment_info, mode=mode, ) if type(r) is not str: if type(r) is bytes: return r.decode(get_preferred_output_encoding(), 'replace') else: return r.encode(get_preferred_output_encoding(), 'replace') return r def __del__(self): if self.savedps: zsh.setvalue(self.savedpsvar, self.savedps) self.powerline.shutdown() def set_prompt(powerline, psvar, side, theme, above=False): try: savedps = zsh.getvalue(psvar) except IndexError: savedps = None zpyvar = 'ZPYTHON_POWERLINE_' + psvar prompt = Prompt(powerline, side, theme, psvar, savedps, above) zsh.eval('unset ' + zpyvar) zsh.set_special_string(zpyvar, prompt) zsh.setvalue(psvar, '${' + zpyvar + '}') return ref(prompt) def reload(): for powerline in tuple(used_powerlines.values()): powerline.reload() def reload_config(): for powerline in used_powerlines.values(): powerline.create_renderer(load_main=True, load_colors=True, load_colorscheme=True, load_theme=True) def setup(zsh_globals): powerline = ZshPowerline() powerline.setup(zsh_globals) atexit.register(shutdown)
Python
0.998627
@@ -1462,18 +1462,45 @@ lit( -type(ret)( +(b':' if isinstance(ret, bytes) else ':')
8a7963644ff470fc7da8bc22a7f3fd6ef1be3ed2
define password generator.
rio/utils/token.py
rio/utils/token.py
Python
0.000001
@@ -0,0 +1,314 @@ +# -*- coding: utf-8 -*-%0A%22%22%22%0Ario.utils.token%0A~~~~~~~~~~~~~~~%0A%22%22%22%0A%0Aimport random%0Aimport string%0A%0Adef password_generator(length):%0A %22%22%22Generate a random password.%0A%0A :param length: integer.%0A %22%22%22%0A return ''.join(random.choice(string.ascii_lowercase + string.digits)%0A for _ in range(length))%0A
1d4397860ffd297eb02b5f4b96b0e18a3c7e12cd
Add test recipes.
badgify/tests/recipes.py
badgify/tests/recipes.py
Python
0
@@ -0,0 +1,748 @@ +# -*- coding: utf-8 -*-%0Afrom ..recipe import BaseRecipe%0Afrom ..compat import get_user_model%0A%0A%0Aclass BadRecipe(object):%0A pass%0A%0A%0Aclass NotImplementedRecipe(BaseRecipe):%0A pass%0A%0A%0Aclass Recipe1(BaseRecipe):%0A name = 'Recipe 1'%0A slug = 'recipe1'%0A description = 'Recipe 1 description'%0A%0A @property%0A def image(self):%0A return 'image'%0A%0A @property%0A def user_ids(self):%0A return (get_user_model().objects.filter(love_python=True)%0A .values_list('id', flat=True))%0A%0A%0Aclass Recipe2(BaseRecipe):%0A name = 'Recipe 2'%0A slug = 'recipe2'%0A description = 'Recipe 2 description'%0A%0A @property%0A def image(self):%0A return 'image'%0A%0A @property%0A def user_ids(self):%0A return %5B%5D%0A
f6c07ad620f7e3ef73fdad5f2d97d1dd911e390f
Create signal.py
signal.py
signal.py
Python
0.000002
@@ -0,0 +1,854 @@ +#!/usr/bin/python3%0A# Send via Signal%0A%0Aimport os%0Afrom pydbus import SystemBus%0Afrom dateutil import parser%0Afrom datetime import datetime%0A%0Aeventhostname=os.environ.get (%22NOTIFY_HOSTNAME%22, %22%3Cno host%3E%22)%0AeventdatetimeString=os.environ.get (%22NOTIFY_LONGDATETIME%22, %22%3Cno time%3E%22)%0Aif eventdatetimeString == %22%3Cno time%3E%22:%0A eventdatetime = datetime(2017,1,1)%0Aelse:%0A eventdatetime = parser.parse (eventdatetimeString)%0A%0Aeventduration=os.environ.get (%22NOTIFY_LASTHOSTSTATECHANGE_REL%22, %22%3Cno duration%3E%22)%0Aeventhoststate=os.environ.get (%22NOTIFY_HOSTSTATE%22, %22%3Cno state%3E%22)%0Aeventlasthoststate=os.environ.get (%22NOTIFY_LASTHOSTSTATE%22, %22%3Cno last state%3E%22)%0A%0Abus = SystemBus()%0Asignal = bus.get (%22org.asamk.Signal%22)%0A%0Amessage = eventhostname + %22: %22 + eventhoststate + %22 after %22 + eventlasthoststate + %22 (%22 + eventdurati$%0A%0Asignal.sendMessage(message, %5B%5D, %5B'+XXXXXXXXXXXXXXXXXXX'%5D)%0A
269e9821a52f1d68e2a48beb76b20c227dc84b55
add 0007(count one file omitting multiple-line comment)
robot527/0007/count_codes.py
robot527/0007/count_codes.py
Python
0
@@ -0,0 +1,1455 @@ +#! usr/bin/python3%0A%22%22%22%0A%E7%AC%AC 0007 %E9%A2%98%EF%BC%9A%E6%9C%89%E4%B8%AA%E7%9B%AE%E5%BD%95%EF%BC%8C%E9%87%8C%E9%9D%A2%E6%98%AF%E4%BD%A0%E8%87%AA%E5%B7%B1%E5%86%99%E8%BF%87%E7%9A%84%E7%A8%8B%E5%BA%8F%EF%BC%8C%E7%BB%9F%E8%AE%A1%E4%B8%80%E4%B8%8B%E4%BD%A0%E5%86%99%E8%BF%87%E5%A4%9A%E5%B0%91%E8%A1%8C%E4%BB%A3%E7%A0%81%E3%80%82%0A%E5%8C%85%E6%8B%AC%E7%A9%BA%E8%A1%8C%E5%92%8C%E6%B3%A8%E9%87%8A%EF%BC%8C%E4%BD%86%E6%98%AF%E8%A6%81%E5%88%86%E5%88%AB%E5%88%97%E5%87%BA%E6%9D%A5%E3%80%82%0A%22%22%22%0A%0Aclass PyfileInfo:%0A%0A def __init__(self, file):%0A self.file_name = file%0A self.total_line_num = 0%0A self.blank_line_num = 0%0A self.comment_line_num = 0%0A %0A def count_lines(self):%0A if self.file_name%5B-3:%5D != '.py':%0A print(self.file_name + ' is not a .py file!')%0A return%0A try:%0A with open(self.file_name) as code:%0A for each_line in code:%0A self.total_line_num += 1%0A temp = each_line.strip()%0A if temp == '':%0A self.blank_line_num += 1%0A elif temp%5B0%5D == '#':%0A self.comment_line_num += 1%0A %0A except IOError as err:%0A print('File error: ' + str(err))%0A%0Aimport os%0Atarget_path = '.'%0Afile_list = %5Bf for f in os.listdir(target_path)%0A if os.path.isfile(os.path.join(target_path, f))%5D%0A#print(file_list, len(file_list))%0A%0Apyfile_list = %5Bos.path.join(target_path, f) for f in file_list%0A if f%5B-3:%5D == '.py'%5D%0A%0Aprint(pyfile_list%5B0%5D)%0A%0Apyf1 = PyfileInfo(pyfile_list%5B0%5D)%0Apyf1.count_lines()%0A#pyf2 = PyfileInfo('test.c')%0A#pyf2.count_lines()%0A%0Aprint('==' * 18)%0Aprint('Total line number is:', pyf1.total_line_num)%0Aprint('Blank line number is:', pyf1.blank_line_num)%0Aprint('Comment line number is:', pyf1.comment_line_num)%0A
74d8390dce4dd5a8fe8a6f7f4304b80afadfec1d
Add basic calendar framework
app/calender.py
app/calender.py
Python
0.000002
@@ -0,0 +1,320 @@ +class CalenderManager:%0A def __init__(self):%0A pass%0A %0A def add_event(self, user, starttime, endtime):%0A pass%0A %0A def get_current_event(self):%0A #Return NONE if no event%0A pass%0A %0A def remove_event(self, id):%0A pass%0A %0A def get_upcoming_events(self, num):%0A pass
68e4b7628da642799d5fccec8ae96d875d1d8089
Fix for id not showing up in verbose lists of concepts
django-nonrel/ocl/concepts/serializers.py
django-nonrel/ocl/concepts/serializers.py
from django.core.validators import RegexValidator from rest_framework import serializers from collection.fields import ConceptReferenceField from concepts.fields import LocalizedTextListField from concepts.models import Concept, ConceptVersion, ConceptReference from oclapi.fields import HyperlinkedRelatedField from oclapi.models import NAMESPACE_REGEX from oclapi.serializers import ResourceVersionSerializer from sources.models import Source class ConceptListSerializer(serializers.Serializer): id = serializers.CharField(source='mnemonic') concept_class = serializers.CharField() datatype = serializers.CharField() url = serializers.URLField() retired = serializers.BooleanField() source = serializers.CharField(source='parent_resource') owner = serializers.CharField(source='owner_name') owner_type = serializers.CharField() owner_url = serializers.URLField() display_name = serializers.CharField() display_locale = serializers.CharField() class Meta: model = Concept class ConceptDetailSerializer(serializers.Serializer): concept_class = serializers.CharField() datatype = serializers.CharField() display_name = serializers.CharField() display_locale = serializers.CharField() url = serializers.URLField() names = LocalizedTextListField() descriptions = LocalizedTextListField() extras = serializers.WritableField() retired = serializers.BooleanField() source = serializers.CharField(source='parent_resource') owner = serializers.CharField(source='owner_name') owner_type = serializers.CharField() owner_url = serializers.URLField() class Meta: model = Concept class ConceptCreateSerializer(serializers.Serializer): id = serializers.CharField(required=True, validators=[RegexValidator(regex=NAMESPACE_REGEX)], source='mnemonic') concept_class = serializers.CharField() datatype = serializers.CharField(required=False) names = LocalizedTextListField(required=True) descriptions = LocalizedTextListField(required=False) extras = serializers.WritableField(required=False) class Meta: model = Concept lookup_field = 'mnemonic' def restore_object(self, attrs, instance=None): concept = instance if instance else Concept() concept.mnemonic = attrs.get(self.Meta.lookup_field, concept.mnemonic) concept.concept_class = attrs.get('concept_class', concept.concept_class) concept.datatype = attrs.get('datatype', concept.datatype) concept.extras = attrs.get('extras', concept.extras) concept.names = attrs.get('names', concept.names) # Is this desired behavior?? concept.descriptions = attrs.get('descriptions', concept.descriptions) # Is this desired behavior?? 
return concept def save_object(self, obj, **kwargs): errors = Concept.persist_new(obj, **kwargs) self._errors.update(errors) class ConceptVersionListSerializer(ResourceVersionSerializer): id = serializers.CharField(source='name') concept_class = serializers.CharField() datatype = serializers.CharField() retired = serializers.BooleanField() source = serializers.CharField(source='parent_resource') owner = serializers.CharField(source='owner_name') owner_type = serializers.CharField() owner_url = serializers.URLField() display_name = serializers.CharField() display_locale = serializers.CharField() version = serializers.CharField(source='mnemonic') extras = serializers.WritableField() class Meta: model = ConceptVersion versioned_object_field_name = 'url' versioned_object_view_name = 'concept-detail' class ConceptVersionDetailSerializer(ResourceVersionSerializer): type = serializers.CharField(source='versioned_object_type') uuid = serializers.CharField(source='id') id = serializers.CharField(source='name') concept_class = serializers.CharField() datatype = serializers.CharField() display_name = serializers.CharField() display_locale = serializers.CharField() names = LocalizedTextListField() descriptions = LocalizedTextListField() extras = serializers.WritableField() retired = serializers.BooleanField() source = serializers.CharField(source='parent_resource') source_url = HyperlinkedRelatedField(source='source', view_name='source-detail', queryset=Source.objects.all()) owner = serializers.CharField(source='owner_name') owner_type = serializers.CharField() owner_url = serializers.URLField() mappings_url = serializers.URLField() version = serializers.CharField(source='mnemonic') class Meta: model = ConceptVersion versioned_object_field_name = 'url' versioned_object_view_name = 'concept-detail' class ConceptVersionUpdateSerializer(serializers.Serializer): concept_class = serializers.CharField(required=True) datatype = serializers.CharField(required=False) names = LocalizedTextListField(required=True) descriptions = LocalizedTextListField(required=False) extras = serializers.WritableField(required=False) class Meta: model = ConceptVersion def restore_object(self, attrs, instance=None): instance.concept_class = attrs.get('concept_class', instance.concept_class) instance.datatype = attrs.get('datatype', instance.datatype) instance.extras = attrs.get('extras', instance.extras) instance.names = attrs.get('names', instance.names) # Is this desired behavior?? instance.descriptions = attrs.get('descriptions', instance.descriptions) # Is this desired behavior?? 
return instance def save_object(self, obj, **kwargs): errors = ConceptVersion.persist_clone(obj, **kwargs) self._errors.update(errors) class ConceptReferenceCreateSerializer(serializers.Serializer): url = ConceptReferenceField(source='concept', required=True, view_name='conceptversion-detail', lookup_kwarg='concept_version', queryset=ConceptVersion.objects.all()) id = serializers.CharField(source='mnemonic', required=False) class Meta: model = ConceptReference lookup_field = 'mnemonic' def restore_object(self, attrs, instance=None): concept_reference = instance if instance else ConceptReference() concept = attrs.get('concept', None) if concept: concept_reference.concept = concept if hasattr(concept, '_concept_version'): concept_reference.concept_version = concept._concept_version elif hasattr(concept, '_source_version'): concept_reference.source_version = concept._source_version concept_reference.mnemonic = attrs.get(self.Meta.lookup_field, concept_reference.mnemonic) if not concept_reference.mnemonic: concept_reference.mnemonic = "%s..%s" % (concept_reference.concept.parent, concept_reference.concept) return concept_reference def save_object(self, obj, **kwargs): errors = ConceptReference.persist_new(obj, **kwargs) self._errors.update(errors) class ConceptReferenceDetailSerializer(serializers.Serializer): id = serializers.CharField(source='mnemonic') concept_reference_url = serializers.URLField(read_only=True) concept_class = serializers.CharField(read_only=True) data_type = serializers.CharField(read_only=True) source = serializers.CharField(read_only=True) owner = serializers.CharField(read_only=True, source='owner_name') owner_type = serializers.CharField(read_only=True) owner_url = serializers.URLField(read_only=True) display_name = serializers.CharField(read_only=True) display_locale = serializers.CharField(read_only=True) version = serializers.CharField(read_only=True, source='concept_version') is_current_version = serializers.BooleanField(read_only=True) class Meta: model = ConceptReference lookup_field = 'mnemonic'
Python
0
@@ -1074,32 +1074,82 @@ rs.Serializer):%0A + id = serializers.CharField(source='mnemonic')%0A concept_clas
23086155315b39e814a1a73b49c80b19cbdb8e12
476. Number Complement
p476.py
p476.py
Python
0.999999
@@ -0,0 +1,600 @@ +import unittest%0A%0A%0Aclass Solution(object):%0A def findComplement(self, num):%0A %22%22%22%0A :type num: int%0A :rtype: int%0A %22%22%22%0A mask = num%0A mask %7C= mask %3E%3E 1%0A mask %7C= mask %3E%3E 2%0A mask %7C= mask %3E%3E 4%0A mask %7C= mask %3E%3E 8%0A mask %7C= mask %3E%3E 16%0A return num %5E mask%0A%0A%0Aclass Test(unittest.TestCase):%0A def test(self):%0A self._test(5, 2)%0A self._test(1, 0)%0A%0A def _test(self, num, expected):%0A actual = Solution().findComplement(num)%0A self.assertEqual(actual, expected)%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
af19c62cfa27f27cd37bf558ac77a7845dff7754
Create generate_chapters.py
sandbox/generate_chapters.py
sandbox/generate_chapters.py
Python
0.000009
@@ -0,0 +1,367 @@ +@language python%0A%0AVOLUME = 'II'%0ATOTAL_CHAPTERS = 42%0AURL = %22http://www.feynmanlectures.caltech.edu/%7B0%7D_%7B1:02%7D.html%22%0A%0Acopied_position = p.copy()%0A%0Afor index in range(1, TOTAL_CHAPTERS+1):%0A new_node = copied_position.insertAsLastChild()%0A new_node.h = %22@chapter %7B0%7D Chapter %7B1:02%7D - %22.format(VOLUME, index)%0A new_node.b = URL.format(VOLUME, index)%0A%0Ac.redraw_now()%0A
48115d48da43f7f4517d8f55edee95d6c9e7cc45
Create saveGraphToFile.py
save-load/saveGraphToFile.py
save-load/saveGraphToFile.py
Python
0.000001
@@ -0,0 +1,1031 @@ +from py2neo import Graph, Node, Relationship%0Agraph = Graph()%0A%0A# Get username%0Ausr_name = input(%22Enter username: %22)%0Aprint (%22Username: %22 + usr_name)%0A# Get user_id%0Ausr_id = input(%22Enter user id: %22)%0Aprint (%22User id: %22 + usr_id)%0A%0Aprint(%22Nodes%22)%0AresultsAllNodes = graph.cypher.execute(%22MATCH (n) RETURN n%22)%0Aprint(resultsAllNodes)%0Aprint(%22Nodes plus relationships%22)%0AresultsAll = graph.cypher.execute(%22START n=node(*) MATCH (n)-%5Br%5D-%3E(m) RETURN n,r,m%22)%0Aprint(resultsAll)%0A%0Aquery1 = %22MATCH (user %7B username:'%22%0Aquery1 = query1 + usr_name%0Aquery1 = query1 + %22' %7D)-%5B:%22%0Aquery1 = query1 + usr_id%0Aquery1 = query1 + %22%5D-%3E(n) RETURN n%22%0A%0Aresults = graph.cypher.execute(query1);%0Aprint(results)%0A%0Aquery2 = %22MATCH (user %7B username:'%22%0Aquery2 = query2 + usr_name%0Aquery2 = query2 + %22' %7D)-%5B:%22%0Aquery2 = query2 + usr_id%0Aquery2 = query2 + %22%5D-%3E(n)-%5Br%5D-%3E(m) RETURN r%22%0A%0Aresults2 = graph.cypher.execute(query2);%0Aprint(results2)%0A%0Af = open('graph.txt', 'w')%0A%0Af.write(usr_name+%22%5Cn%22)%0Af.write(usr_id+%22%5Cn%22)%0As = str(results)%0As2 = str(results2)%0Af.write(s)%0Af.write(s2)%0Af.close()%0A
34fff4bf13fa2c4d481a06339981db08239138ae
add test case of petitlyrics
lyric_engine/tests/test_petitlyrics.py
lyric_engine/tests/test_petitlyrics.py
Python
0.002379
@@ -0,0 +1,1059 @@ +# coding: utf-8%0Aimport os%0Aimport sys%0Amodule_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'modules')%0Asys.path.append(module_dir)%0A%0Aimport unittest%0Afrom petitlyrics import PetitLyrics as Lyric%0A%0Aclass PetitLyricsTest(unittest.TestCase):%0A def test_url_01(self):%0A url = 'http://petitlyrics.com/lyrics/34690'%0A obj = Lyric(url)%0A obj.parse()%0A%0A self.assertEqual(obj.title, u'Tune The Rainbow')%0A self.assertEqual(obj.artist, u'%E5%9D%82%E6%9C%AC %E7%9C%9F%E7%B6%BE')%0A self.assertEqual(obj.lyricist, u'%E5%B2%A9%E9%87%8C %E7%A5%90%E7%A9%82')%0A self.assertEqual(obj.composer, u'%E8%8F%85%E9%87%8E %E3%82%88%E3%81%86%E5%AD%90')%0A self.assertEqual(len(obj.lyric), 819)%0A%0A def test_url_02(self):%0A url = 'http://petitlyrics.com/lyrics/936622'%0A obj = Lyric(url)%0A obj.parse()%0A%0A self.assertEqual(obj.title, u'RPG')%0A self.assertEqual(obj.artist, u'SEKAI NO OWARI')%0A self.assertEqual(obj.lyricist, u'Saori/Fukase')%0A self.assertEqual(obj.composer, u'Fukase')%0A self.assertEqual(len(obj.lyric), 933)%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
5436ba117b687eb3bd4e7afa69d5a67935056342
remove deprecated code from FeedExporter
scrapy/contrib/feedexport.py
scrapy/contrib/feedexport.py
""" Feed Exports extension See documentation in docs/topics/feed-exports.rst """ import sys, os, posixpath from tempfile import TemporaryFile from datetime import datetime from six.moves.urllib.parse import urlparse from ftplib import FTP from zope.interface import Interface, implementer from twisted.internet import defer, threads from w3lib.url import file_uri_to_path from scrapy import log, signals from scrapy.utils.ftp import ftp_makedirs_cwd from scrapy.exceptions import NotConfigured from scrapy.utils.misc import load_object from scrapy.utils.python import get_func_args class IFeedStorage(Interface): """Interface that all Feed Storages must implement""" def __init__(uri): """Initialize the storage with the parameters given in the URI""" def open(spider): """Open the storage for the given spider. It must return a file-like object that will be used for the exporters""" def store(file): """Store the given file stream""" @implementer(IFeedStorage) class BlockingFeedStorage(object): def open(self, spider): return TemporaryFile(prefix='feed-') def store(self, file): return threads.deferToThread(self._store_in_thread, file) def _store_in_thread(self, file): raise NotImplementedError @implementer(IFeedStorage) class StdoutFeedStorage(object): def __init__(self, uri, _stdout=sys.stdout): self._stdout = _stdout def open(self, spider): return self._stdout def store(self, file): pass @implementer(IFeedStorage) class FileFeedStorage(object): def __init__(self, uri): self.path = file_uri_to_path(uri) def open(self, spider): dirname = os.path.dirname(self.path) if dirname and not os.path.exists(dirname): os.makedirs(dirname) return open(self.path, 'ab') def store(self, file): file.close() class S3FeedStorage(BlockingFeedStorage): def __init__(self, uri): from scrapy.conf import settings try: import boto except ImportError: raise NotConfigured self.connect_s3 = boto.connect_s3 u = urlparse(uri) self.bucketname = u.hostname self.access_key = u.username or settings['AWS_ACCESS_KEY_ID'] self.secret_key = u.password or settings['AWS_SECRET_ACCESS_KEY'] self.keyname = u.path def _store_in_thread(self, file): file.seek(0) conn = self.connect_s3(self.access_key, self.secret_key) bucket = conn.get_bucket(self.bucketname, validate=False) key = bucket.new_key(self.keyname) key.set_contents_from_file(file) key.close() class FTPFeedStorage(BlockingFeedStorage): def __init__(self, uri): u = urlparse(uri) self.host = u.hostname self.port = int(u.port or '21') self.username = u.username self.password = u.password self.path = u.path def _store_in_thread(self, file): file.seek(0) ftp = FTP() ftp.connect(self.host, self.port) ftp.login(self.username, self.password) dirname, filename = posixpath.split(self.path) ftp_makedirs_cwd(ftp, dirname) ftp.storbinary('STOR %s' % filename, file) ftp.quit() class SpiderSlot(object): def __init__(self, file, exporter, storage, uri): self.file = file self.exporter = exporter self.storage = storage self.uri = uri self.itemcount = 0 class FeedExporter(object): def __init__(self, settings): self.settings = settings self.urifmt = settings['FEED_URI'] if not self.urifmt: raise NotConfigured self.format = settings['FEED_FORMAT'].lower() self.storages = self._load_components('FEED_STORAGES') self.exporters = self._load_components('FEED_EXPORTERS') if not self._storage_supported(self.urifmt): raise NotConfigured if not self._exporter_supported(self.format): raise NotConfigured self.store_empty = settings.getbool('FEED_STORE_EMPTY') uripar = settings['FEED_URI_PARAMS'] self._uripar = 
load_object(uripar) if uripar else lambda x, y: None @classmethod def from_crawler(cls, crawler): if len(get_func_args(cls)) < 1: # FIXME: remove for scrapy 0.17 import warnings from scrapy.exceptions import ScrapyDeprecationWarning warnings.warn("%s must receive a settings object as first constructor argument." % cls.__name__, ScrapyDeprecationWarning, stacklevel=2) o = cls() else: o = cls(crawler.settings) crawler.signals.connect(o.open_spider, signals.spider_opened) crawler.signals.connect(o.close_spider, signals.spider_closed) crawler.signals.connect(o.item_scraped, signals.item_scraped) return o def open_spider(self, spider): uri = self.urifmt % self._get_uri_params(spider) storage = self._get_storage(uri) file = storage.open(spider) exporter = self._get_exporter(file) exporter.start_exporting() self.slot = SpiderSlot(file, exporter, storage, uri) def close_spider(self, spider): slot = self.slot if not slot.itemcount and not self.store_empty: return slot.exporter.finish_exporting() logfmt = "%%s %s feed (%d items) in: %s" % (self.format, \ slot.itemcount, slot.uri) d = defer.maybeDeferred(slot.storage.store, slot.file) d.addCallback(lambda _: log.msg(logfmt % "Stored", spider=spider)) d.addErrback(log.err, logfmt % "Error storing", spider=spider) return d def item_scraped(self, item, spider): slot = self.slot slot.exporter.export_item(item) slot.itemcount += 1 return item def _load_components(self, setting_prefix): conf = dict(self.settings['%s_BASE' % setting_prefix]) conf.update(self.settings[setting_prefix]) d = {} for k, v in conf.items(): try: d[k] = load_object(v) except NotConfigured: pass return d def _exporter_supported(self, format): if format in self.exporters: return True log.msg("Unknown feed format: %s" % format, log.ERROR) def _storage_supported(self, uri): scheme = urlparse(uri).scheme if scheme in self.storages: try: self._get_storage(uri) return True except NotConfigured: log.msg("Disabled feed storage scheme: %s" % scheme, log.ERROR) else: log.msg("Unknown feed storage scheme: %s" % scheme, log.ERROR) def _get_exporter(self, *a, **kw): return self.exporters[self.format](*a, **kw) def _get_storage(self, uri): return self.storages[urlparse(uri).scheme](uri) def _get_uri_params(self, spider): params = {} for k in dir(spider): params[k] = getattr(spider, k) ts = datetime.utcnow().replace(microsecond=0).isoformat().replace(':', '-') params['time'] = ts self._uripar(params, spider) return params
Python
0.000001
@@ -4316,392 +4316,8 @@ r):%0A - if len(get_func_args(cls)) %3C 1:%0A # FIXME: remove for scrapy 0.17%0A import warnings%0A from scrapy.exceptions import ScrapyDeprecationWarning%0A warnings.warn(%22%25s must receive a settings object as first constructor argument.%22 %25 cls.__name__,%0A ScrapyDeprecationWarning, stacklevel=2)%0A o = cls()%0A else:%0A
16fd4ba06b6da8ec33a83a8cfe2e38a130fb47b3
Add a module for common plotting routines that will be used.
plot.py
plot.py
Python
0
@@ -0,0 +1,778 @@ +#!/usr/bin/env python%0A%22%22%22%0A plot.py%0A %0A State Estimation and Analysis for PYthon%0A%0A Module with plotting utilities%0A%0A Written by Brian Powell on 10/18/13%0A Copyright (c)2013 University of Hawaii under the BSD-License.%0A%22%22%22%0Afrom __future__ import print_function%0A%0Aimport numpy as np%0Afrom scipy import ndimage%0Aimport os%0Aimport re%0Afrom matplotlib import pyplot as plt%0A%0Adef stackbar(x, y, colors=None, **kwargs):%0A %22%22%22%0A Given an array of vectors in y, draw a bar chart for each one stacked on%0A the prior.%0A %22%22%22%0A s=y%5B0,:%5D%0A if colors is None:%0A colors = %5B %22%22 for i in range(0,y.shape%5B0%5D) %5D%0A plt.bar(x, y%5B0,:%5D, color=colors%5B0%5D, **kwargs)%0A for i in range(1,y.shape%5B0%5D):%0A plt.bar(x, y%5Bi,:%5D, color=colors%5Bi%5D, bottom=s, **kwargs)%0A s=s+y%5Bi,:%5D%0A%0A %0A
cbbc6d943ebc4f7e1efa84f2c0b5d976de21101d
Add "imported from" source using a hardcoded list of items
scripts/claimit.py
scripts/claimit.py
# -*- coding: utf-8 -*- """ Copyright (C) 2013 Legoktm Copyright (C) 2013 Pywikipediabot team Distributed under the MIT License Usage: python claimit.py [pagegenerators] P1 Q2 P123 Q456 You can use any typical pagegenerator to provide with a list of pages Then list the property-->target pairs to add. """ import pywikibot from pywikibot import pagegenerators repo = pywikibot.Site().data_repository() def addClaims(page, claims): ''' The function will add the claims to the wikibase page ''' item = pywikibot.ItemPage.fromPage(page) pywikibot.output('Processing %s' % page) if not item.exists(): pywikibot.output('%s doesn\'t have a wikidata item :(' % page) #TODO FIXME: We should provide an option to create the page return False for claim in claims: if claim.getID() in item.get().get('claims'): pywikibot.output(u'A claim for %s already exists. Skipping' % (claim.getID(),)) #TODO FIXME: This is a very crude way of dupe checking else: pywikibot.output('Adding %s --> %s' % (claim.getID(), claim.getTarget().getID())) item.addClaim(claim) #TODO FIXME: We should add a source for each claim that is added #TODO FIXME: We need to check that we aren't adding a duplicate def main(): gen = pagegenerators.GeneratorFactory() commandline_claims = list() for arg in pywikibot.handleArgs(): if gen.handleArg(arg): continue commandline_claims.append(arg) if len(commandline_claims) % 2: raise ValueError # or something. claims = list() for i in xrange (0, len(commandline_claims), 2): claim = pywikibot.Claim(repo, commandline_claims[i]) claim.setTarget(pywikibot.ItemPage(repo, commandline_claims[i+1])) claims.append(claim) generator = gen.getCombinedGenerator() if generator: for page in generator: addClaims(page, claims) if __name__ == "__main__": main()
Python
0.000051
@@ -401,16 +401,1242 @@ tory()%0A%0A +source_values = %7B'en': pywikibot.ItemPage(repo, 'Q328'),%0A 'sv': pywikibot.ItemPage(repo, 'Q169514'),%0A 'de': pywikibot.ItemPage(repo, 'Q48183'),%0A 'it': pywikibot.ItemPage(repo, 'Q11920'),%0A 'no': pywikibot.ItemPage(repo, 'Q191769'),%0A 'ar': pywikibot.ItemPage(repo, 'Q199700'),%0A 'es': pywikibot.ItemPage(repo, 'Q8449'),%0A 'pl': pywikibot.ItemPage(repo, 'Q1551807'),%0A 'ca': pywikibot.ItemPage(repo, 'Q199693'),%0A 'fr': pywikibot.ItemPage(repo, 'Q8447'),%0A 'nl': pywikibot.ItemPage(repo, 'Q10000'),%0A 'pt': pywikibot.ItemPage(repo, 'Q11921'),%0A 'ru': pywikibot.ItemPage(repo, 'Q206855'),%0A 'vi': pywikibot.ItemPage(repo, 'Q200180'),%0A 'be': pywikibot.ItemPage(repo, 'Q877583'),%0A 'uk': pywikibot.ItemPage(repo, 'Q199698'),%0A 'tr': pywikibot.ItemPage(repo, 'Q58255'),%0A %7D # TODO: This should include all projects%0A%0Aimported_from = pywikibot.Claim(repo, 'p143')%0Asource = source_values.get(pywikibot.Site().language(), None)%0Aif source:%0A imported_from.setTarget(source)%0A %0Adef add @@ -2403,72 +2403,75 @@ -#TODO FIXME: We +if s -h ou -ld add a source for each claim that is added +rce:%0A claim.addSource(imported_from, bot=True) %0A
94d40dfcf574d61df7def99a43d5b9fa0c75e244
Add py solution for 406. Queue Reconstruction by Height
py/queue-reconstruction-by-height.py
py/queue-reconstruction-by-height.py
Python
0.000749
@@ -0,0 +1,1139 @@ +from collections import defaultdict%0Aclass Solution(object):%0A def insert(self, now, p, front):%0A lsize = 0 if now.left is None else now.left.val%5B1%5D%0A if front %3C= lsize:%0A if now.left is None:%0A now.left = TreeNode((p, 1))%0A else:%0A self.insert(now.left, p, front)%0A else:%0A if now.right is None:%0A now.right = TreeNode((p, 1))%0A else:%0A self.insert(now.right, p, front - lsize - 1)%0A now.val = (now.val%5B0%5D, now.val%5B1%5D + 1)%0A%0A def inOrder(self, cur):%0A if cur:%0A for x in self.inOrder(cur.left):%0A yield x%0A yield cur.val%5B0%5D%0A for x in self.inOrder(cur.right):%0A yield x%0A%0A def reconstructQueue(self, people):%0A %22%22%22%0A :type people: List%5BList%5Bint%5D%5D%0A :rtype: List%5BList%5Bint%5D%5D%0A %22%22%22%0A if not people:%0A return people%0A people.sort(key=lambda x:(-x%5B0%5D, x%5B1%5D))%0A root = TreeNode((people%5B0%5D, 1))%0A for p in people%5B1:%5D:%0A self.insert(root, p, p%5B1%5D)%0A return list(self.inOrder(root))%0A%0A
8fb97711dd84512a8a654de3dca2bee24689a2a7
add a test for pytestmark
pytest_tornado/test/test_fixtures.py
pytest_tornado/test/test_fixtures.py
Python
0.000008
@@ -0,0 +1,577 @@ +import pytest%0Afrom tornado import gen%0A%0A_used_fixture = False%0A%0A%[email protected]%0Adef dummy(io_loop):%0A yield gen.Task(io_loop.add_callback)%0A raise gen.Return(True)%0A%0A%[email protected](scope='module')%0Adef preparations():%0A global _used_fixture%0A _used_fixture = True%0A%0A%0Apytestmark = pytest.mark.usefixtures('preparations')%0A%0A%[email protected](pytest.__version__ %3C '2.7.0',%0A reason='py.test 2.7 adds hookwrapper, fixes collection')%[email protected]_test%0Adef test_uses_pytestmark_fixtures(io_loop):%0A assert (yield dummy(io_loop))%0A assert _used_fixture%0A
76ce9117ed92a743734cd5ba7e209617a7664ad1
Add partial benchmarking file for gala
benchmarks/bench_gala.py
benchmarks/bench_gala.py
Python
0
@@ -0,0 +1,1556 @@ +import os%0A%0Afrom gala import imio, features, agglo, classify%0A%0A%0Arundir = os.path.dirname(__file__)%0Add = os.path.abspath(os.path.join(rundir, '../tests/example-data'))%0A%0A%0Aem3d = features.default.paper_em()%0A%0A%0Adef setup_trdata():%0A wstr = imio.read_h5_stack(os.path.join(dd, 'train-ws.lzf.h5'))%0A prtr = imio.read_h5_stack(os.path.join(dd, 'train-p1.lzf.h5'))%0A gttr = imio.read_h5_stack(os.path.join(dd, 'train-gt.lzf.h5'))%0A return wstr, prtr, gttr%0A%0A%0Adef setup_tsdata():%0A wsts = imio.read_h5_stack(os.path.join(dd, 'test-ws.lzf.h5'))%0A prts = imio.read_h5_stack(os.path.join(dd, 'test-p1.lzf.h5'))%0A gtts = imio.read_h5_stack(os.path.join(dd, 'test-gt.lzf.h5'))%0A return wsts, prts, gtts%0A%0A%0Adef setup_trgraph():%0A ws, pr, ts = setup_trdata()%0A g = agglo.Rag(ws, pr, feature_manager=em3d)%0A return g%0A%0A%0Adef setup_tsgraph():%0A ws, pr, ts = setup_tsdata()%0A g = agglo.Rag(ws, pr, feature_manager=em3d)%0A return g%0A%0A%0Adef setup_trexamples():%0A gt = imio.read_h5_stack(os.path.join(dd, 'train-gt.lzf.h5'))%0A g = setup_trgraph()%0A (X, y, w, e), _ = g.learn_agglomerate(gt, em3d, min_num_epochs=5)%0A y = y%5B:, 0%5D%0A return X, y%0A%0A%0Adef setup_classifier():%0A X, y = setup_trexamples()%0A rf = classify.DefaultRandomForest()%0A rf.fit(X, y)%0A return rf%0A%0A%0Adef setup_policy():%0A rf = classify.DefaultRandomForest()%0A cl = agglo.classifier_probability(em3d, rf)%0A return cl%0A%0A%0Adef setup_tsgraph_queue():%0A g = setup_tsgraph()%0A cl = setup_policy()%0A g.merge_priority_function = cl%0A g.rebuild_merge_queue()%0A return g%0A%0A%0A
875015d263e5942cebf223ea5c85f8fe35d9f561
Version of gen that doesn't use itertools.
gen2.py
gen2.py
Python
0
@@ -0,0 +1,1648 @@ +'''%0ASeveral functions for generating subets of sigma star (in list form). Namely:%0Asigma_k, sigma_0k, sigma_1k, each of which takes an alphabet and a value for k.%0AUnlike gen.py, gen2.py does not import itertools.py.%0A'''%0A%0Adef sigma_k(alphabet, k):%0A '''returns words in sigma-k'''%0A def helper(i):%0A if i == 0:%0A ilang = %5B''%5D%0A else:%0A ilang = %5B%5D%0A hlang = sigma_k(alphabet, k - 1)%0A for word in hlang:%0A ilang += %5Bword + a for a in alphabet%5D%0A return ilang%0A return helper(k)%0A%0Adef sigma_0k(alphabet, k):%0A '''Return all words in all sigma-n from n=1 to k.'''%0A assert k %3E= 0%0A def helper(i):%0A language = %5B%5D%0A if i == 0:%0A ilang = %5B''%5D%0A language = %5B''%5D%0A else:%0A ilang = %5B%5D%0A hlang, language = helper(i - 1)%0A for word in hlang:%0A ilang += %5Bword + a for a in alphabet%5D%0A language += %5Bword + a for a in alphabet%5D%0A return (ilang, language)%0A return helper(k)%5B1%5D%0A%0Adef sigma_1k(alphabet, k):%0A '''Return all words in all sigma-n from n=1 to k.'''%0A assert k %3E 0%0A def helper(i):%0A language = %5B%5D%0A if i == 0:%0A ilang = %5B''%5D%0A language = %5B%5D #Only difference for sigma 1k%0A else:%0A ilang = %5B%5D%0A hlang, language = helper(i - 1)%0A for word in hlang:%0A ilang += %5Bword + a for a in alphabet%5D%0A language += %5Bword + a for a in alphabet%5D%0A return (ilang, language)%0A return helper(k)%5B1%5D%0A%0A# Example alphabet%0Acv = %5B'C','V'%5D%0A%0A# Example language%0Acv6 = sigma_0k(cv,6)%0A
180faadb24bf3b4d153f1c46c4883bdcc0b987ff
add a manifest (.cvmfspublished) abstraction class
python/cvmfs/manifest.py
python/cvmfs/manifest.py
Python
0.000001
@@ -0,0 +1,2603 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%22%22%22%0ACreated by Ren%C3%A9 Meusel%0AThis file is part of the CernVM File System auxiliary tools.%0A%22%22%22%0A%0Aimport datetime%0A%0Aclass UnknownManifestField:%0A def __init__(self, key_char):%0A self.key_char = key_char%0A%0A def __str__(self):%0A return self.key_char%0A%0Aclass ManifestValidityError:%0A def __init__(self, message):%0A Exception.__init__(self, message)%0A%0A%0Aclass Manifest:%0A %22%22%22 Wraps information from .cvmfspublished%22%22%22%0A%0A def __init__(self, manifest_file):%0A %22%22%22 Initializes a Manifest object from a file pointer to .cvmfspublished %22%22%22%0A for line in manifest_file.readlines():%0A if len(line) == 0:%0A continue%0A if line%5B0:2%5D == %22--%22:%0A break%0A self._read_line(line)%0A self._check_validity()%0A%0A%0A def __str__(self):%0A return %22%3CManifest for %22 + self.repository_name + %22%3E%22%0A%0A%0A def __repr__(self):%0A return self.__str__()%0A%0A%0A def _read_line(self, line):%0A %22%22%22 Parse lines that appear in .cvmfspublished %22%22%22%0A key_char = line%5B0%5D%0A data = line%5B1:-1%5D%0A if key_char == %22C%22:%0A self.root_catalog = data%0A elif key_char == %22X%22:%0A self.certificate = data%0A elif key_char == %22H%22:%0A self.history_database = data%0A elif key_char == %22T%22:%0A self.last_modified = datetime.datetime.fromtimestamp(int(data))%0A elif key_char == %22R%22:%0A self.root_hash = data%0A elif key_char == %22D%22:%0A self.ttl = int(data)%0A elif key_char == %22S%22:%0A self.revision = int(data)%0A elif key_char == %22N%22:%0A self.repository_name = data%0A elif key_char == %22L%22:%0A self.unknown_field1 = data # TODO: ask Jakob what L means%0A else:%0A raise UnknownManifestField(key_char)%0A%0A%0A def _check_validity(self):%0A %22%22%22 Checks that all mandatory fields are found in .cvmfspublished %22%22%22%0A if not hasattr(self, 'root_catalog'):%0A raise ManifestValidityError(%22Manifest lacks a root catalog entry%22)%0A if not hasattr(self, 'root_hash'):%0A raise ManifestValidityError(%22Manifest lacks a root hash entry%22)%0A if not hasattr(self, 'ttl'):%0A raise ManifestValidityError(%22Manifest lacks a TTL entry%22)%0A if not hasattr(self, 'revision'):%0A raise ManifestValidityError(%22Manifest lacks a revision entry%22)%0A if not hasattr(self, 'repository_name'):%0A raise ManifestValidityError(%22Manifest lacks a repository name%22)%0A
e044dceeb4f6dd91a1e29228cde7906a114f36ba
add ping-listener.py
src/ping-listener.py
src/ping-listener.py
Python
0
@@ -0,0 +1,696 @@ +#!/usr/bin/python%0A%0A# This tool is for educational use only!%0A%0A# Description: Listen on a networkinterface for incomming pings (ICMP packets)%0A# and display this pings on the console%0A%0A# Requirements: scapy + root privileges%0A%0Aimport sys%0Afrom scapy.all import *%0Afrom pprint import *%0A%0Adef printusage():%0A %22%22%22 Prints usage information %22%22%22%0A print %22Usage: %7B0%7D %3Ciface%3E%22.format(sys.argv%5B0%5D)%0A print %22 ---%3E This tool is for educational use only! %3C---%22%0A%0A%0Aif len(sys.argv) %3C 2:%0A printusage()%0A sys.exit(1)%0A%0Adef icmp_callback(packet):%0A # print the whole networkpacket object on the console%0A # TODO: Optimize output...%0A pprint(packet)%0A%0Asniff(prn=icmp_callback, filter=%22icmp%22, iface=sys.argv%5B1%5D, store=0)%0A
942b7c519a07a84c7f26077b78c23c60174e1141
Add VCF precalculator
scripts/precalc.py
scripts/precalc.py
Python
0.000001
@@ -0,0 +1,1799 @@ +# -*- coding: utf-8 -*-%0A'''%0A%09Earth Engine precalculator for CLASlite%0A%09Requires Python 2.6+%0A%09%0A%09Public Domain where allowed, otherwise:%0A%09Copyright 2010 Michael Geary - http://mg.to/%0A%09Use under MIT, GPL, or any Open Source license:%0A%09http://www.opensource.org/licenses/%0A'''%0A%0Aimport cgi, json, os, sys, time, urllib2%0A%0Asys.path.append( os.path.abspath('../web/app') )%0Aimport private%0Abase = private.private%5B'earth-engine-api'%5D%0Aauth = private.private%5B'earth-engine-auth'%5D%0A%0Asat = 'LANDSAT/L7_L1T'%0Abbox = '-61.6,-11.4,-60.8,-10.6'%0A%0Adef fetch( api ):%0A%09req = urllib2.Request(%0A%09%09url = base + api,%0A%09%09headers = %7B 'Authorization': 'GoogleLogin auth=' + auth %7D%0A%09)%0A%09try:%0A%09%09f = urllib2.urlopen( req, None, 600 )%0A%09%09data = f.read()%0A%09%09f.close()%0A%09%09return json.loads( data )%0A%09except urllib2.HTTPError, error:%0A%09%09return error.read()%0A%0Adef listImages( sat, bbox ):%0A%09return fetch( 'list?id=%25s&bbox=%25s' %25( sat, bbox ) )%5B'data'%5D%0A%0Adef calcVCF( id ):%0A%09return fetch( vcfAPI(id) )%0A%0Adef vcfAPI( id ):%0A%09return 'value?image=%7B%22creator%22:%22CLASLITE/VCFAdjustedImage%22,%22args%22:%5B%7B%22creator%22:%22CLASLITE/AutoMCU%22,%22args%22:%5B%22%25s%22,%7B%22creator%22:%22CLASLITE/Reflectance%22,%22args%22:%5B%7B%22creator%22:%22CLASLITE/Calibrate%22,%22args%22:%5B%22%25s%22%5D%7D%5D%7D%5D%7D,%22MOD44B_C4_TREE_2000%22%5D%7D&fields=vcf_adjustment' %25( id, id )%0A%0Adef main():%0A%09images = listImages( sat, bbox )%0A%09count = len(images)%0A%09n = 0%0A%09for image in images:%0A%09%09id = image%5B'id'%5D%0A%09%09n += 1%0A%09%09print 'Loading %25d/%25d: %25s' %25( n, count, id )%0A%09%09t = time.time()%0A%09%09vcf = calcVCF( id )%0A%09%09t = time.time() - t%0A%09%09report( vcf, t )%0A%0Adef report( vcf, t ):%0A%09adjustment = vcf%5B'data'%5D%5B'properties'%5D%5B'vcf_adjustment'%5D%0A%09forest = adjustment%5B'forest_pixel_count'%5D%0A%09valid = adjustment%5B'valid_pixel_count'%5D%0A%09if valid %3E 0:%0A%09%09percent = forest * 100 / valid%0A%09else:%0A%09%09percent = 0%0A%09print '%25d seconds, %25d%25%25 forest' %25( t, percent )%0A%0Aif __name__ == %22__main__%22:%0A%09main()%0A
8b9a8f6443c1a5e184ececa4ec03baabca0973de
Add support for Pocket
services/pocket.py
services/pocket.py
Python
0
@@ -0,0 +1,1725 @@ +from werkzeug.urls import url_decode%0Aimport requests%0Aimport foauth.providers%0A%0A%0Aclass Pocket(foauth.providers.OAuth2):%0A # General info about the provider%0A provider_url = 'http://getpocket.com/'%0A docs_url = 'http://getpocket.com/developer/docs/overview'%0A category = 'News'%0A%0A # URLs to interact with the API%0A request_token_url = 'https://getpocket.com/v3/oauth/request'%0A authorize_url = 'https://getpocket.com/auth/authorize'%0A access_token_url = 'https://getpocket.com/v3/oauth/authorize'%0A api_domain = 'getpocket.com'%0A%0A available_permissions = %5B%0A (None, 'access your saved articles'),%0A %5D%0A supports_state = False%0A%0A def get_authorize_params(self, redirect_uri, scopes):%0A params = super(Pocket, self).get_authorize_params(redirect_uri, scopes)%0A r = requests.post(self.request_token_url, data=%7B%0A 'consumer_key': params%5B'client_id'%5D,%0A 'redirect_uri': redirect_uri,%0A %7D)%0A data = url_decode(r.content)%0A redirect_uri = '%25s&code=%25s' %25 (params%5B'redirect_uri'%5D, data%5B'code'%5D)%0A return %7B%0A 'request_token': data%5B'code'%5D,%0A 'redirect_uri': redirect_uri,%0A %7D%0A%0A def get_access_token_response(self, redirect_uri, data):%0A return requests.post(self.get_access_token_url(), %7B%0A 'consumer_key': self.client_id,%0A 'code': data%5B'code'%5D,%0A 'redirect_uri': redirect_uri%0A %7D)%0A%0A def parse_token(self, content):%0A data = url_decode(content)%0A data%5B'service_user_id'%5D = data%5B'username'%5D%0A return data%0A%0A def bearer_type(self, token, r):%0A r.prepare_url(r.url, %7B'consumer_key': self.client_id, 'access_token': token%7D)%0A return r%0A
8244d71a41032e41bd79741ec649fa78c6317efa
add mixins for tweaking smartmin behavior more easily
smartmin/mixins.py
smartmin/mixins.py
Python
0
@@ -0,0 +1,260 @@ +%0A# simple mixins that keep you from writing so much code%0Aclass PassRequestToFormMixin(object):%0A def get_form_kwargs(self):%0A kwargs = super(PassRequestToFormMixin, self).get_form_kwargs()%0A kwargs%5B'request'%5D = self.request%0A return kwargs%0A
32dd2099f97add61cb31df7af796876a95695bb1
Add a sample permission plugin for illustrating the check on realm resources, related to #6211.
sample-plugins/public_wiki_policy.py
sample-plugins/public_wiki_policy.py
Python
0.000003
@@ -0,0 +1,2047 @@ +from fnmatch import fnmatchcase%0A%0Afrom trac.config import Option%0Afrom trac.core import *%0Afrom trac.perm import IPermissionPolicy%0A%0Aclass PublicWikiPolicy(Component):%0A %22%22%22Sample permission policy plugin illustrating how to check %0A permission on realms.%0A%0A Don't forget to integrate that plugin in the appropriate place in the%0A list of permission policies:%0A %7B%7B%7B%0A %5Btrac%5D%0A permission_policies = PublicWikiPolicy, DefaultPermissionPolicy%0A %7D%7D%7D%0A%0A Then you can configure which pages you want to make public:%0A %7B%7B%7B%0A %5Bpublic_wiki%5D%0A view = Public*%0A modify = PublicSandbox/*%0A %7D%7D%7D%0A%0A %22%22%22%0A%0A implements(IPermissionPolicy)%0A%0A view = Option('public_wiki', 'view', 'Public*',%0A %22%22%22Case-sensitive glob pattern used for granting view permission on%0A all Wiki pages matching it.%22%22%22)%0A%0A modify = Option('public_wiki', 'modify', 'Public*',%0A %22%22%22Case-sensitive glob pattern used for granting modify permissions%0A on all Wiki pages matching it.%22%22%22)%0A%0A def check_permission(self, action, username, resource, perm):%0A if resource: # fine-grained permission check%0A if resource.realm == 'wiki': # wiki realm or resource%0A if resource.id: # ... it's a resource%0A if action == 'WIKI_VIEW': # (think 'VIEW' here)%0A pattern = self.view%0A else:%0A pattern = self.modify%0A if fnmatchcase(resource.id, pattern):%0A return True%0A else: # ... it's a realm%0A return True %0A # this policy ''may'' grant permissions on some wiki pages%0A else: # coarse-grained permission check%0A # %0A # support for the legacy permission checks: no resource specified%0A # and realm information in the action name itself.%0A #%0A if action.startswith('WIKI_'):%0A return True%0A # this policy ''may'' grant permissions on some wiki pages%0A%0A
1c39eb113be409ff304a675ef8a85e96a97b1d87
Add files via upload
basicTwitter.py
basicTwitter.py
Python
0
@@ -0,0 +1,1887 @@ +'''%0ARIT SPEX: Twitter posting basic.%0A%0ABasic python script for posting to twitter.%0A%0APre-Req:%0A Python3%0A Tweepy library twitter%0A%0AContributors:%0A Evan Putnam%0A Henry Yaeger%0A John LeBrun%0A Helen O'Connell%0A'''%0A%0A%0Aimport tweepy%0A%0A%0A#Tweet a picutre%0Adef tweetPicture(api ,picUrl):%0A api.update_with_media(picUrl)%0A%0A#Tweet a post%0Adef tweetPost(api, postStr):%0A api.update_status(postStr)%0A%0Adef apiSetUp(conKey, conSec, accTok, accSec):%0A '''%0A Sets up the api object.%0A :param conKey:%0A :param conSec:%0A :param accTok:%0A :param accSec:%0A :return:%0A '''%0A #Authenicates keys...%0A auth = tweepy.OAuthHandler(conKey, conSec)%0A auth.set_access_token(accTok, accSec)%0A%0A #Api object%0A api = tweepy.API(auth)%0A return api%0A%0A%0Adef main():%0A %22%22%22%0A NOTE: Do not send code to others with the consumer keys and access tokens. It will allow them to access your twitter%0A application. This program is simple. Enter 1 to post a twitter text post and 2 for an image post...%0A :return:%0A %22%22%22%0A%0A #REPLACE WITH CONSUMER KEYS%0A conKey = %22%22%0A conSec = %22%22%0A%0A #REPLACE WITH ACCESS TOKENS%0A accTok = %22%22%0A accSec = %22%22%0A%0A if conKey == %22%22 or conSec == %22%22 or accTok == %22%22 or accSec == %22%22:%0A print(%22WARNING YOU HAVE NOT ENTERERED YOUR INFORMATION%22)%0A%0A #Authenicates keys...%0A auth = tweepy.OAuthHandler(conKey, conSec)%0A auth.set_access_token(accTok, accSec)%0A%0A #Api object%0A api = tweepy.API(auth)%0A%0A print(%22Press and enter 1 to post a text tweet%22)%0A print(%22Press and enter 2 to post an image tweet%22)%0A option = int(input(%22Enter Option(1 or 2):%22))%0A if option == 1:%0A post = (input(%22Enter Post:%22))%0A tweetPost(api, post)%0A elif option == 2:%0A print(%22Image must be in folder of program%22)%0A imagePath = (input(%22Enter Image Path:%22))%0A tweetPicture(api,imagePath)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
eb170653e64c5a874a773dc37c99dccb4dd42608
Add tools.color module (#41, #36)
xentica/tools/color.py
xentica/tools/color.py
Python
0
@@ -0,0 +1,786 @@ +%22%22%22A collection of color conversion helpers.%22%22%22%0A%0A%0Adef hsv2rgb(hue, sat, val):%0A %22%22%22%0A Convert HSV color to RGB format.%0A%0A :param hue: Hue value %5B0, 1%5D%0A :param sat: Saturation value %5B0, 1%5D%0A :param val: Brightness value %5B0, 1%5D%0A%0A :returns: tuple (red, green, blue)%0A%0A %22%22%22%0A raise NotImplementedError%0A%0A%0Adef rgb2hsv(red, green, blue):%0A %22%22%22%0A Convert RGB color to HSV format.%0A%0A :param red: Red value %5B0, 1%5D%0A :param green: Green value %5B0, 1%5D%0A :param blue: Blue value %5B0, 1%5D%0A%0A :returns: tuple (hue, sat, val)%0A%0A %22%22%22%0A raise NotImplementedError%0A%0A%0Adef genome2rgb(genome):%0A %22%22%22%0A Convert genome bit value to RGB color.%0A%0A :param genome: Genome as integer (bit) sequence.%0A%0A :returns: tuple (red, green, blue)%0A%0A %22%22%22%0A raise NotImplementedError%0A
1d5f1576a5f92c1917fa29c457e4b7ad055f41ca
Add info for [email protected] (#5405)
var/spack/repos/builtin/packages/mono/package.py
var/spack/repos/builtin/packages/mono/package.py
############################################################################### # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/llnl/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class Mono(AutotoolsPackage): """Mono is a software platform designed to allow developers to easily create cross platform applications. It is an open source implementation of Microsoft's .NET Framework based on the ECMA standards for C# and the Common Language Runtime. """ homepage = "http://www.mono-project.com/" url = "https://download.mono-project.com/sources/mono/mono-5.0.1.1.tar.bz2" # /usr/share/.mono/keypairs needs to exist or be able to be # created, e.g. https://github.com/gentoo/dotnet/issues/6 variant('patch-folder-path', default=False, description='Point SpecialFolder.CommonApplicationData folder ' 'into Spack installation instead of /usr/share') # Spack's openssl interacts badly with mono's vendored # "boringssl", don't drag it in w/ cmake depends_on('cmake~openssl', type=('build')) depends_on('libiconv') depends_on('perl', type=('build')) version('5.0.1.1', '17692c7a797f95ee6f9a0987fda3d486') version('4.8.0.524', 'baeed5b8139a85ad7e291d402a4bcccb') def patch(self): if '+patch-folder-path' in self.spec: before = 'return "/usr/share";' after = 'return "{0}";'.format(self.prefix.share) f = 'mcs/class/corlib/System/Environment.cs' kwargs = {'ignore_absent': False, 'backup': True, 'string': True} filter_file(before, after, f, **kwargs) def configure_args(self): args = [] li = self.spec['libiconv'].prefix args.append('--with-libiconv-prefix={p}'.format(p=li)) return args
Python
0
@@ -2217,16 +2217,77 @@ ild'))%0A%0A + version('5.4.0.167', '103c7a737632046a9e9a0b039d752ee1')%0A vers
d03edd6670c130925fa8b947ebde03f2026602c3
remove redundant verbose prints
dj/scripts/mk_public.py
dj/scripts/mk_public.py
#!/usr/bin/python # mk_public.py - flip state on hosts from private to public # private = not listed, can be seen if you know the url # the presenters have been emaild the URL, # they are encouraged to advertise it. # public = advertised, it is ready for the world to view. # It will be tweeted at @NextDayVideo from steve.richardapi import update_video, MissingRequiredData from steve.restapi import API, get_content import youtube_uploader import gdata.youtube from gdata.media import YOUTUBE_NAMESPACE from atom import ExtensionElement import atom import pw from process import process import pprint from main.models import Show, Location, Episode, Raw_File, Cut_List class mk_public(process): ready_state = 9 def up_richard(self, ep): host = pw.richard[ep.show.client.richard_id] endpoint = 'http://{hostname}/api/v1'.format(hostname=host['host']) api = API(endpoint) vid = ep.public_url.split('/video/')[1].split('/')[0] response = api.video(vid).get( username=host['user'], api_key=host['api_key']) video_data = get_content(response) video_data['state'] = 1 try: update_video(endpoint, host['user'], host['api_key'], vid, video_data) except MissingRequiredData, e: # this shouldn't happen, prolly debugging something. import code code.interact(local=locals()) return True def up_youtube(self, ep): uploader = youtube_uploader.Uploader() uploader.user = ep.show.client.youtube_id return uploader.set_permission( ep.host_url ) def process_ep(self, ep): if self.options.verbose: print ep.id, ep.name # set youtube to public # set richard state to live ret = True # if something breaks, this will be false # don't make public if there is no host_url (youtube) if ep.public_url and ep.host_url: ret = ret and self.up_richard(ep) if self.options.verbose: print "Richard public." if ep.host_url: ret = ret and self.up_youtube(ep) if self.options.verbose: print "Youtube public." return ret if __name__ == '__main__': p=mk_public() p.main()
Python
0.956212
@@ -1694,62 +1694,8 @@ p):%0A - if self.options.verbose: print ep.id, ep.name%0A
197fb6ec004c0bf47ec7e2fd25b75564a3ecf6c4
Add tests for logging of rest requests
test/audit_logs/test_audit_log.py
test/audit_logs/test_audit_log.py
Python
0
@@ -0,0 +1,1758 @@ +import datetime%0Aimport pytest%0Afrom girder import auditLogger%0A%0A%[email protected]%0Adef recordModel():%0A from girder.plugins.audit_logs import Record%0A yield Record()%0A%0A%[email protected]%0Adef resetLog():%0A yield auditLogger%0A%0A for handler in auditLogger.handlers:%0A auditLogger.removeHandler(handler)%0A%0A%[email protected]('audit_logs')%0Adef testAnonymousRestRequestLogging(server, recordModel, resetLog):%0A assert list(recordModel.find()) == %5B%5D%0A%0A server.request('/user/me')%0A%0A records = recordModel.find()%0A assert records.count() == 1%0A record = records%5B0%5D%0A%0A assert record%5B'ip'%5D == '127.0.0.1'%0A assert record%5B'type'%5D == 'rest.request'%0A assert record%5B'userId'%5D == None%0A assert isinstance(record%5B'when'%5D, datetime.datetime)%0A assert record%5B'details'%5D%5B'method'%5D == 'GET'%0A assert record%5B'details'%5D%5B'status'%5D == 200%0A assert record%5B'details'%5D%5B'route'%5D == %5B'user', 'me'%5D%0A assert record%5B'details'%5D%5B'params'%5D == %7B%7D%0A%0A%[email protected]('audit_logs')%0Adef testFailedRestRequestLogging(server, recordModel, resetLog):%0A server.request('/folder', method='POST', params=%7B%0A 'name': 'Foo',%0A 'parentId': 'foo'%0A %7D)%0A records = recordModel.find()%0A%0A assert records.count() == 1%0A details = records%5B0%5D%5B'details'%5D%0A%0A assert details%5B'method'%5D == 'POST'%0A assert details%5B'status'%5D == 401%0A assert details%5B'route'%5D == %5B'folder'%5D%0A assert details%5B'params'%5D == %7B%0A 'name': 'Foo',%0A 'parentId': 'foo'%0A %7D%0A%0A%[email protected]('audit_logs')%0Adef testAuthenticatedRestRequestLogging(server, recordModel, resetLog, admin):%0A server.request('/user/me', user=admin)%0A records = recordModel.find()%0A assert records.count() == 1%0A record = records%5B0%5D%0A assert record%5B'userId'%5D == admin%5B'_id'%5D%0A
9e96a7ff9ad715f58d07341bd571e63ef233ffdb
Create fizzbuzz.py
job_interview_algs/fizzbuzz.py
job_interview_algs/fizzbuzz.py
Python
0.00001
@@ -0,0 +1,481 @@ +#!/usr/bin/env python%0A# -*- coding: UTF-8 -*-%0A%0A'''%0ACreated on 24 02 2016%0A@author: vlad%0A'''%0A%0A%0Adef multiple_of_3(number):%0A return number %25 3 == 0%0A%0Adef multiple_of_5(number):%0A return number %25 5 == 0%0A %0Afor i in range(1, 100):%0A if not multiple_of_3(i) and not multiple_of_5(i):%0A print i%0A continue%0A %0A if multiple_of_3(i) and multiple_of_5(i):%0A print %22fizzbuzz%22%0A continue%0A %0A else:%0A print %5B%22fizz%22, %22buzz%22%5D%5Bmultiple_of_5(i)%5D%0A %0A
8089750d5dccadb0603068eefec869df4f8360cc
Add fizzbuzz.py in strings folder
strings/fizzbuzz.py
strings/fizzbuzz.py
Python
0.000001
@@ -0,0 +1,1393 @@ +%22%22%22%0AWtite a function that returns an array containing the numbers from 1 to N, %0Awhere N is the parametered value. N will never be less than 1.%0A%0AReplace certain values however if any of the following conditions are met:%0A%0AIf the value is a multiple of 3: use the value 'Fizz' instead%0AIf the value is a multiple of 5: use the value 'Buzz' instead%0AIf the value is a multiple of 3 & 5: use the value 'FizzBuzz' instead%0A%22%22%22%0A%0A%22%22%22%0AThere is no fancy algorithm to solve fizz buzz.%0A%0AIterate from 1 through n%0AUse the mod operator to determine if the current iteration is divisible by:%0A3 and 5 -%3E 'FizzBuzz'%0A3 -%3E 'Fizz'%0A5 -%3E 'Buzz'%0Aelse -%3E string of current iteration%0Areturn the results%0AComplexity:%0A%0ATime: O(n)%0ASpace: O(n)%0A%22%22%22%0A%0Adef fizzbuzz(n):%0A %0A # Validate the input%0A if n %3C 1:%0A raise ValueError('n cannot be less than one')%0A if n is None:%0A raise TypeError('n cannot be None')%0A %0A result = %5B%5D%0A %0A for i in range(1, n+1):%0A if i%253 == 0 and i%255 == 0:%0A result.append('FizzBuzz')%0A elif i%253 == 0:%0A result.append('Fizz')%0A elif i%255 == 0:%0A result.append('Buzz')%0A else:%0A result.append(i)%0A return result%0A%0A# Alternative solution%0Adef fizzbuzz_with_helper_func(n):%0A return %5Bfb(m) for m in range(1,n+1)%5D%0A %0Adef fb(m):%0A r = (m %25 3 == 0) * %22Fizz%22 + (m %25 5 == 0) * %22Buzz%22%0A return r if r != %22%22 else m%0A
9ea3c14983c7b2e32132f1ffe6bbbe7b4d19000c
Add Flyweight.py
Python/Flyweight/Flyweight.py
Python/Flyweight/Flyweight.py
Python
0.000005
@@ -0,0 +1,763 @@ +#! /usr/bin/python%0A# -*- coding: utf-8 -*-%0A'''%0AFlyweight Pattern%0AAuthor: Kei Nakata%0AData: Oct.14.2014%0A'''%0A%0Aclass FlyweightFactory(object):%0A def __init__(self):%0A self.instances = dict()%0A%0A def getInstance(self, a, b):%0A if (a, b) not in self.instances:%0A self.instances%5B(a,b)%5D = Flyweight(a, b)%0A return self.instances%5B(a, b)%5D%0A%0Aclass Flyweight(object):%0A def __init__(self, a, b):%0A self.a = a%0A self.b = b%0A%0A def method(self):%0A print self.a, self.b%0A%0Aif __name__ == '__main__':%0A factory = FlyweightFactory()%0A a = factory.getInstance(1, 2)%0A b = factory.getInstance(3, 2)%0A c = factory.getInstance(1, 2)%0A a.method()%0A b.method()%0A c.method()%0A print id(a)%0A print id(b)%0A print id(c)%0A%0A
868a771e0ba049edd55ddf38db852c4d34824297
Add pod env tests
tests/test_spawner/test_pod_environment.py
tests/test_spawner/test_pod_environment.py
Python
0
@@ -0,0 +1,1140 @@ +from unittest import TestCase%0A%0Afrom scheduler.spawners.templates.pod_environment import (%0A get_affinity,%0A get_node_selector,%0A get_tolerations%0A)%0A%0A%0Aclass TestPodEnvironment(TestCase):%0A def test_pod_affinity(self):%0A assert get_affinity(None, None) is None%0A assert get_affinity(%7B'foo': 'bar'%7D, None) == %7B'foo': 'bar'%7D%0A assert get_affinity(None, '%7B%22foo%22: %22bar%22%7D') == %7B'foo': 'bar'%7D%0A assert get_affinity(%7B'foo': 'bar'%7D, '%7B%22foo%22: %22moo%22%7D') == %7B'foo': 'bar'%7D%0A%0A def get_pod_node_selector(self):%0A assert get_node_selector(None, None) is None%0A assert get_node_selector(%7B'foo': 'bar'%7D, None) == %7B'foo': 'bar'%7D%0A assert get_node_selector(None, '%7B%22foo%22: %22bar%22%7D') == %7B'foo': 'bar'%7D%0A assert get_node_selector(%7B'foo': 'bar'%7D, '%7B%22foo%22: %22moo%22%7D') == %7B'foo': 'bar'%7D%0A%0A def get_pod_tolerations(self):%0A assert get_tolerations(None, None) is None%0A assert get_tolerations(%5B%7B'foo': 'bar'%7D%5D, None) == %5B%7B'foo': 'bar'%7D%5D%0A assert get_tolerations(None, '%5B%7B%22foo%22: %22bar%22%7D%5D') == %5B%7B'foo': 'bar'%7D%5D%0A assert get_tolerations(%5B%7B'foo': 'bar'%7D%5D, '%5B%7B%22foo%22: %22moo%22%7D%5D') == %7B'foo': 'bar'%7D%0A
9ea17a6d3eb5f9af246be964651f5bef522f2d95
Map out the TrustChain data structure
src/trustchain.py
src/trustchain.py
Python
0.999389
@@ -0,0 +1,1915 @@ +class TrustChain:%0A %22%22%22%0A Node maintains one TrustChain object and interacts with it either in in the reactor process or some other process.%0A If it's the latter, there needs to be some communication mechanism.%0A%0A type System = Map%3CNode, Chain%3E;%0A%0A // height (sequence number) should match the index%0A type Chain = List%3CBlock%3E;%0A%0A struct Node %7B%0A pk: %5Bu8; 32%5D,%0A addr: SocketAddr,%0A // ...%0A %7D%0A%0A struct Signature %7B%0A pk: %5Bu8; 32%5D,%0A sig: %5Bu8, 32%5D,%0A %7D%0A%0A enum Block %7B%0A TxBlock,%0A CpBlock,%0A %7D%0A%0A struct TxBlock %7B%0A prev: Digest,%0A h_s: u64,%0A h_r: u64,%0A s_s: Signature,%0A s_r: Signature,%0A m: String,%0A%0A // items below are not a part of the block digest%0A validity: Valid %7C Invalid %7C Unknown%0A %7D%0A%0A struct Cons %7B%0A round: u64,%0A blocks: List%3CCpBlock%3E,%0A ss: List%3CSignature%3E,%0A %7D%0A%0A struct CpBlock %7B%0A prev: Digest,%0A round: u64, // of the Cons%0A con: Digest, // of the Cons%0A p: bool, // promoter registration%0A s: Signature,%0A %7D%0A%0A %22%22%22%0A%0A def __init__(self):%0A self.myself = None%0A self.system = None%0A%0A def new_tx(self, tx):%0A %22%22%22%0A Verify tx, follow the rule and mutates the state to add it%0A :return: None%0A %22%22%22%0A pass%0A%0A def new_cp(self, cp):%0A %22%22%22%0A Verify the cp, follow the rule a nd mutate the state to add it%0A :return: None%0A %22%22%22%0A pass%0A%0A def pieces(self, tx):%0A %22%22%22%0A tx must exist, return the pieces of tx%0A :param tx:%0A :return: List%3CBlock%3E%0A %22%22%22%0A pass%0A%0A def verify(self, tx, resp):%0A %22%22%22%0A%0A :param tx:%0A :param resp:%0A :return:%0A %22%22%22%0A pass%0A%0A def _enclosure(self, tx):%0A %22%22%22%0A%0A :param tx:%0A :return: (CpBlock, CpBlock)%0A %22%22%22%0A pass
1c1e933fa9c6af1aa9d73f276ac7b79c2b86bdc3
add svn-clean-external-file.py
scripts/svn-clean-external-file.py
scripts/svn-clean-external-file.py
Python
0.000002
@@ -0,0 +1,685 @@ +# written by Thomas Watnedal%0A# http://stackoverflow.com/questions/239340/automatically-remove-subversion-unversioned-files%0A%0Aimport os%0Aimport re%0A%0Adef removeall(path):%0A if not os.path.isdir(path):%0A os.remove(path)%0A return%0A files=os.listdir(path)%0A for x in files:%0A fullpath=os.path.join(path, x)%0A if os.path.isfile(fullpath):%0A os.remove(fullpath)%0A elif os.path.isdir(fullpath):%0A removeall(fullpath)%0A os.rmdir(path)%0A%0AunversionedRex = re.compile('%5E ?%5B%5C?ID%5D *%5B1-9 %5D*%5Ba-zA-Z%5D* +(.*)')%0Afor l in os.popen('svn status --no-ignore -v').readlines():%0A match = unversionedRex.match(l)%0A if match: removeall(match.group(1))%0A%0A
3ef1e39d476a8b3e41ff0b06dcd6f700c083682d
Add an ABC for all subclasses of `DataController`
data_controller/abc.py
data_controller/abc.py
Python
0.000003
@@ -0,0 +1,3129 @@ +from typing import Dict, Optional%0A%0Afrom data_controller.enums import Medium, Site%0Afrom utils.helpers import await_func%0A%0A%0Aclass DataController:%0A %22%22%22%0A An ABC for all classes that deals with database read write.%0A %22%22%22%0A __slots__ = ()%0A%0A def get_identifier(self, query: str,%0A medium: Medium) -%3E Optional%5BDict%5BSite, str%5D%5D:%0A %22%22%22%0A Get the identifier of a given search query.%0A%0A :param query: the search query.%0A%0A :param medium: the medium type.%0A%0A :return: A dict of all identifiers for this search query for all sites,%0A None if nothing is found.%0A %22%22%22%0A raise NotImplementedError%0A%0A def set_identifier(self, name: str, medium: Medium,%0A site: Site, identifier: str):%0A %22%22%22%0A Set the identifier for a given name.%0A%0A :param name: the name.%0A%0A :param medium: the medium type.%0A%0A :param site: the site.%0A%0A :param identifier: the identifier.%0A %22%22%22%0A raise NotImplementedError%0A%0A def get_mal_title(self, id_: str, medium: Medium) -%3E Optional%5Bstr%5D:%0A %22%22%22%0A Get a MAL title by its id.%0A :param id_: th MAL id.%0A :param medium: the medium type.%0A :return: The MAL title if it's found.%0A %22%22%22%0A raise NotImplementedError%0A%0A def set_mal_title(self, id_: str, medium: Medium, title: str):%0A %22%22%22%0A Set the MAL title for a given id.%0A :param id_: the MAL id.%0A%0A :param medium: The medium type.%0A%0A :param title: The MAL title for the given id.%0A %22%22%22%0A raise NotImplementedError%0A%0A def medium_data_by_id(self, id_: str, medium: Medium,%0A site: Site) -%3E Optional%5Bdict%5D:%0A %22%22%22%0A Get data by id.%0A :param id_: the id.%0A :param medium: the medium type.%0A :param site: the site.%0A :return: the data for that id if found.%0A %22%22%22%0A raise NotImplementedError%0A%0A def set_medium_data(self, id_: str, medium: Medium, site: Site, data: dict):%0A %22%22%22%0A Set the data for a given id.%0A%0A :param id_: the id.%0A %0A :param medium: the medium type.%0A%0A :param site: the site.%0A%0A :param data: the data for the id.%0A %22%22%22%0A raise NotImplementedError%0A%0A async def get_medium_data(self, query: str,%0A medium: Medium, loop=None) -%3E Optional%5Bdict%5D:%0A %22%22%22%0A Get the cached data for the given search query.%0A%0A :param query: the search query.%0A%0A :param medium: the medium type.%0A%0A :param loop: the asyncio event loop, optional. If None is provided,%0A will use the default event loop.%0A%0A :return: the cached data, for all sites that has the data.%0A %22%22%22%0A id_dict = await await_func(%0A self.get_identifier, loop, query, medium%0A )%0A if not id_dict:%0A return%0A return %7Bsite: data for site, data in %7B%0A site: await await_func(self.medium_data_by_id, loop,%0A id_, medium, site)%0A for site, id_ in id_dict.items()%7D.items() if data%7D%0A
2ce67897ade1ce8ae8b0fd00671fe61f4164a2bc
Add missing migration
oidc_apis/migrations/0002_add_multiselect_field_ad_groups_option.py
oidc_apis/migrations/0002_add_multiselect_field_ad_groups_option.py
Python
0.0002
@@ -0,0 +1,771 @@ +# Generated by Django 2.0.9 on 2018-10-15 08:08%0A%0Afrom django.db import migrations%0Aimport multiselectfield.db.fields%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('oidc_apis', '0001_initial'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='api',%0A name='required_scopes',%0A field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=%5B('email', 'E-mail'), ('profile', 'Profile'), ('address', 'Address'), ('github_username', 'GitHub username'), ('ad_groups', 'AD Groups')%5D, help_text='Select the scopes that this API needs information from. Information from the selected scopes will be included to the API Tokens.', max_length=1000, verbose_name='required scopes'),%0A ),%0A %5D%0A
785a5767ee3482fddee37327b4bf3edeed94ff46
Add shootout attempt item definition
db/shootout_attempt.py
db/shootout_attempt.py
Python
0
@@ -0,0 +1,1885 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Aimport uuid%0A%0Afrom db.common import Base%0Afrom db.specific_event import SpecificEvent%0Afrom db.player import Player%0Afrom db.team import Team%0A%0A%0Aclass ShootoutAttempt(Base, SpecificEvent):%0A __tablename__ = 'shootout_attempts'%0A __autoload__ = True%0A%0A STANDARD_ATTRS = %5B%0A %22team_id%22, %22player_id%22, %22zone%22, %22goalie_team_id%22, %22goalie_id%22,%0A %22attempt_type%22, %22shot_type%22, %22miss_type%22, %22distance%22, %22on_goal%22,%0A %22scored%22%0A %5D%0A%0A def __init__(self, event_id, data_dict):%0A self.shootout_attempt_id = uuid.uuid4().urn%0A self.event_id = event_id%0A for attr in self.STANDARD_ATTRS:%0A if attr in data_dict:%0A setattr(self, attr, data_dict%5Battr%5D)%0A else:%0A if attr in %5B'scored', 'on_goal'%5D:%0A setattr(self, attr, False)%0A else:%0A setattr(self, attr, None)%0A%0A def __str__(self):%0A player = Player.find_by_id(self.player_id)%0A goalie = Player.find_by_id(self.goalie_id)%0A plr_team = Team.find_by_id(self.team_id)%0A goalie_team = Team.find_by_id(self.goalie_team_id)%0A if self.attempt_type == 'GOAL':%0A return %22Shootout Goal: %25s (%25s) %25s, %25d ft. vs. %25s (%25s)%22 %25 (%0A player.name, plr_team.abbr, self.shot_type, self.distance,%0A goalie.name, goalie_team.abbr)%0A elif self.attempt_type == 'MISS':%0A return %22Shootout Miss: %25s (%25s) %25s, %25d ft., %25s vs. %25s (%25s)%22 %25 (%0A player.name, plr_team.abbr, self.shot_type, self.distance,%0A self.miss_type, goalie.name, goalie_team.abbr)%0A elif self.attempt_type == 'SHOT':%0A return %22Shootout Shot: %25s (%25s) %25s, %25d ft. vs. %25s (%25s)%22 %25 (%0A player.name, plr_team.abbr, self.shot_type, self.distance,%0A goalie.name, goalie_team.abbr)%0A
1dcf698a286dcdf0f2c5a70d3e9bb2b32d046604
add TestBEvents, currently skipped
tests/unit/Events/test_BEvents.py
tests/unit/Events/test_BEvents.py
Python
0
@@ -0,0 +1,1617 @@ +from AlphaTwirl.Events import BEvents as Events%0Afrom AlphaTwirl.Events import Branch%0Aimport unittest%0Aimport ROOT%0A%0A##____________________________________________________________________________%7C%7C%0AinputPath = '/Users/sakuma/work/cms/c150130_RA1_data/c150130_01_PHYS14/20150331_SingleMu/TTJets/treeProducerSusyAlphaT/tree.root'%0AtreeName = 'tree'%0A%0A##____________________________________________________________________________%7C%7C%[email protected](%22skip BEvents%22)%0Aclass TestBEvents(unittest.TestCase):%0A%0A def test_branch(self):%0A inputFile = ROOT.TFile.Open(inputPath)%0A tree = inputFile.Get(treeName)%0A events = Events(tree)%0A%0A jet_pt = events.jet_pt%0A met_pt = events.met_pt%0A self.assertIsInstance(jet_pt, Branch)%0A self.assertIsInstance(met_pt, Branch)%0A%0A self.assertEqual(0, len(jet_pt))%0A self.assertEqual(1, len(met_pt))%0A self.assertEqual(0.0, met_pt%5B0%5D)%0A%0A tree.GetEntry(0)%0A self.assertEqual(2, len(jet_pt))%0A self.assertEqual(1, len(met_pt))%0A self.assertEqual(124.55626678466797, jet_pt%5B0%5D)%0A self.assertEqual(86.90544128417969, jet_pt%5B1%5D)%0A self.assertAlmostEqual(43.783382415771484, met_pt%5B0%5D)%0A%0A tree.GetEntry(1)%0A self.assertEqual(3, len(jet_pt))%0A self.assertEqual(1, len(met_pt))%0A self.assertEqual(112.48554992675781, jet_pt%5B0%5D)%0A self.assertEqual(52.32780075073242, jet_pt%5B1%5D)%0A self.assertEqual(48.861289978027344, jet_pt%5B2%5D)%0A self.assertAlmostEqual(20.483951568603516, met_pt%5B0%5D)%0A%0A##____________________________________________________________________________%7C%7C%0A
adf65027521124ea89e9c6c5ee2baf7366b2da46
Add example settings file for makam extractors
compmusic/extractors/makam/settings.example.py
compmusic/extractors/makam/settings.example.py
Python
0
@@ -0,0 +1,29 @@ +token = %22%22 # Dunya API Token%0A
9722bc3fc0a3cf8c95e91571b4b085e07e5a124c
Create 6kyu_message_from_aliens.py
Solutions/6kyu/6kyu_message_from_aliens.py
Solutions/6kyu/6kyu_message_from_aliens.py
Python
0.000027
@@ -0,0 +1,626 @@ +import re%0Afrom collections import Counter%0A%0Ad=%7B%0A'%7C-%7C':'h',%0A'%5B-':'e',%0A'()':'o',%0A'3%5D':'b',%0A'_%7C':'l',%0A'%7C':'i',%0A'%5E%7C':'p',%0A'/%60':'y',%0A')(':'o',%0A'?/':'r',%0A'%5C/':'a',%0A'%7C%5C%7C':'n',%0A'%3C/':'k',%0A'~%7C~':'t',%0A'=/':'f',%0A')%7C':'d',%0A'%7C_%7C':'u',%0A'(':'c',%0A'-%5B':'e',%0A'~%5C_':'s',%0A'-%5B':'e',%0A'%5D3':'b',%0A'_/~':'z',%0A'/%5C%5C/%5C%5C':'w',%0A'%3C%3E':'x',%0A'/%5C%5C':'v',%0A'%7C/%5C%7C':'m',%0A'_)(':'q',%0A'T_':'j',%0A',_(':'g',%0A'__':' '%0A%7D%0A%0A%0Adef decode(m):%0A splitters=%5B%22%5D%22,%22%7D%22,%22.%22,%22'%22,%22+%22%5D%0A splitter=%5Bi for i,j in Counter(m).most_common()%5D%5B0%5D%0A r=%5B%5D%0A for f in re.split('('+re.escape(splitter)+')+', m%5B::-1%5D):%0A try: r.append(d%5Bf%5D)%0A except: continue%0A return ''.join(r)%0A
125c75ea246c2d95f0addbb31b2d82dde588f21d
Add a unit test for KaggleKernelCredentials.
tests/test_kaggle_kernel_credentials.py
tests/test_kaggle_kernel_credentials.py
Python
0
@@ -0,0 +1,293 @@ +import unittest%0A%0Afrom kaggle_secrets import GcpTarget%0Afrom kaggle_gcp import KaggleKernelCredentials%0A%0Aclass TestKaggleKernelCredentials(unittest.TestCase):%0A%0A def test_default_target(self):%0A creds = KaggleKernelCredentials()%0A self.assertEqual(GcpTarget.BIGQUERY, creds.target)%0A