column        type           min    max
commit        stringlengths  40     40
old_file      stringlengths  4      150
new_file      stringlengths  4      150
old_contents  stringlengths  0      3.26k
new_contents  stringlengths  1      4.43k
subject       stringlengths  15     501
message       stringlengths  15     4.06k
lang          stringclasses  4 values
license       stringclasses  13 values
repos         stringlengths  5      91.5k
diff          stringlengths  0      4.35k
a8f3491811bb639ebb59f79c55f461ae063b06b8
api/base/urls.py
api/base/urls.py
from django.conf import settings from django.conf.urls import include, url # from django.contrib import admin from django.conf.urls.static import static from . import views urlpatterns = [ ### API ### url(r'^$', views.root), url(r'^nodes/', include('api.nodes.urls', namespace='nodes')), url(r'^users/', include('api.users.urls', namespace='users')), url(r'^docs/', include('rest_framework_swagger.urls')), ] + static('/static/', document_root=settings.STATIC_ROOT)
from django.conf import settings from django.conf.urls import include, url, patterns # from django.contrib import admin from django.conf.urls.static import static from . import views urlpatterns = [ ### API ### url(r'^v2/', include(patterns('', url(r'^$', views.root), url(r'^nodes/', include('api.nodes.urls', namespace='nodes')), url(r'^users/', include('api.users.urls', namespace='users')), url(r'^docs/', include('rest_framework_swagger.urls')), )))] + static('/static/', document_root=settings.STATIC_ROOT)
Change API url prefix to 'v2'
Change API url prefix to 'v2'
Python
apache-2.0
TomBaxter/osf.io,cwisecarver/osf.io,pattisdr/osf.io,wearpants/osf.io,caseyrygt/osf.io,sbt9uc/osf.io,jmcarp/osf.io,adlius/osf.io,adlius/osf.io,GageGaskins/osf.io,dplorimer/osf,reinaH/osf.io,abought/osf.io,TomHeatwole/osf.io,petermalcolm/osf.io,hmoco/osf.io,pattisdr/osf.io,jeffreyliu3230/osf.io,zachjanicki/osf.io,billyhunt/osf.io,brandonPurvis/osf.io,CenterForOpenScience/osf.io,alexschiller/osf.io,acshi/osf.io,mattclark/osf.io,cldershem/osf.io,DanielSBrown/osf.io,aaxelb/osf.io,samchrisinger/osf.io,samanehsan/osf.io,SSJohns/osf.io,Ghalko/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,sloria/osf.io,KAsante95/osf.io,mattclark/osf.io,danielneis/osf.io,crcresearch/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,kch8qx/osf.io,abought/osf.io,icereval/osf.io,samchrisinger/osf.io,amyshi188/osf.io,jeffreyliu3230/osf.io,leb2dg/osf.io,danielneis/osf.io,RomanZWang/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,caneruguz/osf.io,alexschiller/osf.io,saradbowman/osf.io,pattisdr/osf.io,jolene-esposito/osf.io,brianjgeiger/osf.io,brandonPurvis/osf.io,dplorimer/osf,reinaH/osf.io,jolene-esposito/osf.io,sloria/osf.io,barbour-em/osf.io,caneruguz/osf.io,samanehsan/osf.io,lyndsysimon/osf.io,jeffreyliu3230/osf.io,petermalcolm/osf.io,caseyrygt/osf.io,caseyrygt/osf.io,wearpants/osf.io,asanfilippo7/osf.io,Ghalko/osf.io,jolene-esposito/osf.io,TomBaxter/osf.io,ZobairAlijan/osf.io,Ghalko/osf.io,chennan47/osf.io,wearpants/osf.io,barbour-em/osf.io,SSJohns/osf.io,mluke93/osf.io,alexschiller/osf.io,caneruguz/osf.io,baylee-d/osf.io,Nesiehr/osf.io,ticklemepierce/osf.io,kch8qx/osf.io,mluke93/osf.io,asanfilippo7/osf.io,RomanZWang/osf.io,ZobairAlijan/osf.io,cosenal/osf.io,jinluyuan/osf.io,zamattiac/osf.io,fabianvf/osf.io,TomBaxter/osf.io,saradbowman/osf.io,laurenrevere/osf.io,hmoco/osf.io,kch8qx/osf.io,monikagrabowska/osf.io,icereval/osf.io,jnayak1/osf.io,GageGaskins/osf.io,doublebits/osf.io,asanfilippo7/osf.io,sloria/osf.io,mfraezz/osf.io,jnayak1/osf.io,crcresearch/osf.io,acshi/osf.io,Ghalko/osf.io,Johnetordoff/osf.io,MerlinZhang/osf.io,cwisecarver/osf.io,cslzchen/osf.io,cldershem/osf.io,mluke93/osf.io,doublebits/osf.io,HarryRybacki/osf.io,njantrania/osf.io,SSJohns/osf.io,ckc6cz/osf.io,bdyetton/prettychart,bdyetton/prettychart,bdyetton/prettychart,CenterForOpenScience/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,lyndsysimon/osf.io,caneruguz/osf.io,crcresearch/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,ckc6cz/osf.io,doublebits/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,TomHeatwole/osf.io,HarryRybacki/osf.io,GageGaskins/osf.io,njantrania/osf.io,zachjanicki/osf.io,danielneis/osf.io,abought/osf.io,doublebits/osf.io,reinaH/osf.io,hmoco/osf.io,haoyuchen1992/osf.io,kwierman/osf.io,cldershem/osf.io,billyhunt/osf.io,laurenrevere/osf.io,acshi/osf.io,doublebits/osf.io,njantrania/osf.io,asanfilippo7/osf.io,MerlinZhang/osf.io,DanielSBrown/osf.io,caseyrygt/osf.io,KAsante95/osf.io,felliott/osf.io,HarryRybacki/osf.io,brandonPurvis/osf.io,Nesiehr/osf.io,hmoco/osf.io,barbour-em/osf.io,zamattiac/osf.io,mluo613/osf.io,petermalcolm/osf.io,lyndsysimon/osf.io,zachjanicki/osf.io,binoculars/osf.io,mluke93/osf.io,ticklemepierce/osf.io,ZobairAlijan/osf.io,Johnetordoff/osf.io,billyhunt/osf.io,rdhyee/osf.io,mfraezz/osf.io,baylee-d/osf.io,Nesiehr/osf.io,emetsger/osf.io,jmcarp/osf.io,caseyrollins/osf.io,KAsante95/osf.io,mattclark/osf.io,zamattiac/osf.io,cosenal/osf.io,jnayak1/osf.io,emetsger/osf.io,rdhyee/osf.io,SSJohns/osf.io,mluo613/osf.io,binoculars/osf.io,ZobairAlijan/osf.io,jeffreyliu3230/osf.io,dplorimer/osf,brianjgeiger/osf.io,felliott/osf.io,DanielSBrown/osf.io,samchrisinger/osf.io,emetsger/osf.io,billyhunt/osf.io,barbour-em/osf.io,jinluyuan/osf.io,ckc6cz/osf.io,samchrisinger/osf.io,petermalcolm/osf.io,amyshi188/osf.io,erinspace/osf.io,adlius/osf.io,leb2dg/osf.io,haoyuchen1992/osf.io,sbt9uc/osf.io,caseyrollins/osf.io,GageGaskins/osf.io,sbt9uc/osf.io,dplorimer/osf,cosenal/osf.io,DanielSBrown/osf.io,chrisseto/osf.io,RomanZWang/osf.io,arpitar/osf.io,abought/osf.io,monikagrabowska/osf.io,samanehsan/osf.io,haoyuchen1992/osf.io,acshi/osf.io,arpitar/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,erinspace/osf.io,ticklemepierce/osf.io,adlius/osf.io,cosenal/osf.io,caseyrollins/osf.io,acshi/osf.io,erinspace/osf.io,jinluyuan/osf.io,cslzchen/osf.io,kwierman/osf.io,jolene-esposito/osf.io,samanehsan/osf.io,baylee-d/osf.io,mfraezz/osf.io,jmcarp/osf.io,KAsante95/osf.io,zamattiac/osf.io,monikagrabowska/osf.io,felliott/osf.io,kch8qx/osf.io,danielneis/osf.io,TomHeatwole/osf.io,jnayak1/osf.io,felliott/osf.io,leb2dg/osf.io,kch8qx/osf.io,chrisseto/osf.io,mluo613/osf.io,icereval/osf.io,aaxelb/osf.io,monikagrabowska/osf.io,MerlinZhang/osf.io,arpitar/osf.io,kwierman/osf.io,amyshi188/osf.io,cldershem/osf.io,HarryRybacki/osf.io,chennan47/osf.io,GageGaskins/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,MerlinZhang/osf.io,leb2dg/osf.io,njantrania/osf.io,monikagrabowska/osf.io,haoyuchen1992/osf.io,jmcarp/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,fabianvf/osf.io,fabianvf/osf.io,mfraezz/osf.io,laurenrevere/osf.io,arpitar/osf.io,zachjanicki/osf.io,emetsger/osf.io,bdyetton/prettychart,ckc6cz/osf.io,RomanZWang/osf.io,lyndsysimon/osf.io,KAsante95/osf.io,wearpants/osf.io,kwierman/osf.io,rdhyee/osf.io,reinaH/osf.io,fabianvf/osf.io,Johnetordoff/osf.io,ticklemepierce/osf.io,mluo613/osf.io,TomHeatwole/osf.io,jinluyuan/osf.io,alexschiller/osf.io,chennan47/osf.io,rdhyee/osf.io,alexschiller/osf.io,RomanZWang/osf.io,sbt9uc/osf.io,chrisseto/osf.io
--- +++ @@ -1,5 +1,5 @@ from django.conf import settings -from django.conf.urls import include, url +from django.conf.urls import include, url, patterns # from django.contrib import admin from django.conf.urls.static import static @@ -9,8 +9,9 @@ urlpatterns = [ ### API ### - url(r'^$', views.root), - url(r'^nodes/', include('api.nodes.urls', namespace='nodes')), - url(r'^users/', include('api.users.urls', namespace='users')), - url(r'^docs/', include('rest_framework_swagger.urls')), -] + static('/static/', document_root=settings.STATIC_ROOT) + url(r'^v2/', include(patterns('', + url(r'^$', views.root), + url(r'^nodes/', include('api.nodes.urls', namespace='nodes')), + url(r'^users/', include('api.users.urls', namespace='users')), + url(r'^docs/', include('rest_framework_swagger.urls')), + )))] + static('/static/', document_root=settings.STATIC_ROOT)
fb7754f15a8f0803c5417782e87d6fe153bf6d20
migrations/versions/201503061726_573faf4ac644_added_end_date_to_full_text_index_events.py
migrations/versions/201503061726_573faf4ac644_added_end_date_to_full_text_index_events.py
"""Added end_date to full text index events Revision ID: 573faf4ac644 Revises: 342fa3076650 Create Date: 2015-03-06 17:26:54.718493 """ import sqlalchemy as sa from alembic import op # revision identifiers, used by Alembic. revision = '573faf4ac644' down_revision = '342fa3076650' def upgrade(): op.alter_column('event_index', 'start_date', nullable=False, schema='events') op.create_index('ix_start_date', 'event_index', ['start_date'], schema='events') op.add_column('event_index', sa.Column('end_date', sa.DateTime(), nullable=False, server_default='now()'), schema='events') op.alter_column('event_index', 'end_date', server_default=None, schema='events') op.create_index('ix_end_date', 'event_index', ['end_date'], schema='events') def downgrade(): op.alter_column('event_index', 'start_date', nullable=True, schema='events') op.drop_index('ix_start_date', table_name='event_index', schema='events') op.drop_column('event_index', 'end_date', schema='events')
"""Added end_date to full text index events Revision ID: 573faf4ac644 Revises: 342fa3076650 Create Date: 2015-03-06 17:26:54.718493 """ import sqlalchemy as sa from alembic import op # revision identifiers, used by Alembic. revision = '573faf4ac644' down_revision = '342fa3076650' def upgrade(): op.alter_column('event_index', 'start_date', nullable=False, schema='events') op.create_index('ix_events_event_index_start_date', 'event_index', ['start_date'], schema='events') op.add_column('event_index', sa.Column('end_date', sa.DateTime(), nullable=False, server_default='now()'), schema='events') op.alter_column('event_index', 'end_date', server_default=None, schema='events') op.create_index('ix_events_event_index_end_date', 'event_index', ['end_date'], schema='events') def downgrade(): op.alter_column('event_index', 'start_date', nullable=True, schema='events') op.drop_index('ix_events_event_index_start_date', table_name='event_index', schema='events') op.drop_column('event_index', 'end_date', schema='events')
Use index name matching the current naming schema
Use index name matching the current naming schema
Python
mit
OmeGak/indico,mvidalgarcia/indico,pferreir/indico,indico/indico,DirkHoffmann/indico,indico/indico,mic4ael/indico,ThiefMaster/indico,ThiefMaster/indico,mvidalgarcia/indico,mic4ael/indico,DirkHoffmann/indico,ThiefMaster/indico,OmeGak/indico,indico/indico,OmeGak/indico,DirkHoffmann/indico,mvidalgarcia/indico,DirkHoffmann/indico,mic4ael/indico,ThiefMaster/indico,mic4ael/indico,mvidalgarcia/indico,OmeGak/indico,pferreir/indico,pferreir/indico,pferreir/indico,indico/indico
--- +++ @@ -16,17 +16,16 @@ def upgrade(): op.alter_column('event_index', 'start_date', nullable=False, schema='events') - op.create_index('ix_start_date', 'event_index', ['start_date'], schema='events') + op.create_index('ix_events_event_index_start_date', 'event_index', ['start_date'], schema='events') op.add_column('event_index', sa.Column('end_date', sa.DateTime(), nullable=False, server_default='now()'), schema='events') op.alter_column('event_index', 'end_date', server_default=None, schema='events') - op.create_index('ix_end_date', 'event_index', ['end_date'], schema='events') + op.create_index('ix_events_event_index_end_date', 'event_index', ['end_date'], schema='events') def downgrade(): op.alter_column('event_index', 'start_date', nullable=True, schema='events') - op.drop_index('ix_start_date', table_name='event_index', schema='events') - + op.drop_index('ix_events_event_index_start_date', table_name='event_index', schema='events') op.drop_column('event_index', 'end_date', schema='events')
029a159fe3f920d59e0168af72177b343daa4256
phased/__init__.py
phased/__init__.py
from django.conf import settings def generate_secret_delimiter(): try: from hashlib import sha1 except ImportError: from sha import sha as sha1 return sha1(getattr(settings, 'SECRET_KEY', '')).hexdigest() LITERAL_DELIMITER = getattr(settings, 'LITERAL_DELIMITER', generate_secret_delimiter())
from django.conf import settings from django.utils.hashcompat import sha_constructor def generate_secret_delimiter(): return sha_constructor(getattr(settings, 'SECRET_KEY', '')).hexdigest() LITERAL_DELIMITER = getattr(settings, 'LITERAL_DELIMITER', generate_secret_delimiter())
Make use of Django's hashcompat module.
Make use of Django's hashcompat module.
Python
bsd-3-clause
OmarIthawi/django-phased,mab2k/django-phased,mab2k/django-phased,codysoyland/django-phased,OmarIthawi/django-phased
--- +++ @@ -1,10 +1,7 @@ from django.conf import settings +from django.utils.hashcompat import sha_constructor def generate_secret_delimiter(): - try: - from hashlib import sha1 - except ImportError: - from sha import sha as sha1 - return sha1(getattr(settings, 'SECRET_KEY', '')).hexdigest() + return sha_constructor(getattr(settings, 'SECRET_KEY', '')).hexdigest() LITERAL_DELIMITER = getattr(settings, 'LITERAL_DELIMITER', generate_secret_delimiter())
c5a7feb3000bb3e234a3b87e8b20262eb9b94dfe
books/models.py
books/models.py
from django.contrib.auth.models import User from django.db import models from django.db.models import fields from django.utils import timezone class Transaction(models.Model): EXPENSE = 'exp' INCOME = 'inc' CATEGORY_CHOICES = ( (EXPENSE, 'expense'), (INCOME, 'income'), ) title = fields.CharField(max_length=255) amount = fields.DecimalField(max_digits=10, decimal_places=2) category = fields.CharField(max_length=3, choices=CATEGORY_CHOICES) created = fields.DateTimeField(default=timezone.now, editable=False) modified = fields.DateTimeField(default=timezone.now) user = models.ForeignKey(User) def __str__(self): return "{}".format(self.title)
from django.contrib.auth.models import User from django.db import models from django.db.models import fields from django.utils import timezone class Transaction(models.Model): EXPENSE = 'exp' INCOME = 'inc' CATEGORY_CHOICES = ( (EXPENSE, 'expense'), (INCOME, 'income'), ) title = fields.CharField(max_length=255) amount = fields.DecimalField(max_digits=10, decimal_places=2) category = fields.CharField(max_length=3, choices=CATEGORY_CHOICES) created = fields.DateTimeField(default=timezone.now, editable=False) modified = fields.DateTimeField(default=timezone.now) user = models.ForeignKey(User) def __str__(self): return "{}".format(self.title) class DebtLoan(models.Model): DEBT = 0 LOAN = 1 CATEGORY_CHOICES = ( (DEBT, 'debt'), (LOAN, 'loan'), ) with_who = fields.CharField(max_length=255) title = fields.CharField(max_length=255, null=True, blank=True) amount = fields.DecimalField(max_digits=10, decimal_places=2) category = fields.PositiveSmallIntegerField(choices=CATEGORY_CHOICES) created = fields.DateTimeField(default=timezone.now, editable=False) modified = fields.DateTimeField(default=timezone.now) user = models.ForeignKey(User) def __str__(self): if self.title: return "{}: {}".format(self.with_who, self.title) else: return "{}".format(self.with_who)
Create new model for debts and loans
Create new model for debts and loans
Python
mit
trimailov/finance,trimailov/finance,trimailov/finance
--- +++ @@ -21,3 +21,26 @@ def __str__(self): return "{}".format(self.title) + + +class DebtLoan(models.Model): + DEBT = 0 + LOAN = 1 + CATEGORY_CHOICES = ( + (DEBT, 'debt'), + (LOAN, 'loan'), + ) + + with_who = fields.CharField(max_length=255) + title = fields.CharField(max_length=255, null=True, blank=True) + amount = fields.DecimalField(max_digits=10, decimal_places=2) + category = fields.PositiveSmallIntegerField(choices=CATEGORY_CHOICES) + created = fields.DateTimeField(default=timezone.now, editable=False) + modified = fields.DateTimeField(default=timezone.now) + user = models.ForeignKey(User) + + def __str__(self): + if self.title: + return "{}: {}".format(self.with_who, self.title) + else: + return "{}".format(self.with_who)
39c0dfd7821355c9d2ff2274f4dd6292e959ed87
pronto/__init__.py
pronto/__init__.py
# coding: utf-8 """ **pronto**: a Python frontend to ontologies =========================================== """ from __future__ import absolute_import __all__ = ["Ontology", "Term", "TermList", "Relationship", "Parser"] __version__='0.5.0' __author__='Martin Larralde' __author_email__ = '[email protected]' try: from .ontology import Ontology from .term import Term, TermList from .relationship import Relationship from .parser import Parser except ImportError: pass
# coding: utf-8 """ **pronto**: a Python frontend to ontologies =========================================== """ from __future__ import absolute_import __all__ = ["Ontology", "Term", "TermList", "Relationship"] __version__='0.5.0' __author__='Martin Larralde' __author_email__ = '[email protected]' try: from .ontology import Ontology from .term import Term, TermList from .relationship import Relationship except ImportError: pass
Remove Parser from __all__ (from pronto import *)
Remove Parser from __all__ (from pronto import *)
Python
mit
althonos/pronto
--- +++ @@ -7,7 +7,7 @@ from __future__ import absolute_import -__all__ = ["Ontology", "Term", "TermList", "Relationship", "Parser"] +__all__ = ["Ontology", "Term", "TermList", "Relationship"] __version__='0.5.0' __author__='Martin Larralde' __author_email__ = '[email protected]' @@ -17,7 +17,6 @@ from .ontology import Ontology from .term import Term, TermList from .relationship import Relationship - from .parser import Parser except ImportError: pass
feefc687473b80adf30079e3ca23384459bb1558
protractor/test.py
protractor/test.py
# -*- coding: utf-8 -*- import os import subprocess class ProtractorTestCaseMixin(object): protractor_conf = 'protractor.conf.js' suite = None specs = None @classmethod def setUpClass(cls): super(ProtractorTestCaseMixin, cls).setUpClass() with open(os.devnull, 'wb') as f: subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f) cls.webdriver = subprocess.Popen( ['webdriver-manager', 'start'], stdout=f, stderr=f) @classmethod def tearDownClass(cls): cls.webdriver.kill() super(ProtractorTestCaseMixin, cls).tearDownClass() def test_run(self): protractor_command = 'protractor {}'.format(self.protractor_conf) if self.specs: protractor_command += ' --specs {}'.format(','.join(self.specs)) if self.suite: protractor_command += ' --suite {}'.format(self.suite) return_code = subprocess.call(protractor_command.split()) self.assertEqual(return_code, 0)
# -*- coding: utf-8 -*- import os import subprocess class ProtractorTestCaseMixin(object): protractor_conf = 'protractor.conf.js' suite = None specs = None @classmethod def setUpClass(cls): super(ProtractorTestCaseMixin, cls).setUpClass() with open(os.devnull, 'wb') as f: subprocess.call(['webdriver-manager', 'update'], stdout=f, stderr=f) cls.webdriver = subprocess.Popen( ['webdriver-manager', 'start'], stdout=f, stderr=f) @classmethod def tearDownClass(cls): cls.webdriver.kill() super(ProtractorTestCaseMixin, cls).tearDownClass() def test_run(self): protractor_command = 'protractor {}'.format(self.protractor_conf) if self.specs: protractor_command += ' --specs {}'.format(','.join(self.specs)) if self.suite: protractor_command += ' --suite {}'.format(self.suite) protractor_command += ' --params.live_server_url={}'.format(self.live_server_url) return_code = subprocess.call(protractor_command.split()) self.assertEqual(return_code, 0)
Update to pass live server url as param to protractor
Update to pass live server url as param to protractor
Python
mit
penguin359/django-protractor,jpulec/django-protractor
--- +++ @@ -28,5 +28,6 @@ protractor_command += ' --specs {}'.format(','.join(self.specs)) if self.suite: protractor_command += ' --suite {}'.format(self.suite) + protractor_command += ' --params.live_server_url={}'.format(self.live_server_url) return_code = subprocess.call(protractor_command.split()) self.assertEqual(return_code, 0)
e78dd9bf1b9e1d20b8df34ee3328ee08afd45676
contrib/migrateticketmodel.py
contrib/migrateticketmodel.py
#!/usr/bin/env python # # This script completely migrates a <= 0.8.x Trac environment to use the new # default ticket model introduced in Trac 0.9. # # In particular, this means that the severity field is removed (or rather # disabled by removing all possible values), and the priority values are # changed to the more meaningful new defaults. # # Make sure to make a backup of the Trac environment before running this! import sys from trac.env import open_environment from trac.ticket.model import Priority, Severity priority_mapping = { 'highest': 'blocker', 'high': 'critical', 'normal': 'major', 'low': 'minor', 'lowest': 'trivial' } def main(): if len(sys.argv) < 2: print >> sys.stderr, 'usage: %s /path/to/projenv' \ % os.path.basename(sys.argv[0]) sys.exit(2) env = open_environment(sys.argv[1]) db = env.get_db_cnx() for oldprio, newprio in priority_mapping.items(): priority = Priority(env, oldprio, db) priority.name = newprio priority.update(db) for severity in list(Severity.select(env, db)): severity.delete(db) db.commit() if __name__ == '__main__': main()
#!/usr/bin/env python # # This script completely migrates a <= 0.8.x Trac environment to use the new # default ticket model introduced in Trac 0.9. # # In particular, this means that the severity field is removed (or rather # disabled by removing all possible values), and the priority values are # changed to the more meaningful new defaults. # # Make sure to make a backup of the Trac environment before running this! import os import sys from trac.env import open_environment from trac.ticket.model import Priority, Severity priority_mapping = { 'highest': 'blocker', 'high': 'critical', 'normal': 'major', 'low': 'minor', 'lowest': 'trivial' } def main(): if len(sys.argv) < 2: print >> sys.stderr, 'usage: %s /path/to/projenv' \ % os.path.basename(sys.argv[0]) sys.exit(2) env = open_environment(sys.argv[1]) db = env.get_db_cnx() for oldprio, newprio in priority_mapping.items(): priority = Priority(env, oldprio, db) priority.name = newprio priority.update(db) for severity in list(Severity.select(env, db)): severity.delete(db) db.commit() if __name__ == '__main__': main()
Fix missing import in contrib script added in [2630].
Fix missing import in contrib script added in [2630]. git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@2631 af82e41b-90c4-0310-8c96-b1721e28e2e2
Python
bsd-3-clause
netjunki/trac-Pygit2,walty8/trac,jun66j5/trac-ja,jun66j5/trac-ja,walty8/trac,netjunki/trac-Pygit2,netjunki/trac-Pygit2,walty8/trac,jun66j5/trac-ja,jun66j5/trac-ja,walty8/trac
--- +++ @@ -9,6 +9,7 @@ # # Make sure to make a backup of the Trac environment before running this! +import os import sys from trac.env import open_environment
8974dc36e6ea0ab7b5ce3c78e9827d41cf1abcec
appengine_config.py
appengine_config.py
"""Configuration.""" import logging import os import re from google.appengine.ext.appstats import recording logging.info('Loading %s from %s', __name__, __file__) # Custom webapp middleware to add Appstats. def webapp_add_wsgi_middleware(app): app = recording.appstats_wsgi_middleware(app) return app # Custom Appstats path normalization. def appstats_normalize_path(path): if path.startswith('/user/'): return '/user/X' if path.startswith('/user_popup/'): return '/user_popup/X' if '/diff/' in path: return '/X/diff/...' if '/diff2/' in path: return '/X/diff2/...' if '/patch/' in path: return '/X/patch/...' if path.startswith('/rss/'): i = path.find('/', 5) if i > 0: return path[:i] + '/X' return re.sub(r'\d+', 'X', path) # Segregate Appstats by runtime (python vs. python27). appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME') # Django 1.2+ requires DJANGO_SETTINGS_MODULE environment variable to be set # http://code.google.com/appengine/docs/python/tools/libraries.html#Django os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' # NOTE: All "main" scripts must import webapp.template before django.
"""Configuration.""" import logging import os import re from google.appengine.ext.appstats import recording logging.info('Loading %s from %s', __name__, __file__) # Custom webapp middleware to add Appstats. def webapp_add_wsgi_middleware(app): app = recording.appstats_wsgi_middleware(app) return app # Custom Appstats path normalization. def appstats_normalize_path(path): if path.startswith('/user/'): return '/user/X' if path.startswith('/user_popup/'): return '/user_popup/X' if '/diff/' in path: return '/X/diff/...' if '/diff2/' in path: return '/X/diff2/...' if '/patch/' in path: return '/X/patch/...' if path.startswith('/rss/'): i = path.find('/', 5) if i > 0: return path[:i] + '/X' return re.sub(r'\d+', 'X', path) # Segregate Appstats by runtime (python vs. python27). appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME') # Enable Interactive Playground. appstats_SHELL_OK = True # Django 1.2+ requires DJANGO_SETTINGS_MODULE environment variable to be set # http://code.google.com/appengine/docs/python/tools/libraries.html#Django os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' # NOTE: All "main" scripts must import webapp.template before django.
Enable the Appstats Interactive Playground.
Enable the Appstats Interactive Playground.
Python
apache-2.0
Koulio/rietveld,gco/rietveld,andyzsf/rietveld,google-code-export/rietveld,kscharding/integral-solutions-smxq,rietveld-codereview/rietveld,google-code-export/rietveld,v3ss0n/rietveld,ericmckean/rietveld,openlabs/cr.openlabs.co.in,aungzanbaw/rietveld,robfig/rietveld,Koulio/rietveld,arg0/rietveld,sajingeo/rietveld,openlabs/cr.openlabs.co.in,sajingeo/rietveld,supriyantomaftuh/rietveld,xtypebee/rietveld,DeanHere/rietveld,Koulio/rietveld,google-code-export/rietveld,gavioto/rietveld,ericmckean/rietveld,salomon1184/rietveld,dushmis/rietveld,draem0507/rietveld,gco/rietveld,foolonhill/rietveld,DeanHere/rietveld,andyzsf/rietveld,dushmis/rietveld,supriyantomaftuh/rietveld,dushmis/rietveld,openlabs/cr.openlabs.co.in,sajingeo/rietveld,ericmckean/rietveld,DeanHere/rietveld,robfig/rietveld,robfig/rietveld,salomon1184/rietveld,foolonhill/rietveld,v3ss0n/rietveld,salomon1184/rietveld,aungzanbaw/rietveld,fuzan/rietveld,fuzan/rietveld,fuzan/rietveld,rietveld-codereview/rietveld,aungzanbaw/rietveld,gco/rietveld,rietveld-codereview/rietveld,xtypebee/rietveld,kscharding/integral-solutions-smxq,berkus/rietveld,gavioto/rietveld,foolonhill/rietveld,berkus/rietveld,berkus/rietveld,v3ss0n/rietveld,andyzsf/rietveld,xtypebee/rietveld,arg0/rietveld,rietveld-codereview/rietveld,draem0507/rietveld,arg0/rietveld,supriyantomaftuh/rietveld,kscharding/integral-solutions-smxq,gavioto/rietveld,draem0507/rietveld
--- +++ @@ -34,8 +34,10 @@ # Segregate Appstats by runtime (python vs. python27). appstats_KEY_NAMESPACE = '__appstats_%s__' % os.getenv('APPENGINE_RUNTIME') +# Enable Interactive Playground. +appstats_SHELL_OK = True + # Django 1.2+ requires DJANGO_SETTINGS_MODULE environment variable to be set -# http://code.google.com/appengine/docs/python/tools/libraries.html#Django +# http://code.google.com/appengine/docs/python/tools/libraries.html#Django os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' # NOTE: All "main" scripts must import webapp.template before django. -
176c03e26f46bad73df39c11ea4a190baca6fe54
apps/authentication/tests.py
apps/authentication/tests.py
from django.core.urlresolvers import reverse from django.test import TestCase class HTTPGetRootTestCase(TestCase): def setUp(self): pass def test_get_root_expect_http_200(self): url = reverse('microauth_authentication:index') response = self.client.get(url) self.assertEqual(200, response.status_code, 'Expect root view to load without issues.')
from django.conf import settings from django.core.urlresolvers import reverse from django.test import TestCase from django.test.utils import override_settings class HTTPGetRootTestCase(TestCase): def setUp(self): pass def test_get_root_expect_http_200(self): pipeline_settings = settings.PIPELINE pipeline_settings['PIPELINE_ENABLED'] = False with override_settings(PIPELINE_SETTINGS=pipeline_settings): url = reverse('microauth_authentication:index') response = self.client.get(url) self.assertEqual(200, response.status_code, 'Expect root view to load without issues.')
Make test not depend on django-pipeline
Make test not depend on django-pipeline
Python
mit
microserv/microauth,microserv/microauth,microserv/microauth
--- +++ @@ -1,5 +1,7 @@ +from django.conf import settings from django.core.urlresolvers import reverse from django.test import TestCase +from django.test.utils import override_settings class HTTPGetRootTestCase(TestCase): @@ -7,6 +9,9 @@ pass def test_get_root_expect_http_200(self): - url = reverse('microauth_authentication:index') - response = self.client.get(url) - self.assertEqual(200, response.status_code, 'Expect root view to load without issues.') + pipeline_settings = settings.PIPELINE + pipeline_settings['PIPELINE_ENABLED'] = False + with override_settings(PIPELINE_SETTINGS=pipeline_settings): + url = reverse('microauth_authentication:index') + response = self.client.get(url) + self.assertEqual(200, response.status_code, 'Expect root view to load without issues.')
4ee182b5561fcd333b7368471038e2692c8e2661
anchorhub/settings/default_settings.py
anchorhub/settings/default_settings.py
""" Defaults for all settings used by AnchorHub """ WRAPPER = '{ }' INPUT = '.' OUTPUT = 'out-anchorhub' ARGPARSER = { 'description': "anchorhub parses through Markdown files and precompiles " "links to specially formatted anchors." } ARGPARSE_INPUT = { 'help': "Path of directory tree to be parsed", } ARGPARSE_OUTPUT = { 'help': "Desired output location (default is \"" + OUTPUT + "\")", 'default': OUTPUT } ARGPARSE_OVERWRITE = { 'help': "Overwrite input files; ignore output location" } ARGPARSE_EXTENSION = { 'help': "Indicate which file extensions to search and run anchorhub on.", 'default': [".md"] } ARGPARSE_WRAPPER = { 'help': "Specify custom wrapper format (default is \"" + WRAPPER + "\")", 'default': WRAPPER }
""" Defaults for all settings used by AnchorHub """ WRAPPER = '{ }' INPUT = '.' OUTPUT = 'anchorhub-out' ARGPARSER = { 'description': "anchorhub parses through Markdown files and precompiles " "links to specially formatted anchors." } ARGPARSE_INPUT = { 'help': "Path of directory tree to be parsed", } ARGPARSE_OUTPUT = { 'help': "Desired output location (default is \"" + OUTPUT + "\")", 'default': OUTPUT } ARGPARSE_OVERWRITE = { 'help': "Overwrite input files; ignore output location" } ARGPARSE_EXTENSION = { 'help': "Indicate which file extensions to search and run anchorhub on.", 'default': [".md"] } ARGPARSE_WRAPPER = { 'help': "Specify custom wrapper format (default is \"" + WRAPPER + "\")", 'default': WRAPPER }
Make default output consistent with old version
Make default output consistent with old version I flipped the hyphen around: should be 'anchorhub-out', accidentally was 'out-anchorhub'
Python
apache-2.0
samjabrahams/anchorhub
--- +++ @@ -5,7 +5,7 @@ WRAPPER = '{ }' INPUT = '.' -OUTPUT = 'out-anchorhub' +OUTPUT = 'anchorhub-out' ARGPARSER = { 'description': "anchorhub parses through Markdown files and precompiles "
4dfa90cee753f6ef937a7a75aa347ec429fa1720
cms/__init__.py
cms/__init__.py
""" A collection of Django extensions that add content-management facilities to Django projects. Developed by Dave Hall. <http://etianen.com/> """ VERSION = (1, 8, 5)
""" A collection of Django extensions that add content-management facilities to Django projects. Developed by Dave Hall. <http://etianen.com/> """ VERSION = (1, 9)
Update version number to 1.9
Update version number to 1.9
Python
bsd-3-clause
danielsamuels/cms,danielsamuels/cms,jamesfoley/cms,jamesfoley/cms,jamesfoley/cms,dan-gamble/cms,dan-gamble/cms,lewiscollard/cms,lewiscollard/cms,jamesfoley/cms,lewiscollard/cms,dan-gamble/cms,danielsamuels/cms
--- +++ @@ -6,4 +6,4 @@ <http://etianen.com/> """ -VERSION = (1, 8, 5) +VERSION = (1, 9)
b501ee5dc2a41bf51f9f91c29501792338bf7269
automatron/backend/controller.py
automatron/backend/controller.py
from automatron.backend.plugin import PluginManager from automatron.controller.controller import IAutomatronClientActions from automatron.core.controller import BaseController class BackendController(BaseController): def __init__(self, config_file): BaseController.__init__(self, config_file) self.plugins = None def prepareService(self): # Load plugins self.plugins = PluginManager(self) def __getattr__(self, item): def proxy(*args): self.plugins.emit(IAutomatronClientActions[item], *args) return proxy
from functools import partial from automatron.backend.plugin import PluginManager from automatron.controller.controller import IAutomatronClientActions from automatron.core.controller import BaseController class BackendController(BaseController): def __init__(self, config_file): BaseController.__init__(self, config_file) self.plugins = None def prepareService(self): # Load plugins self.plugins = PluginManager(self) def __getattr__(self, item): return partial(self.plugins.emit, IAutomatronClientActions[item])
Use functools.partial for client action proxy.
Use functools.partial for client action proxy.
Python
mit
automatron/automatron
--- +++ @@ -1,3 +1,4 @@ +from functools import partial from automatron.backend.plugin import PluginManager from automatron.controller.controller import IAutomatronClientActions from automatron.core.controller import BaseController @@ -13,6 +14,4 @@ self.plugins = PluginManager(self) def __getattr__(self, item): - def proxy(*args): - self.plugins.emit(IAutomatronClientActions[item], *args) - return proxy + return partial(self.plugins.emit, IAutomatronClientActions[item])
7925afd27ead247a017baf7a7dff97986904055f
comics/views.py
comics/views.py
from django.views import generic from gallery.models import GalleryImage from gallery import queries from .models import Arc, Issue class IndexView(generic.ListView): model = Arc template_name = "comics/index.html" context_object_name = "arcs" class IssueView(generic.DetailView): model = Issue template_name = "comics/issue.html" def get_queryset(self): query_set = super().get_queryset().filter(arc__slug=self.kwargs.get("arc_slug")) return query_set class ComicPageView(generic.DetailView): model = GalleryImage template_name = "comics/comic_page.html" def __init__(self): super().__init__() self.issue = None def get_queryset(self): # Find Issue, then get gallery self.issue = Issue.objects.filter(arc__slug=self.kwargs.get("arc_slug")).get( slug=self.kwargs.get("issue_slug") ) query_set = super().get_queryset().filter(gallery__id=self.issue.gallery.id) return query_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["issue"] = self.issue # Set in get_queryset() context["next"] = queries.get_next_image( self.issue.gallery, self.object.sort_order ) context["previous"] = queries.get_previous_image( self.issue.gallery, self.object.sort_order ) return context
from django.views import generic from gallery.models import GalleryImage from gallery import queries from .models import Arc, Issue class IndexView(generic.ListView): model = Arc template_name = "comics/index.html" context_object_name = "arcs" class IssueView(generic.DetailView): model = Issue template_name = "comics/issue.html" def get_queryset(self): query_set = super().get_queryset().filter(arc__slug=self.kwargs.get("arc_slug")) return query_set class ComicPageView(generic.DetailView): model = GalleryImage template_name = "comics/comic_page.html" def __init__(self): super().__init__() self.issue = None def get_queryset(self): # Find Issue, then get gallery self.issue = Issue.objects.filter(arc__slug=self.kwargs.get("arc_slug")).get( slug=self.kwargs.get("issue_slug") ) query_set = super().get_queryset().filter(gallery__id=self.issue.gallery.id) return query_set def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["issue"] = self.issue # Set in get_queryset() gallery = self.issue.gallery sort_order = self.object.sort_order context["next"] = queries.get_next_image(gallery, sort_order) context["previous"] = queries.get_previous_image(gallery, sort_order) return context
Make it look nicer, possibly micro seconds faster
Make it look nicer, possibly micro seconds faster
Python
mit
evanepio/dotmanca,evanepio/dotmanca,evanepio/dotmanca
--- +++ @@ -41,11 +41,10 @@ context = super().get_context_data(**kwargs) context["issue"] = self.issue # Set in get_queryset() - context["next"] = queries.get_next_image( - self.issue.gallery, self.object.sort_order - ) - context["previous"] = queries.get_previous_image( - self.issue.gallery, self.object.sort_order - ) + + gallery = self.issue.gallery + sort_order = self.object.sort_order + context["next"] = queries.get_next_image(gallery, sort_order) + context["previous"] = queries.get_previous_image(gallery, sort_order) return context
04416cd9652a9fdc3ab58664ab4b96cbaff3f698
simuvex/s_event.py
simuvex/s_event.py
import itertools event_id_count = itertools.count() class SimEvent(object): #def __init__(self, address=None, stmt_idx=None, message=None, exception=None, traceback=None): def __init__(self, state, event_type, **kwargs): self.id = event_id_count.next() self.type = event_type self.ins_addr = state.scratch.ins_addr self.bbl_addr = state.scratch.bbl_addr self.stmt_idx = state.scratch.stmt_idx self.sim_procedure = state.scratch.sim_procedure.__class__ self.objects = dict(kwargs) def __repr__(self): return "<SimEvent %s %d, with fields %s>" % (self.type, self.id, self.objects.keys()) def _copy_event(self): c = self.__class__.__new__(self.__class__) c.id = self.id c.type = self.type c.bbl_addr = self.bbl_addr c.stmt_idx = self.stmt_idx c.sim_procedure = self.sim_procedure c.objects = dict(self.objects) return c
import itertools event_id_count = itertools.count() class SimEvent(object): #def __init__(self, address=None, stmt_idx=None, message=None, exception=None, traceback=None): def __init__(self, state, event_type, **kwargs): self.id = event_id_count.next() self.type = event_type self.ins_addr = state.scratch.ins_addr self.bbl_addr = state.scratch.bbl_addr self.stmt_idx = state.scratch.stmt_idx self.sim_procedure = None if state.scratch.sim_procedure is None else state.scratch.sim_procedure.__class__ self.objects = dict(kwargs) def __repr__(self): return "<SimEvent %s %d, with fields %s>" % (self.type, self.id, self.objects.keys()) def _copy_event(self): c = self.__class__.__new__(self.__class__) c.id = self.id c.type = self.type c.bbl_addr = self.bbl_addr c.stmt_idx = self.stmt_idx c.sim_procedure = self.sim_procedure c.objects = dict(self.objects) return c
Set None instead of NoneType to SimEvent.sim_procedure to make pickle happy.
Set None instead of NoneType to SimEvent.sim_procedure to make pickle happy.
Python
bsd-2-clause
axt/angr,schieb/angr,angr/angr,tyb0807/angr,f-prettyland/angr,tyb0807/angr,chubbymaggie/angr,chubbymaggie/angr,f-prettyland/angr,angr/angr,axt/angr,tyb0807/angr,iamahuman/angr,iamahuman/angr,chubbymaggie/angr,angr/simuvex,schieb/angr,iamahuman/angr,axt/angr,angr/angr,f-prettyland/angr,schieb/angr
--- +++ @@ -9,7 +9,7 @@ self.ins_addr = state.scratch.ins_addr self.bbl_addr = state.scratch.bbl_addr self.stmt_idx = state.scratch.stmt_idx - self.sim_procedure = state.scratch.sim_procedure.__class__ + self.sim_procedure = None if state.scratch.sim_procedure is None else state.scratch.sim_procedure.__class__ self.objects = dict(kwargs) def __repr__(self):
b1c1b28e58b59eac81954fb55570dfd389b99c0f
tests/acceptance/test_modify.py
tests/acceptance/test_modify.py
import datetime from nose.tools import assert_raises from scalymongo import Document from scalymongo.errors import ModifyFailedError from tests.acceptance.base_acceptance_test import BaseAcceptanceTest class ModifyableDocument(Document): __collection__ = __name__ __database__ = 'test' structure = { 'field': basestring, } class WhenModifyingDocumentAndPreconditionFails(BaseAcceptanceTest): def should_raise_ModifyFailedError(self): doc = self.connection.models.ModifyableDocument({'field': 'foo'}) doc.save() assert_raises( ModifyFailedError, doc.modify, {'field': 'not the correct value'}, {'$set': {'field': 'new value'}}, )
import datetime from nose.tools import assert_raises from scalymongo import Document from scalymongo.errors import ModifyFailedError from tests.acceptance.base_acceptance_test import BaseAcceptanceTest class BlogPostModifyExample(Document): __collection__ = __name__ __database__ = 'test' structure = { 'author': basestring, 'title': basestring, 'body': basestring, 'views': int, 'comments': [{ 'author': basestring, 'comment': basestring, 'rank': int, }], } default_values = { 'views': 0, } EXAMPLE_POST = { 'author': 'Alice', 'title': 'Writing Scalable Services with Python and MongoDB', 'body': 'Use ScalyMongo!', } class BlogPostTestCase(BaseAcceptanceTest): def setup(self): self.doc = self.connection.models.BlogPostModifyExample(EXAMPLE_POST) self.doc.save() def teardown(self): self.connection.models.BlogPostModifyExample.collection.drop() def is_document_up_to_date(self): """True if and only if ``self.doc`` reflects what's in the database.""" fresh_copy = self.connection.models.BlogPostModifyExample.find_one( self.doc.shard_key) return self.doc == fresh_copy def when_no_precondition_given_should_increment(self): self.doc.modify({'$inc': {'views': 1}}) assert self.doc.views == 1 self.doc.modify({'$inc': {'views': 5}}) assert self.doc.views == 6 assert self.is_document_up_to_date() def when_precondition_fails_should_raise_ModifyFailedError(self): assert_raises( ModifyFailedError, self.doc.modify, {'$set': {'author': 'Bob'}}, {'author': 'Not Alice'}, ) # The doc should not have been altered. assert self.doc.author == 'Alice' assert self.is_document_up_to_date() def when_precondition_passes_should_update_field(self): self.doc.modify( {'$set': {'views': 15}}, {'author': 'Alice'}, ) assert self.is_document_up_to_date()
Add more comprehensive testing of `modify`
acceptance: Add more comprehensive testing of `modify`
Python
bsd-3-clause
allancaffee/scaly-mongo
--- +++ @@ -7,23 +7,73 @@ from tests.acceptance.base_acceptance_test import BaseAcceptanceTest -class ModifyableDocument(Document): +class BlogPostModifyExample(Document): __collection__ = __name__ __database__ = 'test' structure = { - 'field': basestring, + 'author': basestring, + 'title': basestring, + 'body': basestring, + 'views': int, + 'comments': [{ + 'author': basestring, + 'comment': basestring, + 'rank': int, + }], + } + default_values = { + 'views': 0, } -class WhenModifyingDocumentAndPreconditionFails(BaseAcceptanceTest): +EXAMPLE_POST = { + 'author': 'Alice', + 'title': 'Writing Scalable Services with Python and MongoDB', + 'body': 'Use ScalyMongo!', +} - def should_raise_ModifyFailedError(self): - doc = self.connection.models.ModifyableDocument({'field': 'foo'}) - doc.save() + +class BlogPostTestCase(BaseAcceptanceTest): + + def setup(self): + self.doc = self.connection.models.BlogPostModifyExample(EXAMPLE_POST) + self.doc.save() + + def teardown(self): + self.connection.models.BlogPostModifyExample.collection.drop() + + def is_document_up_to_date(self): + """True if and only if ``self.doc`` reflects what's in the database.""" + fresh_copy = self.connection.models.BlogPostModifyExample.find_one( + self.doc.shard_key) + return self.doc == fresh_copy + + def when_no_precondition_given_should_increment(self): + self.doc.modify({'$inc': {'views': 1}}) + assert self.doc.views == 1 + + self.doc.modify({'$inc': {'views': 5}}) + assert self.doc.views == 6 + + assert self.is_document_up_to_date() + + def when_precondition_fails_should_raise_ModifyFailedError(self): assert_raises( ModifyFailedError, - doc.modify, - {'field': 'not the correct value'}, - {'$set': {'field': 'new value'}}, + self.doc.modify, + {'$set': {'author': 'Bob'}}, + {'author': 'Not Alice'}, ) + + # The doc should not have been altered. + assert self.doc.author == 'Alice' + assert self.is_document_up_to_date() + + def when_precondition_passes_should_update_field(self): + self.doc.modify( + {'$set': {'views': 15}}, + {'author': 'Alice'}, + ) + + assert self.is_document_up_to_date()
445a150982f2119b340d95edc66940e0ec54afbd
lib/ansiblelint/rules/NoFormattingInWhenRule.py
lib/ansiblelint/rules/NoFormattingInWhenRule.py
from ansiblelint import AnsibleLintRule class NoFormattingInWhenRule(AnsibleLintRule): id = 'CINCH0001' shortdesc = 'No Jinja2 in when' description = '"when" lines should not include Jinja2 variables' tags = ['deprecated'] def _is_valid(self, when): if not isinstance(when, (str, unicode)): return True return when.find('{{') == -1 and when.find('}}') == -1 def matchplay(self, file, play): errors = [] if isinstance(play, dict): if 'roles' not in play: return errors for role in play['roles']: if self.matchtask(file, role): errors.append(({'when': role}, 'role "when" clause has Jinja2 templates')) if isinstance(play, list): for play_item in play: sub_errors = self.matchplay(file, play_item) if sub_errors: errors = errors + sub_errors return errors def matchtask(self, file, task): return 'when' in task and not self._is_valid(task['when'])
from ansiblelint import AnsibleLintRule try: from types import StringTypes except ImportError: # Python3 removed types.StringTypes StringTypes = str, class NoFormattingInWhenRule(AnsibleLintRule): id = 'CINCH0001' shortdesc = 'No Jinja2 in when' description = '"when" lines should not include Jinja2 variables' tags = ['deprecated'] def _is_valid(self, when): if not isinstance(when, StringTypes): return True return when.find('{{') == -1 and when.find('}}') == -1 def matchplay(self, file, play): errors = [] if isinstance(play, dict): if 'roles' not in play: return errors for role in play['roles']: if self.matchtask(file, role): errors.append(({'when': role}, 'role "when" clause has Jinja2 templates')) if isinstance(play, list): for play_item in play: sub_errors = self.matchplay(file, play_item) if sub_errors: errors = errors + sub_errors return errors def matchtask(self, file, task): return 'when' in task and not self._is_valid(task['when'])
Fix Python3 unicode test error
Fix Python3 unicode test error
Python
mit
willthames/ansible-lint,dataxu/ansible-lint,MatrixCrawler/ansible-lint
--- +++ @@ -1,4 +1,9 @@ from ansiblelint import AnsibleLintRule +try: + from types import StringTypes +except ImportError: + # Python3 removed types.StringTypes + StringTypes = str, class NoFormattingInWhenRule(AnsibleLintRule): @@ -8,7 +13,7 @@ tags = ['deprecated'] def _is_valid(self, when): - if not isinstance(when, (str, unicode)): + if not isinstance(when, StringTypes): return True return when.find('{{') == -1 and when.find('}}') == -1
0e48b2130cc53caa9beb9a5f8ce09edbcc40f1b8
ggplotx/tests/test_geom_point.py
ggplotx/tests/test_geom_point.py
from __future__ import absolute_import, division, print_function import pandas as pd from ggplotx import ggplot, aes, geom_point def test_aesthetics(): df = pd.DataFrame({ 'a': range(5), 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7, 'h': 8, 'i': 9 }) p = (ggplot(df, aes(y='a')) + geom_point(aes(x='b')) + geom_point(aes(x='c', size='a')) + geom_point(aes(x='d', alpha='a'), size=10, show_legend=False) + geom_point(aes(x='e', shape='factor(a)'), size=10, show_legend=False) + geom_point(aes(x='f', color='factor(a)'), size=10, show_legend=False) + geom_point(aes(x='g', fill='a'), stroke=0, size=10, show_legend=False) + geom_point(aes(x='h', stroke='a'), fill='white', color='green', size=10) + geom_point(aes(x='i', shape='factor(a)'), fill='brown', stroke=2, size=10, show_legend=False)) assert p == 'aesthetics'
from __future__ import absolute_import, division, print_function import pandas as pd from ggplotx import ggplot, aes, geom_point, theme def test_aesthetics(): df = pd.DataFrame({ 'a': range(5), 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7, 'h': 8, 'i': 9 }) p = (ggplot(df, aes(y='a')) + geom_point(aes(x='b')) + geom_point(aes(x='c', size='a')) + geom_point(aes(x='d', alpha='a'), size=10, show_legend=False) + geom_point(aes(x='e', shape='factor(a)'), size=10, show_legend=False) + geom_point(aes(x='f', color='factor(a)'), size=10, show_legend=False) + geom_point(aes(x='g', fill='a'), stroke=0, size=10, show_legend=False) + geom_point(aes(x='h', stroke='a'), fill='white', color='green', size=10) + geom_point(aes(x='i', shape='factor(a)'), fill='brown', stroke=2, size=10, show_legend=False) + theme(facet_spacing={'right': 0.85})) assert p == 'aesthetics'
Add space on the RHS of geom_point test
Add space on the RHS of geom_point test
Python
mit
has2k1/plotnine,has2k1/plotnine
--- +++ @@ -2,7 +2,7 @@ import pandas as pd -from ggplotx import ggplot, aes, geom_point +from ggplotx import ggplot, aes, geom_point, theme def test_aesthetics(): @@ -32,6 +32,7 @@ geom_point(aes(x='h', stroke='a'), fill='white', color='green', size=10) + geom_point(aes(x='i', shape='factor(a)'), - fill='brown', stroke=2, size=10, show_legend=False)) + fill='brown', stroke=2, size=10, show_legend=False) + + theme(facet_spacing={'right': 0.85})) assert p == 'aesthetics'
831c3f82abe5cc148f13f631f78b3c960460bbce
gitlabform/configuration/core.py
gitlabform/configuration/core.py
import os import logging import yaml from pathlib import Path class ConfigurationCore: config_from_file = None def __init__(self, config_path=None): try: if not config_path: config_path = os.path.join(Path.home(), '.gitlabform', 'config.yml') elif config_path in ['./config.yml', 'config.yml']: config_path = os.path.join(os.getcwd(), 'config.yml') logging.info("Reading config from: {}".format(config_path)) with open(config_path, 'r') as ymlfile: self.config_from_file = yaml.load(ymlfile) except Exception as e: raise ConfigFileNotFoundException(config_path) def get(self, path): """ :param path: "path" to given element in YAML file, for example for: group_settings: sddc: deploy_keys: qa_puppet: key: some key... title: some title... can_push: false ..a path to a single element array ['qa_puppet'] will be: "group_settings|sddc|deploy_keys". To get the dict under it use: get("group_settings|sddc|deploy_keys") :return: element from YAML file (dict, array, string...) """ tokens = path.split('|') current = self.config_from_file try: for token in tokens: current = current[token] except: raise KeyNotFoundException return current class ConfigFileNotFoundException(Exception): pass class KeyNotFoundException(Exception): pass
import os import logging import yaml from pathlib import Path class ConfigurationCore: config_from_file = None def __init__(self, config_path=None): try: if not config_path: config_path = os.path.join(Path.home(), '.gitlabform', 'config.yml') elif config_path in ['./config.yml', 'config.yml']: config_path = os.path.join(os.getcwd(), 'config.yml') logging.info("Reading config from: {}".format(config_path)) with open(config_path, 'r') as ymlfile: self.config_from_file = yaml.safe_load(ymlfile) except Exception as e: raise ConfigFileNotFoundException(config_path) def get(self, path): """ :param path: "path" to given element in YAML file, for example for: group_settings: sddc: deploy_keys: qa_puppet: key: some key... title: some title... can_push: false ..a path to a single element array ['qa_puppet'] will be: "group_settings|sddc|deploy_keys". To get the dict under it use: get("group_settings|sddc|deploy_keys") :return: element from YAML file (dict, array, string...) """ tokens = path.split('|') current = self.config_from_file try: for token in tokens: current = current[token] except: raise KeyNotFoundException return current class ConfigFileNotFoundException(Exception): pass class KeyNotFoundException(Exception): pass
Make loading config file safer
Make loading config file safer
Python
mit
egnyte/gitlabform,egnyte/gitlabform
--- +++ @@ -18,7 +18,7 @@ logging.info("Reading config from: {}".format(config_path)) with open(config_path, 'r') as ymlfile: - self.config_from_file = yaml.load(ymlfile) + self.config_from_file = yaml.safe_load(ymlfile) except Exception as e: raise ConfigFileNotFoundException(config_path)
9ee39df85da16503d5212a81adac316fcb00a3f6
src/features/hasWord.py
src/features/hasWord.py
from utils import preprocess_tweet def check_existence_of_words(tweet, wordlist): """ Function for the slang or curse words and acronyms features :param tweet: semi process tweet (hashtags mentions removed) :param wordlist:List of words :return: the binary vector of word in the tweet """ tweet=preprocess_tweet(tweet) boolean=0 for word in wordlist: if (tweet.find(word) != -1): boolean=1 break return [boolean]
from utils import preprocess_tweet def check_existence_of_words(tweet, wordlist): """ Function for the slang or curse words and acronyms features :param tweet: semi process tweet (hashtags mentions removed) :param wordlist:List of words :return: the binary vector of word in the tweet """ tweet=preprocess_tweet(tweet) boolean=0 for word in wordlist: if (tweet.find(word) != -1): boolean=1 break return [boolean]
Move preprocess tweet to utils, move loading wordlist to init
Move preprocess tweet to utils, move loading wordlist to init
Python
mit
iamhuy/rumour-veracity-verification
--- +++ @@ -7,10 +7,12 @@ :param wordlist:List of words :return: the binary vector of word in the tweet """ + tweet=preprocess_tweet(tweet) boolean=0 for word in wordlist: if (tweet.find(word) != -1): boolean=1 break + return [boolean]
614a996dd8227808e796a369ed0faf1f9427f780
organizer/views.py
organizer/views.py
from django.http.response import HttpResponse from .models import Tag def homepage(request): tag_list = Tag.objects.all() html_output = "<html>\n" html_output += "<head>\n" html_output += " <title>" html_output += "Don't Do This!</title>\n" html_output += "</head>\n" html_output += "<body>\n" html_output += " <ul>\n" for tag in tag_list: html_output += " <li>" html_output += tag.name.title() html_output += "</li>\n" html_output += " </ul>\n" html_output += "</body>\n" html_output += "</html>\n" return HttpResponse(html_output)
from django.http.response import HttpResponse from django.template import Context, loader from .models import Tag def homepage(request): tag_list = Tag.objects.all() template = loader.get_template( 'organizer/tag_list.html') context = Context({'tag_list': tag_list}) output = template.render(context) return HttpResponse(output)
Use template in homepage view.
Ch04: Use template in homepage view.
Python
bsd-2-clause
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
--- +++ @@ -1,22 +1,13 @@ from django.http.response import HttpResponse +from django.template import Context, loader from .models import Tag def homepage(request): tag_list = Tag.objects.all() - html_output = "<html>\n" - html_output += "<head>\n" - html_output += " <title>" - html_output += "Don't Do This!</title>\n" - html_output += "</head>\n" - html_output += "<body>\n" - html_output += " <ul>\n" - for tag in tag_list: - html_output += " <li>" - html_output += tag.name.title() - html_output += "</li>\n" - html_output += " </ul>\n" - html_output += "</body>\n" - html_output += "</html>\n" - return HttpResponse(html_output) + template = loader.get_template( + 'organizer/tag_list.html') + context = Context({'tag_list': tag_list}) + output = template.render(context) + return HttpResponse(output)
7a24f314c426e55735836dd2f805d9e0364dc871
tarbell/hooks.py
tarbell/hooks.py
# -*- coding: utf-8 -*- hooks = { 'newproject': [], # (site) 'generate': [], # (site, dir, extra_context) 'publish': [], # (site, s3) 'install': [], # (site, project) 'preview': [], # (site) 'server_start': [], # (site) 'server_stop': [], # (site) } class register_hook(object): """ Register hook with @register_hook("EVENT") where EVENT is "newproject" etc. """ def __init__(self, event): self.event = event def __call__(self, f): # Avoid weird duplication names = ['{0}.{1}'.format(func.__module__, func.func_name) for func in hooks[self.event]] if '{0}.{1}'.format(f.__module__, f.func_name) not in names: hooks[self.event].append(f) return f
# -*- coding: utf-8 -*- hooks = { 'newproject': [], # (site) 'generate': [], # (site, dir, extra_context) 'publish': [], # (site, s3) 'install': [], # (site, project) 'preview': [], # (site) 'server_start': [], # (site) 'server_stop': [], # (site) } class register_hook(object): """ Register hook with @register_hook("EVENT") where EVENT is "newproject" etc. """ def __init__(self, event): self.event = event def __call__(self, f): # Avoid weird duplication names = ['{0}.{1}'.format(func.__module__, func.__name__) for func in hooks[self.event]] if '{0}.{1}'.format(f.__module__, f.__name__) not in names: hooks[self.event].append(f) return f
Switch to Python 3-friendly `function.__name__`
Switch to Python 3-friendly `function.__name__`
Python
bsd-3-clause
tarbell-project/tarbell,eyeseast/tarbell,tarbell-project/tarbell,eyeseast/tarbell
--- +++ @@ -19,7 +19,7 @@ def __call__(self, f): # Avoid weird duplication - names = ['{0}.{1}'.format(func.__module__, func.func_name) for func in hooks[self.event]] - if '{0}.{1}'.format(f.__module__, f.func_name) not in names: + names = ['{0}.{1}'.format(func.__module__, func.__name__) for func in hooks[self.event]] + if '{0}.{1}'.format(f.__module__, f.__name__) not in names: hooks[self.event].append(f) return f
eff279b0824b4bfb569fa82092c7ce9ac6e8c723
test/__init__.py
test/__init__.py
import sys sys.path.append('/home/blake/lazy/build/lib.linux-x86_64-3.4')
import sys sys.path.append('/home/blake/b/build/lib.linux-x86_64-3.4')
Fix manual path hack after relocate
Fix manual path hack after relocate
Python
apache-2.0
blake-sheridan/py,blake-sheridan/py
--- +++ @@ -1,3 +1,3 @@ import sys -sys.path.append('/home/blake/lazy/build/lib.linux-x86_64-3.4') +sys.path.append('/home/blake/b/build/lib.linux-x86_64-3.4')
e08395a35c37fa7f7c0311cc4c7a71537b8b4227
tests/misc/print_exception.py
tests/misc/print_exception.py
try: import uio as io except ImportError: import io import sys if hasattr(sys, 'print_exception'): print_exception = sys.print_exception else: import traceback print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f) def print_exc(e): buf = io.StringIO() print_exception(e, buf) s = buf.getvalue() for l in s.split("\n"): # uPy on pyboard prints <stdin> as file, so remove filename. if l.startswith(" File "): l = l.split('"') print(l[0], l[2]) # uPy and CPy tracebacks differ in that CPy prints a source line for # each traceback entry. In this case, we know that offending line # has 4-space indent, so filter it out. elif not l.startswith(" "): print(l) # basic exception message try: 1/0 except Exception as e: print('caught') print_exc(e) # exception message with more than 1 source-code line def f(): g() def g(): 2/0 try: f() except Exception as e: print('caught') print_exc(e)
try: import uio as io except ImportError: import io import sys if hasattr(sys, 'print_exception'): print_exception = sys.print_exception else: import traceback print_exception = lambda e, f: traceback.print_exception(None, e, sys.exc_info()[2], file=f) def print_exc(e): buf = io.StringIO() print_exception(e, buf) s = buf.getvalue() for l in s.split("\n"): # uPy on pyboard prints <stdin> as file, so remove filename. if l.startswith(" File "): l = l.split('"') print(l[0], l[2]) # uPy and CPy tracebacks differ in that CPy prints a source line for # each traceback entry. In this case, we know that offending line # has 4-space indent, so filter it out. elif not l.startswith(" "): print(l) # basic exception message try: 1/0 except Exception as e: print('caught') print_exc(e) # exception message with more than 1 source-code line def f(): g() def g(): 2/0 try: f() except Exception as e: print('caught') print_exc(e) # Here we have a function with lots of bytecode generated for a single source-line, and # there is an error right at the end of the bytecode. It should report the correct line. def f(): f([1, 2], [1, 2], [1, 2], {1:1, 1:1, 1:1, 1:1, 1:1, 1:1, 1:X}) return 1 try: f() except Exception as e: print_exc(e)
Add test for line number printing with large bytecode chunk.
tests/misc: Add test for line number printing with large bytecode chunk.
Python
mit
henriknelson/micropython,AriZuu/micropython,AriZuu/micropython,micropython/micropython-esp32,micropython/micropython-esp32,PappaPeppar/micropython,MrSurly/micropython,MrSurly/micropython-esp32,infinnovation/micropython,trezor/micropython,micropython/micropython-esp32,lowRISC/micropython,torwag/micropython,PappaPeppar/micropython,swegener/micropython,MrSurly/micropython,Peetz0r/micropython-esp32,TDAbboud/micropython,hiway/micropython,kerneltask/micropython,cwyark/micropython,adafruit/micropython,trezor/micropython,adafruit/micropython,bvernoux/micropython,henriknelson/micropython,pramasoul/micropython,kerneltask/micropython,MrSurly/micropython-esp32,trezor/micropython,cwyark/micropython,torwag/micropython,hiway/micropython,adafruit/circuitpython,cwyark/micropython,tobbad/micropython,MrSurly/micropython,adafruit/circuitpython,henriknelson/micropython,MrSurly/micropython-esp32,pramasoul/micropython,tralamazza/micropython,pozetroninc/micropython,deshipu/micropython,cwyark/micropython,chrisdearman/micropython,adafruit/circuitpython,HenrikSolver/micropython,hiway/micropython,oopy/micropython,henriknelson/micropython,ryannathans/micropython,dmazzella/micropython,swegener/micropython,pramasoul/micropython,adafruit/circuitpython,pozetroninc/micropython,Peetz0r/micropython-esp32,blazewicz/micropython,toolmacher/micropython,ryannathans/micropython,alex-robbins/micropython,SHA2017-badge/micropython-esp32,bvernoux/micropython,chrisdearman/micropython,oopy/micropython,selste/micropython,pozetroninc/micropython,infinnovation/micropython,selste/micropython,pfalcon/micropython,puuu/micropython,SHA2017-badge/micropython-esp32,tralamazza/micropython,Peetz0r/micropython-esp32,Peetz0r/micropython-esp32,dmazzella/micropython,puuu/micropython,pfalcon/micropython,tobbad/micropython,chrisdearman/micropython,lowRISC/micropython,oopy/micropython,PappaPeppar/micropython,hiway/micropython,pfalcon/micropython,alex-robbins/micropython,cwyark/micropython,AriZuu/micropython,SHA2017-badge/micropython-esp32,TDAbboud/micropython,HenrikSolver/micropython,swegener/micropython,Peetz0r/micropython-esp32,MrSurly/micropython,torwag/micropython,alex-robbins/micropython,blazewicz/micropython,kerneltask/micropython,torwag/micropython,chrisdearman/micropython,trezor/micropython,alex-robbins/micropython,MrSurly/micropython-esp32,blazewicz/micropython,lowRISC/micropython,bvernoux/micropython,dmazzella/micropython,ryannathans/micropython,puuu/micropython,tobbad/micropython,ryannathans/micropython,pramasoul/micropython,AriZuu/micropython,Timmenem/micropython,blazewicz/micropython,bvernoux/micropython,tralamazza/micropython,MrSurly/micropython,swegener/micropython,blazewicz/micropython,deshipu/micropython,deshipu/micropython,trezor/micropython,selste/micropython,puuu/micropython,hiway/micropython,tralamazza/micropython,puuu/micropython,infinnovation/micropython,TDAbboud/micropython,TDAbboud/micropython,henriknelson/micropython,toolmacher/micropython,TDAbboud/micropython,selste/micropython,Timmenem/micropython,alex-robbins/micropython,pfalcon/micropython,oopy/micropython,AriZuu/micropython,lowRISC/micropython,pozetroninc/micropython,pramasoul/micropython,lowRISC/micropython,torwag/micropython,Timmenem/micropython,infinnovation/micropython,swegener/micropython,bvernoux/micropython,toolmacher/micropython,kerneltask/micropython,PappaPeppar/micropython,pfalcon/micropython,HenrikSolver/micropython,PappaPeppar/micropython,dmazzella/micropython,pozetroninc/micropython,tobbad/micropython,SHA2017-badge/micropython-esp32,deshipu/micropython,toolmacher
/micropython,adafruit/micropython,HenrikSolver/micropython,SHA2017-badge/micropython-esp32,HenrikSolver/micropython,chrisdearman/micropython,adafruit/circuitpython,adafruit/micropython,Timmenem/micropython,micropython/micropython-esp32,infinnovation/micropython,micropython/micropython-esp32,deshipu/micropython,adafruit/circuitpython,adafruit/micropython,kerneltask/micropython,selste/micropython,Timmenem/micropython,toolmacher/micropython,ryannathans/micropython,oopy/micropython,tobbad/micropython,MrSurly/micropython-esp32
--- +++ @@ -41,3 +41,13 @@ except Exception as e: print('caught') print_exc(e) + +# Here we have a function with lots of bytecode generated for a single source-line, and +# there is an error right at the end of the bytecode. It should report the correct line. +def f(): + f([1, 2], [1, 2], [1, 2], {1:1, 1:1, 1:1, 1:1, 1:1, 1:1, 1:X}) + return 1 +try: + f() +except Exception as e: + print_exc(e)
9d0b1990b979de19939cc37cbefb86e1a0cd4e0f
test/perf/perf.py
test/perf/perf.py
import numpy as np import pylab as pl import sys import timeit from pykalman import KalmanFilter N = int(sys.argv[1]) random_state = np.random.RandomState(0) transition_matrix = [[1, 0.01], [-0.01, 1]] transition_offset = [0.0,0.0] observation_matrix = [1.0,0] observation_offset = [0.0] transition_covariance = 1e-10*np.eye(2) observation_covariance = [0.1] initial_state_mean = [1.0,0.0] initial_state_covariance = [[1,0.1],[-0.1,1]] kf = KalmanFilter( transition_matrices=transition_matrix,observation_matrices=observation_matrix, transition_covariance=transition_covariance, observation_covariance=observation_covariance, transition_offsets=transition_offset, observation_offsets=observation_offset, initial_state_mean=initial_state_mean, initial_state_covariance=initial_state_covariance, random_state=random_state ) ts = np.linspace(0,0.01*1000,1000) observations = np.cos(ts) + np.sqrt(0.1) * random_state.randn(1000) states = np.cos(ts) t = timeit.timeit('filtered_state_estimates = kf.filter(observations)[0]','from __main__ import kf,observations',number=N) print t
import numpy as np import sys import timeit from pykalman import KalmanFilter N = int(sys.argv[1]) random_state = np.random.RandomState(0) transition_matrix = [[1, 0.01], [-0.01, 1]] transition_offset = [0.0,0.0] observation_matrix = [1.0,0] observation_offset = [0.0] transition_covariance = 1e-10*np.eye(2) observation_covariance = [0.1] initial_state_mean = [1.0,0.0] initial_state_covariance = [[1,0.1],[-0.1,1]] kf = KalmanFilter( transition_matrices=transition_matrix,observation_matrices=observation_matrix, transition_covariance=transition_covariance, observation_covariance=observation_covariance, transition_offsets=transition_offset, observation_offsets=observation_offset, initial_state_mean=initial_state_mean, initial_state_covariance=initial_state_covariance, random_state=random_state ) ts = np.linspace(0,0.01*1000,1000) observations = np.cos(ts) + np.sqrt(0.1) * random_state.randn(1000) states = np.cos(ts) t = timeit.timeit('filtered_state_estimates = kf.filter(observations)[0]','from __main__ import kf,observations',number=N) print t
Remove pylab from import statements
Remove pylab from import statements
Python
mit
wkearn/Kalman.jl,wkearn/Kalman.jl
--- +++ @@ -1,5 +1,4 @@ import numpy as np -import pylab as pl import sys import timeit from pykalman import KalmanFilter
5f4155201afa92f048f28b9cd53681a6bc7966ab
vendor/eventlet-0.9.15/eventlet/convenience.py
vendor/eventlet-0.9.15/eventlet/convenience.py
# The history of this repository has been rewritten to erase the vendor/ directory # Below is the md5sum and size of the file that was in the original commit bde0e3a3a15c9bbb8d96f4d8a370d8c7 5753
# The history of this repository has been rewritten to erase the vendor/ directory # Below is the md5sum and size of the file that was in the original commit 5b7615cc9b13cf39cfa39db53e86977a 5751
Drop eventlet bundle back to released state. Will workaround the bug we fixed there, in our own code.
Drop eventlet bundle back to released state. Will workaround the bug we fixed there, in our own code.
Python
mit
gratipay/aspen.py,gratipay/aspen.py
--- +++ @@ -1,4 +1,4 @@ # The history of this repository has been rewritten to erase the vendor/ directory # Below is the md5sum and size of the file that was in the original commit -bde0e3a3a15c9bbb8d96f4d8a370d8c7 -5753 +5b7615cc9b13cf39cfa39db53e86977a +5751
e5b503d0e66f8422412d0cdeac4ba4f55f14e420
spectrum/object.py
spectrum/object.py
# -*- coding: utf-8 -*- class Object: """Represents a generic Spectrum object Supported Operations: +-----------+--------------------------------------+ | Operation | Description | +===========+======================================+ | x == y | Checks if two objects are equal. | +-----------+--------------------------------------+ | x != y | Checks if two objects are not equal. | +-----------+--------------------------------------+ This is the class that will be the base class of most objects, since most have an ID number. id : int The ID of the object """ def __init__(self, id): self.id = int(id) def __eq__(self, other): return isinstance(other, self.__class__) and other.id == self.id def __ne__(self, other): if isinstance(other, self.__class__): return other.id != self.id return True
# -*- coding: utf-8 -*- class Object: """Represents a generic Spectrum object Supported Operations: +-----------+--------------------------------------+ | Operation | Description | +===========+======================================+ | x == y | Checks if two objects are equal. | +-----------+--------------------------------------+ | x != y | Checks if two objects are not equal. | +-----------+--------------------------------------+ This class is the base class of most objects, since most have an ID number. id : int The ID of the object """ def __init__(self, id): self.id = int(id) def __eq__(self, other): return isinstance(other, self.__class__) and other.id == self.id def __ne__(self, other): if isinstance(other, self.__class__): return other.id != self.id return True
Change wording from future to present tense
Documentation: Change wording from future to present tense
Python
mit
treefroog/spectrum.py
--- +++ @@ -14,7 +14,7 @@ | x != y | Checks if two objects are not equal. | +-----------+--------------------------------------+ - This is the class that will be the base class of most objects, since most + This class is the base class of most objects, since most have an ID number. id : int
9578081d1c6ce378687d605ba2350e08eddb6959
scipy/ndimage/segment/setup.py
scipy/ndimage/segment/setup.py
#!/usr/bin/env python def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('segment', parent_package, top_path) config.add_extension('_segmenter', sources=['Segmenter_EXT.c', 'Segmenter_IMPL.c'], depends = ['ndImage_Segmenter_structs.h'] ) return config if __name__ == '__main__': from numpy.distutils.core import setup setup(**configuration(top_path='').todict())
#!/usr/bin/env python def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('segment', parent_package, top_path) config.add_extension('_segmenter', sources=['Segmenter_EXT.c', 'Segmenter_IMPL.c'], depends = ['ndImage_Segmenter_structs.h'] ) config.add_data_dir('tests') return config if __name__ == '__main__': from numpy.distutils.core import setup setup(**configuration(top_path='').todict())
Add tests as data_dir to ndimage.segment
Add tests as data_dir to ndimage.segment
Python
bsd-3-clause
jamestwebber/scipy,mdhaber/scipy,ChanderG/scipy,Kamp9/scipy,Stefan-Endres/scipy,rmcgibbo/scipy,gdooper/scipy,mtrbean/scipy,petebachant/scipy,matthewalbani/scipy,fredrikw/scipy,efiring/scipy,apbard/scipy,ales-erjavec/scipy,mikebenfield/scipy,Eric89GXL/scipy,Newman101/scipy,sriki18/scipy,andyfaff/scipy,Stefan-Endres/scipy,anntzer/scipy,jseabold/scipy,lukauskas/scipy,jor-/scipy,felipebetancur/scipy,befelix/scipy,jsilter/scipy,petebachant/scipy,mortada/scipy,mhogg/scipy,pyramania/scipy,niknow/scipy,zxsted/scipy,giorgiop/scipy,vhaasteren/scipy,anntzer/scipy,giorgiop/scipy,gef756/scipy,zerothi/scipy,fredrikw/scipy,grlee77/scipy,zaxliu/scipy,pschella/scipy,trankmichael/scipy,nmayorov/scipy,tylerjereddy/scipy,sonnyhu/scipy,aarchiba/scipy,surhudm/scipy,ilayn/scipy,mhogg/scipy,WarrenWeckesser/scipy,ilayn/scipy,mtrbean/scipy,sargas/scipy,richardotis/scipy,newemailjdm/scipy,Newman101/scipy,sauliusl/scipy,futurulus/scipy,e-q/scipy,vberaudi/scipy,pyramania/scipy,rgommers/scipy,nvoron23/scipy,FRidh/scipy,aarchiba/scipy,jor-/scipy,Newman101/scipy,ndchorley/scipy,teoliphant/scipy,trankmichael/scipy,newemailjdm/scipy,aman-iitj/scipy,niknow/scipy,giorgiop/scipy,WarrenWeckesser/scipy,juliantaylor/scipy,efiring/scipy,dch312/scipy,ndchorley/scipy,sargas/scipy,andim/scipy,raoulbq/scipy,mortonjt/scipy,ortylp/scipy,rgommers/scipy,pnedunuri/scipy,Eric89GXL/scipy,endolith/scipy,andim/scipy,ChanderG/scipy,fredrikw/scipy,jjhelmus/scipy,jjhelmus/scipy,minhlongdo/scipy,anielsen001/scipy,Shaswat27/scipy,Srisai85/scipy,pbrod/scipy,pbrod/scipy,jsilter/scipy,rmcgibbo/scipy,jjhelmus/scipy,richardotis/scipy,vanpact/scipy,ales-erjavec/scipy,Stefan-Endres/scipy,mikebenfield/scipy,vhaasteren/scipy,cpaulik/scipy,mdhaber/scipy,kalvdans/scipy,vanpact/scipy,newemailjdm/scipy,Srisai85/scipy,kleskjr/scipy,nvoron23/scipy,zxsted/scipy,mhogg/scipy,pizzathief/scipy,petebachant/scipy,matthewalbani/scipy,scipy/scipy,niknow/scipy,vberaudi/scipy,lhilt/scipy,matthewalbani/scipy,teoliphant/scipy,gfyoung/scipy,gertingold/scipy,matthewalbani/scipy,ogrisel/scipy,zaxliu/scipy,vhaasteren/scipy,mortonjt/scipy,jor-/scipy,vberaudi/scipy,vberaudi/scipy,scipy/scipy,endolith/scipy,FRidh/scipy,anntzer/scipy,pbrod/scipy,josephcslater/scipy,e-q/scipy,kleskjr/scipy,sonnyhu/scipy,efiring/scipy,andim/scipy,pbrod/scipy,pbrod/scipy,zxsted/scipy,felipebetancur/scipy,aeklant/scipy,sriki18/scipy,Kamp9/scipy,Gillu13/scipy,haudren/scipy,piyush0609/scipy,hainm/scipy,cpaulik/scipy,njwilson23/scipy,dominicelse/scipy,sauliusl/scipy,juliantaylor/scipy,mgaitan/scipy,sauliusl/scipy,lhilt/scipy,jamestwebber/scipy,mingwpy/scipy,nvoron23/scipy,matthewalbani/scipy,petebachant/scipy,matthew-brett/scipy,Eric89GXL/scipy,befelix/scipy,bkendzior/scipy,tylerjereddy/scipy,richardotis/scipy,lukauskas/scipy,fernand/scipy,befelix/scipy,juliantaylor/scipy,vanpact/scipy,ilayn/scipy,sonnyhu/scipy,mingwpy/scipy,fernand/scipy,vigna/scipy,ortylp/scipy,petebachant/scipy,e-q/scipy,nmayorov/scipy,andyfaff/scipy,behzadnouri/scipy,Kamp9/scipy,matthew-brett/scipy,Dapid/scipy,pschella/scipy,aman-iitj/scipy,trankmichael/scipy,mtrbean/scipy,cpaulik/scipy,dch312/scipy,aeklant/scipy,argriffing/scipy,chatcannon/scipy,vhaasteren/scipy,kleskjr/scipy,vhaasteren/scipy,sargas/scipy,bkendzior/scipy,rgommers/scipy,mdhaber/scipy,kalvdans/scipy,sauliusl/scipy,ndchorley/scipy,Shaswat27/scipy,maniteja123/scipy,gef756/scipy,kalvdans/scipy,dominicelse/scipy,arokem/scipy,ChanderG/scipy,arokem/scipy,mgaitan/scipy,mingwpy/scipy,Gillu13/scipy,zxsted/scipy,futurulus/scipy,aeklant/scipy,niknow/scipy,mdhaber/scipy,felipebetanc
ur/scipy,perimosocordiae/scipy,ortylp/scipy,FRidh/scipy,aman-iitj/scipy,arokem/scipy,sonnyhu/scipy,arokem/scipy,apbard/scipy,Newman101/scipy,jseabold/scipy,andyfaff/scipy,jakevdp/scipy,pizzathief/scipy,scipy/scipy,andyfaff/scipy,dch312/scipy,jamestwebber/scipy,aeklant/scipy,lukauskas/scipy,piyush0609/scipy,apbard/scipy,lukauskas/scipy,woodscn/scipy,fredrikw/scipy,bkendzior/scipy,aarchiba/scipy,giorgiop/scipy,lhilt/scipy,andyfaff/scipy,josephcslater/scipy,pizzathief/scipy,jonycgn/scipy,sauliusl/scipy,niknow/scipy,mortonjt/scipy,perimosocordiae/scipy,richardotis/scipy,pschella/scipy,maniteja123/scipy,jjhelmus/scipy,woodscn/scipy,jsilter/scipy,pschella/scipy,woodscn/scipy,niknow/scipy,Eric89GXL/scipy,ales-erjavec/scipy,Dapid/scipy,jjhelmus/scipy,rmcgibbo/scipy,maciejkula/scipy,njwilson23/scipy,minhlongdo/scipy,lhilt/scipy,Shaswat27/scipy,ogrisel/scipy,tylerjereddy/scipy,mingwpy/scipy,mikebenfield/scipy,Kamp9/scipy,mortonjt/scipy,nmayorov/scipy,dch312/scipy,andim/scipy,kleskjr/scipy,minhlongdo/scipy,befelix/scipy,witcxc/scipy,nonhermitian/scipy,andim/scipy,zerothi/scipy,felipebetancur/scipy,WillieMaddox/scipy,sriki18/scipy,Srisai85/scipy,anntzer/scipy,aarchiba/scipy,chatcannon/scipy,surhudm/scipy,jakevdp/scipy,zaxliu/scipy,gertingold/scipy,perimosocordiae/scipy,aarchiba/scipy,rmcgibbo/scipy,pyramania/scipy,arokem/scipy,mhogg/scipy,endolith/scipy,maniteja123/scipy,fredrikw/scipy,kalvdans/scipy,behzadnouri/scipy,teoliphant/scipy,sonnyhu/scipy,fernand/scipy,perimosocordiae/scipy,bkendzior/scipy,behzadnouri/scipy,Newman101/scipy,Dapid/scipy,mortada/scipy,fernand/scipy,chatcannon/scipy,vanpact/scipy,jakevdp/scipy,person142/scipy,WarrenWeckesser/scipy,jseabold/scipy,maniteja123/scipy,tylerjereddy/scipy,mortada/scipy,cpaulik/scipy,ogrisel/scipy,josephcslater/scipy,vigna/scipy,jakevdp/scipy,FRidh/scipy,Kamp9/scipy,ortylp/scipy,zxsted/scipy,person142/scipy,juliantaylor/scipy,larsmans/scipy,nonhermitian/scipy,person142/scipy,perimosocordiae/scipy,gdooper/scipy,hainm/scipy,matthew-brett/scipy,pyramania/scipy,newemailjdm/scipy,richardotis/scipy,befelix/scipy,teoliphant/scipy,gdooper/scipy,tylerjereddy/scipy,njwilson23/scipy,zerothi/scipy,piyush0609/scipy,endolith/scipy,argriffing/scipy,gertingold/scipy,mortada/scipy,maniteja123/scipy,gef756/scipy,witcxc/scipy,mdhaber/scipy,njwilson23/scipy,grlee77/scipy,matthew-brett/scipy,vigna/scipy,nonhermitian/scipy,gef756/scipy,dominicelse/scipy,pnedunuri/scipy,ogrisel/scipy,raoulbq/scipy,pizzathief/scipy,raoulbq/scipy,mortada/scipy,anielsen001/scipy,gef756/scipy,hainm/scipy,behzadnouri/scipy,maniteja123/scipy,maciejkula/scipy,josephcslater/scipy,chatcannon/scipy,zxsted/scipy,WarrenWeckesser/scipy,grlee77/scipy,hainm/scipy,pbrod/scipy,vberaudi/scipy,anntzer/scipy,Srisai85/scipy,minhlongdo/scipy,jseabold/scipy,Srisai85/scipy,chatcannon/scipy,minhlongdo/scipy,Gillu13/scipy,mgaitan/scipy,teoliphant/scipy,woodscn/scipy,mhogg/scipy,matthew-brett/scipy,jseabold/scipy,futurulus/scipy,larsmans/scipy,lhilt/scipy,rgommers/scipy,mtrbean/scipy,Gillu13/scipy,scipy/scipy,surhudm/scipy,FRidh/scipy,pnedunuri/scipy,anntzer/scipy,newemailjdm/scipy,nonhermitian/scipy,Shaswat27/scipy,bkendzior/scipy,mortonjt/scipy,trankmichael/scipy,pizzathief/scipy,raoulbq/scipy,petebachant/scipy,mingwpy/scipy,larsmans/scipy,person142/scipy,futurulus/scipy,dominicelse/scipy,aeklant/scipy,trankmichael/scipy,nvoron23/scipy,ales-erjavec/scipy,haudren/scipy,nmayorov/scipy,anielsen001/scipy,woodscn/scipy,sonnyhu/scipy,gef756/scipy,FRidh/scipy,sriki18/scipy,pnedunuri/scipy,vanpact/scipy,lukauskas/scipy,e-q/sc
ipy,pyramania/scipy,WillieMaddox/scipy,sargas/scipy,richardotis/scipy,felipebetancur/scipy,argriffing/scipy,gertingold/scipy,chatcannon/scipy,efiring/scipy,raoulbq/scipy,ortylp/scipy,Shaswat27/scipy,witcxc/scipy,mgaitan/scipy,gfyoung/scipy,efiring/scipy,mortonjt/scipy,jor-/scipy,WillieMaddox/scipy,ogrisel/scipy,ChanderG/scipy,mhogg/scipy,person142/scipy,kalvdans/scipy,pnedunuri/scipy,gfyoung/scipy,haudren/scipy,rmcgibbo/scipy,ndchorley/scipy,sargas/scipy,nvoron23/scipy,aman-iitj/scipy,ilayn/scipy,Srisai85/scipy,Dapid/scipy,Stefan-Endres/scipy,nvoron23/scipy,njwilson23/scipy,andim/scipy,andyfaff/scipy,nonhermitian/scipy,Eric89GXL/scipy,Gillu13/scipy,grlee77/scipy,mtrbean/scipy,Newman101/scipy,kleskjr/scipy,ortylp/scipy,ales-erjavec/scipy,zerothi/scipy,mikebenfield/scipy,gfyoung/scipy,jamestwebber/scipy,surhudm/scipy,ndchorley/scipy,aman-iitj/scipy,argriffing/scipy,jsilter/scipy,Eric89GXL/scipy,njwilson23/scipy,endolith/scipy,fernand/scipy,anielsen001/scipy,jor-/scipy,efiring/scipy,Dapid/scipy,jonycgn/scipy,felipebetancur/scipy,apbard/scipy,behzadnouri/scipy,larsmans/scipy,pnedunuri/scipy,Gillu13/scipy,fernand/scipy,mdhaber/scipy,sriki18/scipy,hainm/scipy,endolith/scipy,mgaitan/scipy,ilayn/scipy,larsmans/scipy,anielsen001/scipy,mortada/scipy,argriffing/scipy,gfyoung/scipy,WarrenWeckesser/scipy,larsmans/scipy,giorgiop/scipy,gdooper/scipy,jseabold/scipy,surhudm/scipy,zerothi/scipy,ales-erjavec/scipy,jonycgn/scipy,juliantaylor/scipy,zaxliu/scipy,sauliusl/scipy,behzadnouri/scipy,dch312/scipy,anielsen001/scipy,scipy/scipy,gertingold/scipy,Stefan-Endres/scipy,piyush0609/scipy,cpaulik/scipy,mtrbean/scipy,vigna/scipy,rgommers/scipy,rmcgibbo/scipy,jonycgn/scipy,piyush0609/scipy,woodscn/scipy,futurulus/scipy,argriffing/scipy,zaxliu/scipy,maciejkula/scipy,grlee77/scipy,WillieMaddox/scipy,hainm/scipy,lukauskas/scipy,WillieMaddox/scipy,witcxc/scipy,zerothi/scipy,vanpact/scipy,WillieMaddox/scipy,minhlongdo/scipy,kleskjr/scipy,gdooper/scipy,ChanderG/scipy,piyush0609/scipy,ilayn/scipy,vhaasteren/scipy,Stefan-Endres/scipy,Dapid/scipy,trankmichael/scipy,jakevdp/scipy,aman-iitj/scipy,sriki18/scipy,haudren/scipy,nmayorov/scipy,apbard/scipy,jonycgn/scipy,haudren/scipy,scipy/scipy,maciejkula/scipy,jonycgn/scipy,vberaudi/scipy,fredrikw/scipy,perimosocordiae/scipy,jsilter/scipy,e-q/scipy,newemailjdm/scipy,dominicelse/scipy,josephcslater/scipy,Shaswat27/scipy,pschella/scipy,vigna/scipy,jamestwebber/scipy,raoulbq/scipy,witcxc/scipy,haudren/scipy,Kamp9/scipy,mingwpy/scipy,ChanderG/scipy,mikebenfield/scipy,zaxliu/scipy,mgaitan/scipy,futurulus/scipy,WarrenWeckesser/scipy,cpaulik/scipy,ndchorley/scipy,giorgiop/scipy,maciejkula/scipy,surhudm/scipy
--- +++ @@ -12,6 +12,8 @@ depends = ['ndImage_Segmenter_structs.h'] ) + config.add_data_dir('tests') + return config if __name__ == '__main__':
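The change registers the `tests` directory with `numpy.distutils` so it ships with the built package. A hedged sketch of the same `Configuration` pattern for a made-up subpackage (`mypkg` and `impl.c` are placeholders, not scipy sources):

```python
# Illustrative numpy.distutils setup; package and file names are placeholders.
def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('mypkg', parent_package, top_path)
    # A compiled extension built from C sources.
    config.add_extension('_impl', sources=['impl.c'])
    # Ship the tests directory so mypkg.tests is importable from an install.
    config.add_data_dir('tests')
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
```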
c06e28dae894823c0ae5385e0f9c047ceab8561c
zombies/tests.py
zombies/tests.py
from django.test import TestCase # Create your tests here. from django.test import TestCase from models import Story class StoryMethodTests(TestCase): def test_ensure_story_is_inserted(self): story = Story(name="Zombies on Campus",visits=1,description='Zombies desciption',picture='testpic') story.save() self.assertEquals((story.visits==1), True) self.assertEquals((story.name=='Zombies on Campus'), True) self.assertEquals((story.description=='Zombies desciption'), True) self.assertEquals((story.picture=='testpic'), True)
from django.test import TestCase # Create your tests here. from django.test import TestCase from models import Story, StoryPoint class StoryMethodTests(TestCase): def test_ensure_story_is_inserted(self): story = Story(name="Zombies on Campus",visits=1,description='Zombies desciption',picture='testpic') story.save() self.assertEquals((story.visits==1), True) self.assertEquals((story.name=='Zombies on Campus'), True) self.assertEquals((story.description=='Zombies desciption'), True) self.assertEquals((story.picture=='testpic'), True) def test_ensure_storyPoints_is_inserted(self): storyPoint = StoryPoint(description='You are in the library',choiceText='yes',experience=10,story_type='start',main_story_id_id=5,visits=1,story_point_id=1,picture='testpic2') storyPoint.save() self.assertEquals((storyPoint.description=='You are in the library'),True) self.assertEquals((storyPoint.choiceText=='yes'),True) self.assertEquals((storyPoint.experience==10),True) self.assertEquals((storyPoint.story_type=='start'),True) self.assertEquals((storyPoint.story_point_id==1),True) self.assertEquals((storyPoint.picture=='testpic2'),True) self.assertEquals((storyPoint.visits==1),True) self.assertEquals((storyPoint.main_story_id_id==5),True)
Test case 2 for table storypoint
Test case 2 for table storypoint
Python
apache-2.0
ITLabProject2016/internet_technology_lab_project,ITLabProject2016/internet_technology_lab_project,ITLabProject2016/internet_technology_lab_project
--- +++ @@ -3,7 +3,7 @@ # Create your tests here. from django.test import TestCase -from models import Story +from models import Story, StoryPoint class StoryMethodTests(TestCase): @@ -16,3 +16,18 @@ self.assertEquals((story.name=='Zombies on Campus'), True) self.assertEquals((story.description=='Zombies desciption'), True) self.assertEquals((story.picture=='testpic'), True) + + + + def test_ensure_storyPoints_is_inserted(self): + + storyPoint = StoryPoint(description='You are in the library',choiceText='yes',experience=10,story_type='start',main_story_id_id=5,visits=1,story_point_id=1,picture='testpic2') + storyPoint.save() + self.assertEquals((storyPoint.description=='You are in the library'),True) + self.assertEquals((storyPoint.choiceText=='yes'),True) + self.assertEquals((storyPoint.experience==10),True) + self.assertEquals((storyPoint.story_type=='start'),True) + self.assertEquals((storyPoint.story_point_id==1),True) + self.assertEquals((storyPoint.picture=='testpic2'),True) + self.assertEquals((storyPoint.visits==1),True) + self.assertEquals((storyPoint.main_story_id_id==5),True)
2baed20067fed71987bf7582fa9c9a5e53a63cb5
python/ql/test/experimental/library-tests/frameworks/stdlib/SafeAccessCheck.py
python/ql/test/experimental/library-tests/frameworks/stdlib/SafeAccessCheck.py
s = "taintedString" if s.startswith("tainted"): # $checks=s $branch=true pass
s = "taintedString" if s.startswith("tainted"): # $checks=s $branch=true pass sw = s.startswith # $f-:checks=s $f-:branch=true if sw("safe"): pass
Test false negative from review
Python: Test false negative from review
Python
mit
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
--- +++ @@ -2,3 +2,7 @@ if s.startswith("tainted"): # $checks=s $branch=true pass + +sw = s.startswith # $f-:checks=s $f-:branch=true +if sw("safe"): + pass
46ae5bbeab37f8e2fe14607c01e385d746c2d163
pymt/components.py
pymt/components.py
from __future__ import print_function __all__ = [] import os import sys import warnings import importlib from glob import glob from .framework.bmi_bridge import bmi_factory from .babel import setup_babel_environ def import_csdms_components(): debug = os.environ.get('PYMT_DEBUG', False) setup_babel_environ() if debug: print('Importing components with the following environment') for k, v in os.environ.items(): print('- {key}: {val}'.format(key=k, val=v)) try: csdms_module = importlib.import_module('csdms') except ImportError: warnings.warn('Unable to import csdms. Not loading components.') else: if debug: print('imported csdms module') files = glob(os.path.join(csdms_module.__path__[0], '*so')) _COMPONENT_NAMES = [ os.path.splitext(os.path.basename(f))[0] for f in files] if debug: print('found the following components') for name in _COMPONENT_NAMES: print('- {name}'.format(name=name)) for name in _COMPONENT_NAMES: module_name = '.'.join(['csdms', name]) try: module = importlib.import_module(module_name) except ImportError: if debug: print('unable to import {mod}'.format(mod=module_name)) else: if debug: print('imported {mod}'.format(mod=module_name)) if name in module.__dict__: try: setattr(sys.modules[__name__], name, bmi_factory(module.__dict__[name])) __all__.append(name) except Exception as err: print('warning: found csdms.{name} but was unable ' 'to wrap it'.format(name=name)) if debug: print(err) import_csdms_components()
__all__ = [] import sys from .plugin import load_csdms_plugins for plugin in load_csdms_plugins(): __all__.append(plugin.__name__) setattr(sys.modules[__name__], plugin.__name__, plugin)
Move csdms-plugin loading to plugin module.
Move csdms-plugin loading to plugin module.
Python
mit
csdms/pymt,csdms/coupling,csdms/coupling
--- +++ @@ -1,62 +1,9 @@ -from __future__ import print_function - __all__ = [] -import os import sys -import warnings -import importlib -from glob import glob - -from .framework.bmi_bridge import bmi_factory -from .babel import setup_babel_environ +from .plugin import load_csdms_plugins -def import_csdms_components(): - debug = os.environ.get('PYMT_DEBUG', False) - setup_babel_environ() - if debug: - print('Importing components with the following environment') - for k, v in os.environ.items(): - print('- {key}: {val}'.format(key=k, val=v)) - - try: - csdms_module = importlib.import_module('csdms') - except ImportError: - warnings.warn('Unable to import csdms. Not loading components.') - else: - if debug: - print('imported csdms module') - files = glob(os.path.join(csdms_module.__path__[0], '*so')) - _COMPONENT_NAMES = [ - os.path.splitext(os.path.basename(f))[0] for f in files] - - if debug: - print('found the following components') - for name in _COMPONENT_NAMES: - print('- {name}'.format(name=name)) - - for name in _COMPONENT_NAMES: - module_name = '.'.join(['csdms', name]) - try: - module = importlib.import_module(module_name) - except ImportError: - if debug: - print('unable to import {mod}'.format(mod=module_name)) - else: - if debug: - print('imported {mod}'.format(mod=module_name)) - - if name in module.__dict__: - try: - setattr(sys.modules[__name__], name, - bmi_factory(module.__dict__[name])) - __all__.append(name) - except Exception as err: - print('warning: found csdms.{name} but was unable ' - 'to wrap it'.format(name=name)) - if debug: - print(err) - - -import_csdms_components() +for plugin in load_csdms_plugins(): + __all__.append(plugin.__name__) + setattr(sys.modules[__name__], plugin.__name__, plugin)
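The rewritten `pymt/components.py` exposes whatever the plugin loader returns as top-level attributes of the package by assigning onto `sys.modules[__name__]`. A self-contained sketch of that idiom with invented stand-in classes (not pymt components):

```python
# Stand-alone illustration of registering objects as module attributes at runtime.
import sys

__all__ = []


class RiverPlugin(object):
    """Invented stand-in for a dynamically discovered component."""


class GlacierPlugin(object):
    """Another invented stand-in."""


def load_plugins():
    # In pymt this list comes from the plugin-discovery helper.
    return [RiverPlugin, GlacierPlugin]


for plugin in load_plugins():
    __all__.append(plugin.__name__)
    # Makes `from this_module import RiverPlugin` work even though the class
    # was only discovered at import time.
    setattr(sys.modules[__name__], plugin.__name__, plugin)

print(__all__)  # ['RiverPlugin', 'GlacierPlugin']
```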
1e66aba5a2c82b09a6485842948aad49c654efb4
scripts/load_topics_to_mongodb.py
scripts/load_topics_to_mongodb.py
import os import csv from pymongo import MongoClient print('Parsing topics') topics = {} with open('topics.csv', 'rb') as csvfile: reader = csv.reader(csvfile) for line in reader: if line[0] == 1: continue topics[line[0]] = line[1:] print('Connecting to MongoDB') mongodb_client = MongoClient(os.environ['MONGODB_URL']) db = mongodb_client.tvrain articles = db.articles for article in topics: articles.update({'_id': article}, {'$set': { 'topics': topics[article] }})
import os import sys import csv from pymongo import MongoClient print('Parsing topics') topics = {} with open(sys.argv[1], 'r') as csvfile: reader = csv.reader(csvfile) for line in reader: if line[0] == 1: continue topics[line[0]] = line[1:] print('Connecting to MongoDB') mongodb_client = MongoClient(os.environ['MONGODB_URL']) db = mongodb_client.tvrain articles = db.articles for article in topics: articles.update({'_id': article}, {'$set': { 'topics': topics[article] }})
Fix script for loading topics into mongodb
Fix script for loading topics into mongodb
Python
mit
xenx/recommendation_system,xenx/recommendation_system
--- +++ @@ -1,10 +1,11 @@ import os +import sys import csv from pymongo import MongoClient print('Parsing topics') topics = {} -with open('topics.csv', 'rb') as csvfile: +with open(sys.argv[1], 'r') as csvfile: reader = csv.reader(csvfile) for line in reader: if line[0] == 1:
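The script writes each article's topics with a `$set` update. In current PyMongo the per-document call is `update_one`; a hedged sketch with placeholder connection details and toy data standing in for the parsed CSV:

```python
# Sketch only -- needs a reachable MongoDB; URL, database and ids are placeholders.
import os

from pymongo import MongoClient

client = MongoClient(os.environ.get('MONGODB_URL', 'mongodb://localhost:27017'))
articles = client.tvrain.articles

topics = {'article-1': ['politics', 'economy']}  # stand-in for the parsed CSV

for article_id, article_topics in topics.items():
    # update_one() is the non-deprecated spelling of the update() call above.
    articles.update_one({'_id': article_id},
                        {'$set': {'topics': article_topics}})
```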
eefa28f06620d568eda641b08c1caa9cff9a0c96
resourcemanager.py
resourcemanager.py
# Manage resources here import animation sounds = {} images = {} animations = {} loaded_resources = False def load_resources(): """Fills the structure above with the resources for the game. """ if loaded_resources: return loaded_resources = True
# Manage resources here import pygame from pygame.locals import * import animation sounds = {} images = {} animations = {} loaded_resources = False sound_defs = { "aoe" : "aoe.wav", "big hit" : "big_hit.wav", "burstfire" : "burstfire.wav", "explosion" : "explosion.wav", "fireball" : "fireball.wav", "hover" : "heavy_hover.wav", "high pitch" : "high_pitch.wav", "jump" : "jump.wav", "long swing" : "longswing.wav", "pickaxe" : "pickaxe.wav", "pickup" : "pickup.wav", "select" : "select.wav", "short swing" : "shortswing.wav", "spell" : "spell.wav", "summon" : "summon.wav", "teleport" : "teleport.wav" } def load_resources(): """Fills the structure above with the resources for the game. """ if loaded_resources: return loaded_resources = True for name, filename in sound_defs.iteritems(): sounds[name] = pygame.mixer.Sound(filename)
Add sound definitions to resource manager
Add sound definitions to resource manager
Python
mit
vwood/pyweek2013
--- +++ @@ -1,4 +1,7 @@ # Manage resources here + +import pygame +from pygame.locals import * import animation @@ -8,10 +11,33 @@ loaded_resources = False + +sound_defs = { + "aoe" : "aoe.wav", + "big hit" : "big_hit.wav", + "burstfire" : "burstfire.wav", + "explosion" : "explosion.wav", + "fireball" : "fireball.wav", + "hover" : "heavy_hover.wav", + "high pitch" : "high_pitch.wav", + "jump" : "jump.wav", + "long swing" : "longswing.wav", + "pickaxe" : "pickaxe.wav", + "pickup" : "pickup.wav", + "select" : "select.wav", + "short swing" : "shortswing.wav", + "spell" : "spell.wav", + "summon" : "summon.wav", + "teleport" : "teleport.wav" + } + def load_resources(): """Fills the structure above with the resources for the game. """ if loaded_resources: return loaded_resources = True + + for name, filename in sound_defs.iteritems(): + sounds[name] = pygame.mixer.Sound(filename)
95bc5231f88b9f24d9f7f7200fe069884138e97a
tests/__init__.py
tests/__init__.py
from test_character import * from test_data import * from test_exceptions import * from test_guild import * from test_raid import * from test_realm import * from test_regions import * from test_utils import *
Add the list of test modules to test quickly.
Add the list of test modules to test quickly. We can now start tests with: python -m unittest tests assuming the BNET_API_KEY is defined.
Python
mit
PuckCh/battlenet
--- +++ @@ -0,0 +1,8 @@ +from test_character import * +from test_data import * +from test_exceptions import * +from test_guild import * +from test_raid import * +from test_realm import * +from test_regions import * +from test_utils import *
e578c90cc542d3cf825645fa9376796a1e7c31f9
lib/cache.py
lib/cache.py
import functools import logging import redis import config # Default options redis_opts = { 'host': 'localhost', 'port': 6379, 'db': 0, 'password': None } redis_conn = None cache_prefix = None def init(): global redis_conn, cache_prefix cfg = config.load() cache = cfg.cache if not cache: return logging.info('Enabling storage cache on Redis') if not isinstance(cache, dict): cache = {} for k, v in cache.iteritems(): redis_opts[k] = v logging.info('Redis config: {0}'.format(redis_opts)) redis_conn = redis.StrictRedis(host=redis_opts['host'], port=int(redis_opts['port']), db=int(redis_opts['db']), password=redis_opts['password']) cache_prefix = 'cache_path:{0}'.format(cfg.get('storage_path', '/')) def cache_key(key): return cache_prefix + key def put(f): @functools.wraps(f) def wrapper(*args): content = args[-1] key = args[-2] key = cache_key(key) redis_conn.set(key, content) return f(*args) if redis_conn is None: return f return wrapper def get(f): @functools.wraps(f) def wrapper(*args): key = args[-1] key = cache_key(key) content = redis_conn.get(key) if content is not None: return content # Refresh cache content = f(*args) redis_conn.set(key, content) return content if redis_conn is None: return f return wrapper def remove(f): @functools.wraps(f) def wrapper(*args): key = args[-1] key = cache_key(key) redis_conn.delete(key) return f(*args) if redis_conn is None: return f return wrapper init()
import functools import logging import redis import config # Default options redis_opts = { 'host': 'localhost', 'port': 6379, 'db': 0, 'password': None } redis_conn = None cache_prefix = None def init(): global redis_conn, cache_prefix cfg = config.load() cache = cfg.cache if not cache: return logging.info('Enabling storage cache on Redis') if not isinstance(cache, dict): cache = {} for k, v in cache.iteritems(): redis_opts[k] = v logging.info('Redis config: {0}'.format(redis_opts)) redis_conn = redis.StrictRedis(host=redis_opts['host'], port=int(redis_opts['port']), db=int(redis_opts['db']), password=redis_opts['password']) cache_prefix = 'cache_path:{0}'.format(cfg.get('storage_path', '/')) init()
Remove unneeded lru specific helper methods
Remove unneeded lru specific helper methods
Python
apache-2.0
dalvikchen/docker-registry,atyenoria/docker-registry,atyenoria/docker-registry,ewindisch/docker-registry,docker/docker-registry,ken-saka/docker-registry,wakermahmud/docker-registry,Carrotzpc/docker-registry,kireal/docker-registry,ewindisch/docker-registry,yuriyf/docker-registry,whuwxl/docker-registry,Haitianisgood/docker-registry,GoogleCloudPlatform/docker-registry-driver-gcs,dedalusdev/docker-registry,cnh/docker-registry,HubSpot/docker-registry,yuriyf/docker-registry,deis/docker-registry,csrwng/docker-registry,wakermahmud/docker-registry,mdshuai/docker-registry,cnh/docker-registry,dalvikchen/docker-registry,dedalusdev/docker-registry,deis/docker-registry,alephcloud/docker-registry,depay/docker-registry,stormltf/docker-registry,docker/docker-registry,scrapinghub/docker-registry,pombredanne/docker-registry,depay/docker-registry,liggitt/docker-registry,atyenoria/docker-registry,dhiltgen/docker-registry,ken-saka/docker-registry,shipyard/docker-registry,stormltf/docker-registry,pombredanne/docker-registry,ActiveState/docker-registry,dhiltgen/docker-registry,nunogt/docker-registry,dalvikchen/docker-registry,HubSpot/docker-registry,andrew-plunk/docker-registry,shakamunyi/docker-registry,yuriyf/docker-registry,kireal/docker-registry,kireal/docker-registry,dhiltgen/docker-registry,mdshuai/docker-registry,HubSpot/docker-registry,fabianofranz/docker-registry,cnh/docker-registry,Haitianisgood/docker-registry,ptisserand/docker-registry,catalyst-zero/docker-registry,ken-saka/docker-registry,tangkun75/docker-registry,shakamunyi/docker-registry,mdshuai/docker-registry,GoogleCloudPlatform/docker-registry-driver-gcs,liggitt/docker-registry,dedalusdev/docker-registry,whuwxl/docker-registry,Carrotzpc/docker-registry,wakermahmud/docker-registry,deis/docker-registry,scrapinghub/docker-registry,hpcloud/docker-registry,ActiveState/docker-registry,viljaste/docker-registry-1,OnePaaS/docker-registry,OnePaaS/docker-registry,catalyst-zero/docker-registry,shakamunyi/docker-registry,hpcloud/docker-registry,tangkun75/docker-registry,csrwng/docker-registry,hpcloud/docker-registry,shipyard/docker-registry,mboersma/docker-registry,hex108/docker-registry,tangkun75/docker-registry,hex108/docker-registry,dine1987/Docker,Haitianisgood/docker-registry,fabianofranz/docker-registry,mboersma/docker-registry,Carrotzpc/docker-registry,ptisserand/docker-registry,nunogt/docker-registry,dine1987/Docker,ptisserand/docker-registry,docker/docker-registry,OnePaaS/docker-registry,andrew-plunk/docker-registry,scrapinghub/docker-registry,ActiveState/docker-registry,nunogt/docker-registry,mboersma/docker-registry,alephcloud/docker-registry,alephcloud/docker-registry,depay/docker-registry,csrwng/docker-registry,fabianofranz/docker-registry,shipyard/docker-registry,hex108/docker-registry,stormltf/docker-registry,whuwxl/docker-registry,viljaste/docker-registry-1,pombredanne/docker-registry,ewindisch/docker-registry,andrew-plunk/docker-registry,dine1987/Docker,viljaste/docker-registry-1,liggitt/docker-registry,catalyst-zero/docker-registry
--- +++ @@ -37,50 +37,4 @@ cache_prefix = 'cache_path:{0}'.format(cfg.get('storage_path', '/')) -def cache_key(key): - return cache_prefix + key - - -def put(f): - @functools.wraps(f) - def wrapper(*args): - content = args[-1] - key = args[-2] - key = cache_key(key) - redis_conn.set(key, content) - return f(*args) - if redis_conn is None: - return f - return wrapper - - -def get(f): - @functools.wraps(f) - def wrapper(*args): - key = args[-1] - key = cache_key(key) - content = redis_conn.get(key) - if content is not None: - return content - # Refresh cache - content = f(*args) - redis_conn.set(key, content) - return content - if redis_conn is None: - return f - return wrapper - - -def remove(f): - @functools.wraps(f) - def wrapper(*args): - key = args[-1] - key = cache_key(key) - redis_conn.delete(key) - return f(*args) - if redis_conn is None: - return f - return wrapper - - init()
52bb18cf1249e3f48764a7ed4e9546439692c5cb
packages/Python/lldbsuite/test/functionalities/data-formatter/synthcapping/fooSynthProvider.py
packages/Python/lldbsuite/test/functionalities/data-formatter/synthcapping/fooSynthProvider.py
import lldb class fooSynthProvider: def __init__(self, valobj, dict): self.valobj = valobj; self.int_type = valobj.GetType().GetBasicType(lldb.eBasicTypeInt) def num_children(self): return 3; def get_child_at_index(self, index): if index == 0: child = self.valobj.GetChildMemberWithName('a'); if index == 1: child = self.valobj.CreateChildAtOffset ('fake_a', 1, self.int_type); if index == 2: child = self.valobj.GetChildMemberWithName('r'); return child; def get_child_index(self, name): if name == 'a': return 0; if name == 'fake_a': return 1; return 2;
import lldb class fooSynthProvider: def __init__(self, valobj, dict): self.valobj = valobj; self.int_type = valobj.GetType().GetBasicType(lldb.eBasicTypeInt) def num_children(self): return 3; def get_child_at_index(self, index): if index == 0: child = self.valobj.GetChildMemberWithName('a'); if index == 1: child = self.valobj.CreateChildAtOffset ('fake_a', 1, self.int_type); if index == 2: child = self.valobj.GetChildMemberWithName('r'); return child; def get_child_index(self, name): if name == 'a': return 0; if name == 'fake_a': return 1; return 2;
Fix TestSyntheticCapping for Python 3.
Fix TestSyntheticCapping for Python 3. In Python 3, whitespace inconsistences are errors. This synthetic provider had mixed tabs and spaces, as well as inconsistent indentation widths. This led to the file not being imported, and naturally the test failing. No functional change here, just whitespace. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@258751 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb
--- +++ @@ -1,21 +1,21 @@ import lldb class fooSynthProvider: - def __init__(self, valobj, dict): - self.valobj = valobj; - self.int_type = valobj.GetType().GetBasicType(lldb.eBasicTypeInt) - def num_children(self): - return 3; - def get_child_at_index(self, index): - if index == 0: - child = self.valobj.GetChildMemberWithName('a'); - if index == 1: - child = self.valobj.CreateChildAtOffset ('fake_a', 1, self.int_type); - if index == 2: - child = self.valobj.GetChildMemberWithName('r'); - return child; - def get_child_index(self, name): - if name == 'a': - return 0; - if name == 'fake_a': - return 1; - return 2; + def __init__(self, valobj, dict): + self.valobj = valobj; + self.int_type = valobj.GetType().GetBasicType(lldb.eBasicTypeInt) + def num_children(self): + return 3; + def get_child_at_index(self, index): + if index == 0: + child = self.valobj.GetChildMemberWithName('a'); + if index == 1: + child = self.valobj.CreateChildAtOffset ('fake_a', 1, self.int_type); + if index == 2: + child = self.valobj.GetChildMemberWithName('r'); + return child; + def get_child_index(self, name): + if name == 'a': + return 0; + if name == 'fake_a': + return 1; + return 2;
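The LLDB fix matters because Python 3 refuses to guess when a block's indentation mixes tabs and spaces; it raises `TabError` instead of importing the file. A tiny standard-library demonstration using `compile()`:

```python
# Python 3 rejects ambiguous tab/space indentation outright.
source = "def f():\n    x = 1\n\ty = 2\n"  # line 2: four spaces, line 3: a tab

try:
    compile(source, "<mixed-indent>", "exec")
except TabError as err:
    print("rejected:", err)  # inconsistent use of tabs and spaces in indentation
```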
e6fa443412a909bc01e2dd8d9944ff3ddba35089
numpy/_array_api/_constants.py
numpy/_array_api/_constants.py
from .. import e, inf, nan, pi
from ._array_object import ndarray from ._dtypes import float64 import numpy as np e = ndarray._new(np.array(np.e, dtype=float64)) inf = ndarray._new(np.array(np.inf, dtype=float64)) nan = ndarray._new(np.array(np.nan, dtype=float64)) pi = ndarray._new(np.array(np.pi, dtype=float64))
Make the array API constants into dimension 0 arrays
Make the array API constants into dimension 0 arrays The spec does not actually specify whether these should be dimension 0 arrays or Python floats (which they are in NumPy). However, making them dimension 0 arrays is cleaner, and ensures they also have all the methods and attributes that are implemented on the ndarray object.
Python
mit
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
--- +++ @@ -1 +1,9 @@ -from .. import e, inf, nan, pi +from ._array_object import ndarray +from ._dtypes import float64 + +import numpy as np + +e = ndarray._new(np.array(np.e, dtype=float64)) +inf = ndarray._new(np.array(np.inf, dtype=float64)) +nan = ndarray._new(np.array(np.nan, dtype=float64)) +pi = ndarray._new(np.array(np.pi, dtype=float64))
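The commit message's argument is that a dimension-0 array carries the full ndarray interface while a Python float does not. A short check with plain NumPy (only `numpy` itself is assumed):

```python
import numpy as np

py_pi = np.pi                 # plain Python float: no dtype, shape, ndim, ...
print(type(py_pi))            # <class 'float'>

arr_pi = np.array(np.pi, dtype=np.float64)      # dimension-0 array, same value
print(arr_pi.ndim, arr_pi.shape, arr_pi.dtype)  # 0 () float64
print(float(arr_pi))          # still usable as a scalar: 3.141592653589793
```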
f012d59f163a8b8a693dc894d211f077ae015d11
Instanssi/kompomaatti/tests.py
Instanssi/kompomaatti/tests.py
from django.test import TestCase from Instanssi.kompomaatti.models import Entry VALID_YOUTUBE_URLS = [ # must handle various protocols in the video URL "http://www.youtube.com/v/asdf123456", "https://www.youtube.com/v/asdf123456/", "//www.youtube.com/v/asdf123456", "www.youtube.com/v/asdf123456", # must handle various other ways to define the video "www.youtube.com/watch?v=asdf123456", "http://youtu.be/asdf123456", "http://youtu.be/asdf123456/" ] class KompomaattiTests(TestCase): def setUp(self): pass def test_youtube_urls(self): """Test that various YouTube URLs are parsed properly.""" for url in VALID_YOUTUBE_URLS: print("Test URL: %s" % url) self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456")
from django.test import TestCase from Instanssi.kompomaatti.models import Entry VALID_YOUTUBE_URLS = [ # must handle various protocols and hostnames in the video URL "http://www.youtube.com/v/asdf123456", "https://www.youtube.com/v/asdf123456/", "//www.youtube.com/v/asdf123456", "www.youtube.com/v/asdf123456", "youtube.com/v/asdf123456/", # must handle various other ways to define the video "www.youtube.com/watch?v=asdf123456", "http://youtu.be/asdf123456", "https://youtu.be/asdf123456/" ] class KompomaattiTests(TestCase): def setUp(self): pass def test_youtube_urls(self): """Test YouTube video id extraction from URLs.""" for url in VALID_YOUTUBE_URLS: self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456", msg="failing URL: %s" % url)
Add more test data; improve feedback on failing case
kompomaatti: Add more test data; improve feedback on failing case
Python
mit
Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org
--- +++ @@ -3,15 +3,16 @@ VALID_YOUTUBE_URLS = [ - # must handle various protocols in the video URL + # must handle various protocols and hostnames in the video URL "http://www.youtube.com/v/asdf123456", "https://www.youtube.com/v/asdf123456/", "//www.youtube.com/v/asdf123456", "www.youtube.com/v/asdf123456", + "youtube.com/v/asdf123456/", # must handle various other ways to define the video "www.youtube.com/watch?v=asdf123456", "http://youtu.be/asdf123456", - "http://youtu.be/asdf123456/" + "https://youtu.be/asdf123456/" ] @@ -20,7 +21,7 @@ pass def test_youtube_urls(self): - """Test that various YouTube URLs are parsed properly.""" + """Test YouTube video id extraction from URLs.""" for url in VALID_YOUTUBE_URLS: - print("Test URL: %s" % url) - self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456") + self.assertEqual(Entry.youtube_url_to_id(url), "asdf123456", + msg="failing URL: %s" % url)
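The improved test passes `msg=` to `assertEqual` so a failure names the offending URL. `unittest.subTest` (Python 3.4+) gives similar per-item reporting without aborting the loop; both are shown below on toy data, with an invented `to_id` helper standing in for `Entry.youtube_url_to_id`:

```python
import unittest

URLS = ["http://youtu.be/asdf123456", "https://youtu.be/asdf123456/"]


def to_id(url):
    # Toy stand-in for the real parser in the record above.
    return url.rstrip('/').rsplit('/', 1)[-1]


class UrlTests(unittest.TestCase):
    def test_with_msg(self):
        for url in URLS:
            # msg= is appended to the assertion error, naming the failing input.
            self.assertEqual(to_id(url), "asdf123456", msg="failing URL: %s" % url)

    def test_with_subtest(self):
        for url in URLS:
            # Each failing url is reported separately; the loop keeps going.
            with self.subTest(url=url):
                self.assertEqual(to_id(url), "asdf123456")


if __name__ == '__main__':
    unittest.main()
```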
ed5efcadb03aed8fa48ebde618317d795387d1f5
smile_base/models/mail_mail.py
smile_base/models/mail_mail.py
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2011 Smile (<http://www.smile.fr>). All Rights Reserved # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import api, models, tools from openerp.addons.mail.mail_mail import _logger class MailMail(models.Model): _inherit = 'mail.mail' @api.cr_uid def process_email_queue(self, cr, uid, ids=None, context=None): if not tools.config.get('enable_email_sending'): _logger.warning('Email sending not enable') return True return super(MailMail, self).process_email_queue(cr, uid, ids, context)
# -*- encoding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2011 Smile (<http://www.smile.fr>). All Rights Reserved # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import api, models, tools from openerp.addons.mail.mail_mail import _logger class MailMail(models.Model): _inherit = 'mail.mail' @api.cr_uid def process_email_queue(self, cr, uid, ids=None, context=None): if not tools.config.get('enable_email_sending'): _logger.warning('Email sending not enabled') return True return super(MailMail, self).process_email_queue(cr, uid, ids, context)
Improve warning "Email sending not enabled"
[IMP] Improve warning "Email sending not enabled"
Python
agpl-3.0
odoocn/odoo_addons,tiexinliu/odoo_addons,bmya/odoo_addons,odoocn/odoo_addons,odoocn/odoo_addons,tiexinliu/odoo_addons,tiexinliu/odoo_addons,bmya/odoo_addons,bmya/odoo_addons
--- +++ @@ -29,6 +29,6 @@ @api.cr_uid def process_email_queue(self, cr, uid, ids=None, context=None): if not tools.config.get('enable_email_sending'): - _logger.warning('Email sending not enable') + _logger.warning('Email sending not enabled') return True return super(MailMail, self).process_email_queue(cr, uid, ids, context)
e40915461f9c371a56a364e1de27bcea05dc1a37
satori/__init__.py
satori/__init__.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # __all__ = ['__version__'] import pbr.version from .shell import main version_info = pbr.version.VersionInfo('satori') try: __version__ = version_info.version_string() except AttributeError: __version__ = None def discover(address=None): """Temporary to demo python API. TODO(zns): make it real """ main(argv=[address]) return {'address': address, 'other info': '...'}
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # __all__ = ['__version__'] import pbr.version from satori.shell import main version_info = pbr.version.VersionInfo('satori') try: __version__ = version_info.version_string() except AttributeError: __version__ = None def discover(address=None): """Temporary to demo python API. TODO(zns): make it real """ main(argv=[address]) return {'address': address, 'other info': '...'}
Fix flake8 error - relative import
Fix flake8 error - relative import
Python
apache-2.0
mgeisler/satori,mgeisler/satori,lil-cain/satori,rackerlabs/satori,rackerlabs/satori,lil-cain/satori,lil-cain/satori,samstav/satori,samstav/satori,samstav/satori
--- +++ @@ -15,7 +15,7 @@ import pbr.version -from .shell import main +from satori.shell import main version_info = pbr.version.VersionInfo('satori')
948c9c6ffb8a34e3acf00b8190bf65504f2bfaf6
app.py
app.py
import falcon from resources.waifu_message_resource import WaifuMessageResource api = falcon.API() api.add_route('/waifu/messages', WaifuMessageResource())
import falcon from resources.user_resource import UserResource, UserAuthResource from resources.waifu_message_resource import WaifuMessageResource from resources.waifu_resource import WaifuResource api = falcon.API() api.add_route('/user', UserResource()) api.add_route('/user/auth', UserAuthResource()) api.add_route('/waifu', WaifuResource()) api.add_route('/waifu/messages', WaifuMessageResource())
Add endpoints for all resources.
Add endpoints for all resources.
Python
cc0-1.0
sketchturnerr/WaifuSim-backend,sketchturnerr/WaifuSim-backend
--- +++ @@ -1,5 +1,11 @@ import falcon +from resources.user_resource import UserResource, UserAuthResource from resources.waifu_message_resource import WaifuMessageResource +from resources.waifu_resource import WaifuResource api = falcon.API() + +api.add_route('/user', UserResource()) +api.add_route('/user/auth', UserAuthResource()) +api.add_route('/waifu', WaifuResource()) api.add_route('/waifu/messages', WaifuMessageResource())
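The new routes follow Falcon's resource-per-path model: a plain class with `on_get`/`on_post` responders registered via `add_route`. A minimal hedged sketch using the older `falcon.API()` constructor seen in the record (newer releases spell it `falcon.App()`); the resource and path are illustrative:

```python
import falcon


class PingResource(object):
    """Illustrative resource; answers GET /ping."""

    def on_get(self, req, resp):
        resp.media = {'status': 'ok'}  # resp.media needs Falcon 1.3+


api = falcon.API()                 # same constructor as in the record
api.add_route('/ping', PingResource())

# Run with any WSGI server, e.g.:  gunicorn mymodule:api
```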
09dbb096a796f87e9b43e463d929796db7f96182
app.py
app.py
from flask import Flask, request import subprocess import uuid import os import re import json app = Flask(__name__) @app.route('/api/v1/', methods=["GET"]) def lint(): id = uuid.uuid4() filename = os.path.join("tmp", "{}.md".format(id)) with open(filename, "w+") as f: f.write(request.values['text']) out = subprocess.check_output("proselint {}".format(filename), shell=True) r = re.compile( "(?:.*).md:(?P<line>\d*):(?P<column>\d*): (?P<err>\w{6}) (?P<msg>.*)") out2 = sorted([r.search(line).groupdict() for line in out.splitlines()]) return json.dumps(out2) if __name__ == '__main__': app.debug = True app.run()
from flask import Flask, request import subprocess import uuid import os import re import json app = Flask(__name__) @app.route('/v1/', methods=["GET"]) def lint(): id = uuid.uuid4() filename = os.path.join("tmp", "{}.md".format(id)) with open(filename, "w+") as f: f.write(request.values['text']) out = subprocess.check_output("proselint {}".format(filename), shell=True) r = re.compile( "(?:.*).md:(?P<line>\d*):(?P<column>\d*): (?P<err>\w{6}) (?P<msg>.*)") out2 = sorted([r.search(line).groupdict() for line in out.splitlines()]) return json.dumps(out2) if __name__ == '__main__': app.debug = True app.run()
Remove redundant 'api' from URL
Remove redundant 'api' from URL
Python
bsd-3-clause
amperser/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint
--- +++ @@ -8,7 +8,7 @@ app = Flask(__name__) [email protected]('/api/v1/', methods=["GET"]) [email protected]('/v1/', methods=["GET"]) def lint(): id = uuid.uuid4()
bea8b07180e6e9b2c786dfe37e12e75090363a1c
run.py
run.py
import os import json default_conf = { 'reddit': { 'username': '', 'password': '', }, 'twitter': { 'consumer_key': '', 'consumer_secret': '', 'access_token': '', 'access_secret': '', }, } if __name__ == '__main__': if not os.path.isfile('config.json'): config = json.dumps(default_conf, indent=4, sort_keys=True) with open('config.json', 'w') as f: f.write(config) print 'Created default config in config.json, please edit'
import os import json import sys default_conf = { 'reddit': { 'username': '', 'password': '', }, 'twitter': { 'consumer_key': '', 'consumer_secret': '', 'access_token': '', 'access_secret': '', }, } def write_conf(conf): config = json.dumps(conf, indent=4, sort_keys=True) with open('config.json', 'w') as f: f.write(config) if __name__ == '__main__': if not os.path.isfile('config.json'): write_conf(default_conf) print 'Created default config in config.json, please edit' elif 'updateconf' in sys.argv: with open('config.json', 'r') as f: config = json.loads(f.read()) default_conf.update(config) write_conf(default_conf)
Add twitter stuff to default config and allow easier merging of configs
Add twitter stuff to default config and allow easier merging of configs
Python
mit
r3m0t/TweetPoster,joealcorn/TweetPoster,tytek2012/TweetPoster,aperson/TweetPoster
--- +++ @@ -1,5 +1,6 @@ import os import json +import sys default_conf = { 'reddit': { @@ -15,10 +16,20 @@ } +def write_conf(conf): + config = json.dumps(conf, indent=4, sort_keys=True) + with open('config.json', 'w') as f: + f.write(config) + + if __name__ == '__main__': if not os.path.isfile('config.json'): - config = json.dumps(default_conf, indent=4, sort_keys=True) - with open('config.json', 'w') as f: - f.write(config) + write_conf(default_conf) + print 'Created default config in config.json, please edit' - print 'Created default config in config.json, please edit' + elif 'updateconf' in sys.argv: + with open('config.json', 'r') as f: + config = json.loads(f.read()) + + default_conf.update(config) + write_conf(default_conf)
9d65eaa14bc3f04ea998ed7bc43b7c71e5d232f7
v3/scripts/testing/create-8gb-metadata.py
v3/scripts/testing/create-8gb-metadata.py
#!/usr/bin/env python # -*- coding: utf8 -*- __author__ = 'eric' ''' Need to create some test data '''
#!/usr/bin/env python # -*- coding: utf8 -*- __author__ = 'eric' ''' Need to create some test data 8 gigabytes dataset '''
Test script for generating metadata
Test script for generating metadata
Python
mit
TheShellLand/pies,TheShellLand/pies
--- +++ @@ -5,4 +5,6 @@ ''' Need to create some test data + 8 gigabytes dataset ''' +
53d09ddacc92a52219a3cd18bba606840b870fcd
vumi_http_proxy/test/test_servicemaker.py
vumi_http_proxy/test/test_servicemaker.py
from vumi_http_proxy.servicemaker import Options, ProxyWorkerServiceMaker from vumi_http_proxy import http_proxy from twisted.trial import unittest class TestOptions(unittest.TestCase): def test_defaults(self): options = Options() options.parseOptions([]) self.assertEqual(options["port"], 8080) self.assertEqual(str(options["interface"]), "0.0.0.0") def test_override(self): options = Options() options.parseOptions(["--port", 8000]) options.parseOptions(["--interface", "127.0.0.1"]) self.assertEqual(options["port"], "8000") self.assertEqual(str(options["interface"]), "127.0.0.1") class TestProxyWorkerServiceMaker(unittest.TestCase): def test_makeService(self): options = Options() options.parseOptions([]) servicemaker = ProxyWorkerServiceMaker() service = servicemaker.makeService(options) self.assertTrue(isinstance(service.factory, http_proxy.ProxyFactory)) self.assertEqual(service.endpoint._interface, '0.0.0.0') self.assertEqual(service.endpoint._port, 8080)
from vumi_http_proxy.servicemaker import ( Options, ProxyWorkerServiceMaker, client) from vumi_http_proxy import http_proxy from twisted.trial import unittest from vumi_http_proxy.test import helpers class TestOptions(unittest.TestCase): def test_defaults(self): options = Options() options.parseOptions([]) self.assertEqual(options["port"], 8080) self.assertEqual(str(options["interface"]), "0.0.0.0") def test_override(self): options = Options() options.parseOptions(["--port", 8000]) options.parseOptions(["--interface", "127.0.0.1"]) self.assertEqual(options["port"], "8000") self.assertEqual(str(options["interface"]), "127.0.0.1") class TestProxyWorkerServiceMaker(unittest.TestCase): def test_makeService(self): options = Options() options.parseOptions([]) self.patch(client, 'createResolver', lambda: helpers.TestResolver()) servicemaker = ProxyWorkerServiceMaker() service = servicemaker.makeService(options) self.assertTrue(isinstance(service.factory, http_proxy.ProxyFactory)) self.assertEqual(service.endpoint._interface, '0.0.0.0') self.assertEqual(service.endpoint._port, 8080)
Patch out DNS resolver in makeService tests.
Patch out DNS resolver in makeService tests.
Python
bsd-3-clause
praekelt/vumi-http-proxy,praekelt/vumi-http-proxy
--- +++ @@ -1,6 +1,8 @@ -from vumi_http_proxy.servicemaker import Options, ProxyWorkerServiceMaker +from vumi_http_proxy.servicemaker import ( + Options, ProxyWorkerServiceMaker, client) from vumi_http_proxy import http_proxy from twisted.trial import unittest +from vumi_http_proxy.test import helpers class TestOptions(unittest.TestCase): @@ -22,6 +24,7 @@ def test_makeService(self): options = Options() options.parseOptions([]) + self.patch(client, 'createResolver', lambda: helpers.TestResolver()) servicemaker = ProxyWorkerServiceMaker() service = servicemaker.makeService(options) self.assertTrue(isinstance(service.factory, http_proxy.ProxyFactory))
2cc55a25b13ac6575424ba70857a8419b796ca76
_tests/macro_testing/runner.py
_tests/macro_testing/runner.py
# -*- coding: utf-8 -*- import os, os.path import sys import unittest from macrotest import JSONSpecMacroTestCaseFactory def JSONTestCaseLoader(tests_path, recursive=False): """ Load JSON specifications for Jinja2 macro test cases from the given path and returns the resulting test classes. This function will create a MacroTestCase subclass (using JSONSpecMacrosTestCaseFactory) for each JSON file in the given path. If `recursive` is True, it will also look in subdirectories. This is not yet supported. """ json_files = [f for f in os.listdir(tests_path) if f.endswith('.json')] for json_file in json_files: # Create a camelcased name for the test. This is a minor thing, but I # think it's nice. name, extension = os.path.splitext(json_file) class_name = ''.join(x for x in name.title() if x not in ' _-') + 'TestCase' # Get the full path to the file and create a test class json_file_path = os.path.join(tests_path, json_file) test_class = JSONSpecMacroTestCaseFactory(class_name, json_file_path) # Add the test class to globals() so that unittest.main() picks it up globals()[class_name] = test_class if __name__ == '__main__': JSONTestCaseLoader('./tests/') unittest.main()
# -*- coding: utf-8 -*- import os, os.path import sys import unittest from macrotest import JSONSpecMacroTestCaseFactory def JSONTestCaseLoader(tests_path, recursive=False): """ Load JSON specifications for Jinja2 macro test cases from the given path and returns the resulting test classes. This function will create a MacroTestCase subclass (using JSONSpecMacrosTestCaseFactory) for each JSON file in the given path. If `recursive` is True, it will also look in subdirectories. This is not yet supported. """ path = os.path.abspath(os.path.join(os.path.dirname( __file__ ), tests_path)) json_files = [f for f in os.listdir(path) if f.endswith('.json')] for json_file in json_files: # Create a camelcased name for the test. This is a minor thing, but I # think it's nice. name, extension = os.path.splitext(json_file) class_name = ''.join(x for x in name.title() if x not in ' _-') + 'TestCase' # Get the full path to the file and create a test class json_file_path = os.path.join(path, json_file) test_class = JSONSpecMacroTestCaseFactory(class_name, json_file_path) # Add the test class to globals() so that unittest.main() picks it up globals()[class_name] = test_class if __name__ == '__main__': JSONTestCaseLoader('./tests/') unittest.main()
Make the paths not relative, so tests can be run from anywhere.
Make the paths not relative, so tests can be run from anywhere.
Python
cc0-1.0
imuchnik/cfgov-refresh,imuchnik/cfgov-refresh,imuchnik/cfgov-refresh,imuchnik/cfgov-refresh
--- +++ @@ -18,8 +18,9 @@ If `recursive` is True, it will also look in subdirectories. This is not yet supported. """ + path = os.path.abspath(os.path.join(os.path.dirname( __file__ ), tests_path)) - json_files = [f for f in os.listdir(tests_path) if f.endswith('.json')] + json_files = [f for f in os.listdir(path) if f.endswith('.json')] for json_file in json_files: # Create a camelcased name for the test. This is a minor thing, but I # think it's nice. @@ -27,7 +28,7 @@ class_name = ''.join(x for x in name.title() if x not in ' _-') + 'TestCase' # Get the full path to the file and create a test class - json_file_path = os.path.join(tests_path, json_file) + json_file_path = os.path.join(path, json_file) test_class = JSONSpecMacroTestCaseFactory(class_name, json_file_path) # Add the test class to globals() so that unittest.main() picks it up
2cde3dbb69077054c6422cbe96e9b996be700d29
pulldb/api/subscriptions.py
pulldb/api/subscriptions.py
import json import logging from google.appengine.api import oauth from google.appengine.ext import ndb from pulldb import users from pulldb.api.base import OauthHandler, JsonModel from pulldb.base import create_app, Route from pulldb.models.subscriptions import Subscription, subscription_context class ListSubs(OauthHandler): def get(self): user_key = users.user_key(oauth.get_current_user(self.scope)) query = Subscription.query(ancestor=user_key) results = query.map(subscription_context) self.response.write(JsonModel().encode(list(results))) app = create_app([ Route('/api/subscriptions/list', 'pulldb.api.subscriptions.ListSubs'), ])
import json import logging from google.appengine.api import oauth from google.appengine.ext import ndb from pulldb import users from pulldb.api.base import OauthHandler, JsonModel from pulldb.base import create_app, Route from pulldb.models.subscriptions import Subscription, subscription_context class ListSubs(OauthHandler): def get(self): user_key = users.user_key(self.user) query = Subscription.query(ancestor=user_key) results = query.map(subscription_context) self.response.write(JsonModel().encode(list(results))) app = create_app([ Route('/api/subscriptions/list', 'pulldb.api.subscriptions.ListSubs'), ])
Make subscription handler less OAuth-dependent
Make subscription handler less OAuth-dependent
Python
mit
xchewtoyx/pulldb
--- +++ @@ -11,7 +11,7 @@ class ListSubs(OauthHandler): def get(self): - user_key = users.user_key(oauth.get_current_user(self.scope)) + user_key = users.user_key(self.user) query = Subscription.query(ancestor=user_key) results = query.map(subscription_context) self.response.write(JsonModel().encode(list(results)))
a18eb7509619914cd0565255730ed6bb40f14c9b
ovp_users/emails.py
ovp_users/emails.py
from django.core.mail import EmailMultiAlternatives from django.template import Context, Template from django.template.loader import get_template from django.conf import settings import threading class EmailThread(threading.Thread): def __init__(self, msg): self.msg = msg threading.Thread.__init__(self) def run (self): return self.msg.send() > 0 class BaseMail: """ This class is responsible for firing emails """ from_email = '' def __init__(self, user, async_mail=None): self.user = user self.async_mail = async_mail def sendEmail(self, template_name, subject, context): ctx = Context(context) text_content = get_template('email/{}.txt'.format(template_name)).render(ctx) html_content = get_template('email/{}.html'.format(template_name)).render(ctx) msg = EmailMultiAlternatives(subject, text_content, self.from_email, [self.user.email]) msg.attach_alternative(text_content, "text/plain") msg.attach_alternative(html_content, "text/html") if self.async_mail: async_flag="async" else: async_flag=getattr(settings, "DEFAULT_SEND_EMAIL", "async") if async_flag == "async": t = EmailThread(msg) t.start() return t else: return msg.send() > 0 class UserMail(BaseMail): """ This class is responsible for firing emails for Users """ def sendWelcome(self, context={}): """ Sent when user registers """ return self.sendEmail('welcome', 'Welcome', context) def sendRecoveryToken(self, context): """ Sent when volunteer requests recovery token """ return self.sendEmail('recoveryToken', 'Password recovery', context)
from ovp_core.emails import BaseMail class UserMail(BaseMail): """ This class is responsible for firing emails for Users """ def sendWelcome(self, context={}): """ Sent when user registers """ return self.sendEmail('welcome', 'Welcome', context) def sendRecoveryToken(self, context): """ Sent when volunteer requests recovery token """ return self.sendEmail('recoveryToken', 'Password recovery', context)
Move BaseMail from ovp-users to ovp-core
Move BaseMail from ovp-users to ovp-core
Python
agpl-3.0
OpenVolunteeringPlatform/django-ovp-users,OpenVolunteeringPlatform/django-ovp-users
--- +++ @@ -1,50 +1,4 @@ -from django.core.mail import EmailMultiAlternatives -from django.template import Context, Template -from django.template.loader import get_template -from django.conf import settings - -import threading - -class EmailThread(threading.Thread): - def __init__(self, msg): - self.msg = msg - threading.Thread.__init__(self) - - def run (self): - return self.msg.send() > 0 - - -class BaseMail: - """ - This class is responsible for firing emails - """ - from_email = '' - - def __init__(self, user, async_mail=None): - self.user = user - self.async_mail = async_mail - - def sendEmail(self, template_name, subject, context): - ctx = Context(context) - text_content = get_template('email/{}.txt'.format(template_name)).render(ctx) - html_content = get_template('email/{}.html'.format(template_name)).render(ctx) - - msg = EmailMultiAlternatives(subject, text_content, self.from_email, [self.user.email]) - msg.attach_alternative(text_content, "text/plain") - msg.attach_alternative(html_content, "text/html") - - - if self.async_mail: - async_flag="async" - else: - async_flag=getattr(settings, "DEFAULT_SEND_EMAIL", "async") - - if async_flag == "async": - t = EmailThread(msg) - t.start() - return t - else: - return msg.send() > 0 +from ovp_core.emails import BaseMail class UserMail(BaseMail): """
d940faee24f01d18e398da0592d88bed3908a46d
ox_herd/__init__.py
ox_herd/__init__.py
"""The ox_herd package provides a way for herding tests together (like oxen). """ VERSION = '0.6.10'
"""The ox_herd package provides a way for herding tests together (like oxen). """ VERSION = '0.6.11'
Fix bug in showing form
Fix bug in showing form
Python
bsd-2-clause
aocks/ox_herd,aocks/ox_herd,aocks/ox_herd
--- +++ @@ -1,4 +1,4 @@ """The ox_herd package provides a way for herding tests together (like oxen). """ -VERSION = '0.6.10' +VERSION = '0.6.11'
709d4386a308ce8c0767eab1f2174ec321ea59fd
client/main.py
client/main.py
import requests import yaml def runLoop( config ): """ Runs indefinitely. On user input (card swipe), will gather the card number, send it to the server configured, and if it has been authorized, open the relay with a GPIO call. """ while True: swipe = input() cardNumber = swipe print( 'The last input was ' + cardNumber ) try: res = queryServer( cardNumber, config ) except requests.exceptions.Timeout: print( "Server timeout!" ) continue if res['isAuthorized']: # open the relay here pass def queryServer( cardNumber, config ): url = 'http://' + str( config['serverAddress'] ) + ':' + str( config['serverPort'] ) req = requests.get( url, { 'cardNumber': cardNumber, 'machineID': config['machineID'], 'machineType': config['machineType'] }, timeout=config['timeout'] ) return req.json() if __name__ == '__main__': # read and return a yaml file (called 'config.yaml' by default) and give it # back as a dictionary with open( 'config.yaml' ) as f: config = yaml.load( f ) # run the main loop runLoop( config )
import requests import yaml def runLoop( config ): """ Runs indefinitely. On user input (card swipe), will gather the card number, send it to the server configured, and if it has been authorized, open the relay with a GPIO call. """ while True: swipe = input() cardNumber = swipe print( 'The last input was ' + cardNumber ) try: res = requestAuthorization( cardNumber, config ) except requests.exceptions.Timeout: print( "Server timeout!" ) continue if res['isAuthorized']: # open the relay here pass def requestAuthorization( cardNumber, config ): url = 'http://' + str( config['serverAddress'] ) + ':' + str( config['serverPort'] ) path = '/users/checkAuthorization' req = requests.get( url + path, { 'cardNumber': cardNumber, 'machineID': config['machineID'], 'machineType': config['machineType'] }, timeout=config['timeout'] ) return req.json() if __name__ == '__main__': # read and return a yaml file (called 'config.yaml' by default) and give it # back as a dictionary with open( 'config.yaml' ) as f: config = yaml.load( f ) # run the main loop runLoop( config )
Rename function to match corresponding HTTP request
Rename function to match corresponding HTTP request
Python
mit
aradler/Card-lockout,aradler/Card-lockout,aradler/Card-lockout
--- +++ @@ -15,7 +15,7 @@ print( 'The last input was ' + cardNumber ) try: - res = queryServer( cardNumber, config ) + res = requestAuthorization( cardNumber, config ) except requests.exceptions.Timeout: print( "Server timeout!" ) continue @@ -25,10 +25,11 @@ pass -def queryServer( cardNumber, config ): +def requestAuthorization( cardNumber, config ): url = 'http://' + str( config['serverAddress'] ) + ':' + str( config['serverPort'] ) + path = '/users/checkAuthorization' - req = requests.get( url, { + req = requests.get( url + path, { 'cardNumber': cardNumber, 'machineID': config['machineID'], 'machineType': config['machineType']
8960be78113ba122dbbac1ed26d82abf814bbfdf
src/lesson/main.py
src/lesson/main.py
# lesson/main.py # # This file is part of LESSON. LESSON is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2 or later. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright (C) 2012 Jonathan Dieter <[email protected]> import sys, os abspath = os.path.dirname(__file__) if abspath not in sys.path: sys.path.append(abspath) os.chdir(abspath) import render mode = "debug" try: from mod_wsgi import version print "Detected mod_wgi version %i.%i" % version mode = "wsgi" except: pass application = render.start(mode)
# lesson/main.py # # This file is part of LESSON. LESSON is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2 or later. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright (C) 2012 Jonathan Dieter <[email protected]> import sys, os abspath = os.path.dirname(__file__) if abspath not in sys.path: sys.path.append(abspath) os.chdir(abspath) import render mode = "debug" try: from mod_wsgi import version #@UnresolvedImport print "Detected mod_wgi version %i.%i" % version mode = "wsgi" except: pass application = render.start(mode)
Fix Eclipse error message on mod_wsgi import
Fix Eclipse error message on mod_wsgi import Signed-off-by: Jonathan Dieter <[email protected]>
Python
agpl-3.0
lesbg/lesson-backend
--- +++ @@ -26,7 +26,7 @@ mode = "debug" try: - from mod_wsgi import version + from mod_wsgi import version #@UnresolvedImport print "Detected mod_wgi version %i.%i" % version mode = "wsgi" except:
7206d68648c91790ac4fa14a3074c77c97c01636
mopidy/backends/base/__init__.py
mopidy/backends/base/__init__.py
import logging from .current_playlist import CurrentPlaylistController from .library import LibraryController, BaseLibraryProvider from .playback import PlaybackController, BasePlaybackProvider from .stored_playlists import (StoredPlaylistsController, BaseStoredPlaylistsProvider) logger = logging.getLogger('mopidy.backends.base') class Backend(object): #: The current playlist controller. An instance of #: :class:`mopidy.backends.base.CurrentPlaylistController`. current_playlist = None #: The library controller. An instance of # :class:`mopidy.backends.base.LibraryController`. library = None #: The sound mixer. An instance of :class:`mopidy.mixers.BaseMixer`. mixer = None #: The playback controller. An instance of #: :class:`mopidy.backends.base.PlaybackController`. playback = None #: The stored playlists controller. An instance of #: :class:`mopidy.backends.base.StoredPlaylistsController`. stored_playlists = None #: List of URI prefixes this backend can handle. uri_handlers = []
import logging from .current_playlist import CurrentPlaylistController from .library import LibraryController, BaseLibraryProvider from .playback import PlaybackController, BasePlaybackProvider from .stored_playlists import (StoredPlaylistsController, BaseStoredPlaylistsProvider) logger = logging.getLogger('mopidy.backends.base') class Backend(object): #: The current playlist controller. An instance of #: :class:`mopidy.backends.base.CurrentPlaylistController`. current_playlist = None #: The library controller. An instance of # :class:`mopidy.backends.base.LibraryController`. library = None #: The playback controller. An instance of #: :class:`mopidy.backends.base.PlaybackController`. playback = None #: The stored playlists controller. An instance of #: :class:`mopidy.backends.base.StoredPlaylistsController`. stored_playlists = None #: List of URI prefixes this backend can handle. uri_handlers = []
Remove mixer from the Backend API as it is independent
Remove mixer from the Backend API as it is independent
Python
apache-2.0
adamcik/mopidy,vrs01/mopidy,pacificIT/mopidy,jmarsik/mopidy,jcass77/mopidy,glogiotatidis/mopidy,kingosticks/mopidy,ZenithDK/mopidy,rawdlite/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,tkem/mopidy,kingosticks/mopidy,jmarsik/mopidy,SuperStarPL/mopidy,bencevans/mopidy,diandiankan/mopidy,quartz55/mopidy,glogiotatidis/mopidy,quartz55/mopidy,priestd09/mopidy,pacificIT/mopidy,SuperStarPL/mopidy,bacontext/mopidy,rawdlite/mopidy,mopidy/mopidy,bencevans/mopidy,pacificIT/mopidy,jodal/mopidy,diandiankan/mopidy,mopidy/mopidy,abarisain/mopidy,tkem/mopidy,SuperStarPL/mopidy,abarisain/mopidy,jmarsik/mopidy,woutervanwijk/mopidy,bacontext/mopidy,adamcik/mopidy,swak/mopidy,ZenithDK/mopidy,quartz55/mopidy,hkariti/mopidy,vrs01/mopidy,ali/mopidy,vrs01/mopidy,woutervanwijk/mopidy,ali/mopidy,jodal/mopidy,dbrgn/mopidy,jmarsik/mopidy,jcass77/mopidy,ali/mopidy,jcass77/mopidy,liamw9534/mopidy,pacificIT/mopidy,hkariti/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,swak/mopidy,adamcik/mopidy,priestd09/mopidy,dbrgn/mopidy,mokieyue/mopidy,kingosticks/mopidy,tkem/mopidy,liamw9534/mopidy,rawdlite/mopidy,quartz55/mopidy,priestd09/mopidy,vrs01/mopidy,ali/mopidy,mokieyue/mopidy,bencevans/mopidy,bencevans/mopidy,mokieyue/mopidy,diandiankan/mopidy,bacontext/mopidy,jodal/mopidy,mopidy/mopidy,hkariti/mopidy,dbrgn/mopidy,ZenithDK/mopidy,tkem/mopidy,swak/mopidy,bacontext/mopidy,swak/mopidy,mokieyue/mopidy,rawdlite/mopidy,diandiankan/mopidy,hkariti/mopidy,SuperStarPL/mopidy
--- +++ @@ -17,9 +17,6 @@ # :class:`mopidy.backends.base.LibraryController`. library = None - #: The sound mixer. An instance of :class:`mopidy.mixers.BaseMixer`. - mixer = None - #: The playback controller. An instance of #: :class:`mopidy.backends.base.PlaybackController`. playback = None
b24af9c3e4105d7acd2e9e13545f24d5a69ae230
saleor/product/migrations/0018_auto_20161212_0725.py
saleor/product/migrations/0018_auto_20161212_0725.py
# -*- coding: utf-8 -*- # Generated by Django 1.10.3 on 2016-12-12 13:25 from __future__ import unicode_literals from django.db import migrations from django.utils.text import slugify def create_slugs(apps, schema_editor): Value = apps.get_model('product', 'AttributeChoiceValue') for value in Value.objects.all(): value.slug = slugify(value.display) value.save() class Migration(migrations.Migration): dependencies = [ ('product', '0017_attributechoicevalue_slug'), ] operations = [ migrations.RunPython(create_slugs), ]
# -*- coding: utf-8 -*- # Generated by Django 1.10.3 on 2016-12-12 13:25 from __future__ import unicode_literals from django.db import migrations from django.utils.text import slugify def create_slugs(apps, schema_editor): Value = apps.get_model('product', 'AttributeChoiceValue') for value in Value.objects.all(): value.slug = slugify(value.display) value.save() class Migration(migrations.Migration): dependencies = [ ('product', '0017_attributechoicevalue_slug'), ] operations = [ migrations.RunPython(create_slugs, migrations.RunPython.noop), ]
Allow reverting data migration
Allow reverting data migration
Python
bsd-3-clause
KenMutemi/saleor,maferelo/saleor,jreigel/saleor,KenMutemi/saleor,jreigel/saleor,itbabu/saleor,itbabu/saleor,HyperManTT/ECommerceSaleor,UITools/saleor,tfroehlich82/saleor,KenMutemi/saleor,mociepka/saleor,car3oon/saleor,tfroehlich82/saleor,HyperManTT/ECommerceSaleor,itbabu/saleor,UITools/saleor,UITools/saleor,UITools/saleor,UITools/saleor,car3oon/saleor,jreigel/saleor,mociepka/saleor,car3oon/saleor,HyperManTT/ECommerceSaleor,maferelo/saleor,maferelo/saleor,mociepka/saleor,tfroehlich82/saleor
--- +++ @@ -20,5 +20,5 @@ ] operations = [ - migrations.RunPython(create_slugs), + migrations.RunPython(create_slugs, migrations.RunPython.noop), ]
9d0e9af5844772c18ca24d4012642d4518b66dfc
tests/test_judicious.py
tests/test_judicious.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """Tests for `judicious` package.""" import pytest import judicious @pytest.fixture def response(): """Sample pytest fixture. See more at: http://doc.pytest.org/en/latest/fixture.html """ # import requests # return requests.get('https://github.com/audreyr/cookiecutter-pypackage') def test_content(response): """Sample pytest test function with the pytest fixture as an argument.""" # from bs4 import BeautifulSoup # assert 'GitHub' in BeautifulSoup(response.content).title.string
#!/usr/bin/env python # -*- coding: utf-8 -*- """Tests for `judicious` package.""" import random import pytest import judicious def test_seeding(): r1 = random.random() r2 = random.random() judicious.seed("70d911d5-6d93-3c42-f9a4-53e493a79bff") r3 = random.random() r4 = random.random() judicious.seed("70d911d5-6d93-3c42-f9a4-53e493a79bff") r5 = random.random() r6 = random.random() judicious.seed() r7 = random.random() r8 = random.random() assert(r1 != r3) assert(r2 != r4) assert(r3 == r5) assert(r4 == r6) assert(r5 != r7) assert(r6 != r8) @pytest.fixture def response(): """Sample pytest fixture. See more at: http://doc.pytest.org/en/latest/fixture.html """ # import requests # return requests.get('https://github.com/audreyr/cookiecutter-pypackage') def test_content(response): """Sample pytest test function with the pytest fixture as an argument.""" # from bs4 import BeautifulSoup # assert 'GitHub' in BeautifulSoup(response.content).title.string
Add test of seeding PRNG
Add test of seeding PRNG
Python
mit
suchow/judicious,suchow/judicious,suchow/judicious
--- +++ @@ -3,10 +3,32 @@ """Tests for `judicious` package.""" +import random + import pytest +import judicious -import judicious + +def test_seeding(): + r1 = random.random() + r2 = random.random() + judicious.seed("70d911d5-6d93-3c42-f9a4-53e493a79bff") + r3 = random.random() + r4 = random.random() + judicious.seed("70d911d5-6d93-3c42-f9a4-53e493a79bff") + r5 = random.random() + r6 = random.random() + judicious.seed() + r7 = random.random() + r8 = random.random() + + assert(r1 != r3) + assert(r2 != r4) + assert(r3 == r5) + assert(r4 == r6) + assert(r5 != r7) + assert(r6 != r8) @pytest.fixture
d46d908f5cfafcb6962207c45f923d3afb7f35a7
pyrobus/__init__.py
pyrobus/__init__.py
from .robot import Robot from .modules import *
import logging from .robot import Robot from .modules import * nh = logging.NullHandler() logging.getLogger(__name__).addHandler(nh)
Add null handler as default for logging.
Add null handler as default for logging.
Python
mit
pollen/pyrobus
--- +++ @@ -1,2 +1,8 @@ +import logging + from .robot import Robot from .modules import * + + +nh = logging.NullHandler() +logging.getLogger(__name__).addHandler(nh)
c220c0a474a660c4c1167d42fdd0d48599b1b593
tests/test_pathutils.py
tests/test_pathutils.py
from os.path import join import sublime import sys from unittest import TestCase version = sublime.version() try: from libsass import pathutils except ImportError: from sublime_libsass.libsass import pathutils class TestPathutils(TestCase): def test_subpaths(self): path = join('/foo','bar','baz') exprmt = pathutils.subpaths(path) expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ] self.assertEqual(exprmt, expect) def test_grep_r(self): pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])] self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), []) self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
from os.path import join, realpath import os import sublime import sys from unittest import TestCase from functools import wraps def subl_patch(pkg, obj=None): def subl_deco(fn): @wraps(fn) def wrap(*args): nonlocal pkg o = [] if obj != None: o += [obj] pkg = pkg + '.' + obj try: mock = __import__(pkg, globals(), locals(), o, 0) except ImportError: pkg = realpath(__file__).split(os.sep)[-3] + '.' + pkg mock = __import__(pkg, globals(), locals(), o, 0) args += (mock,) fn(*args) return wrap return subl_deco class TestPathutils(TestCase): @subl_patch('libsass', 'pathutils') def test_subpaths(self, pathutils): path = join('/foo','bar','baz') exprmt = pathutils.subpaths(path) expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ] self.assertEqual(exprmt, expect) @subl_patch('libsass', 'pathutils') def test_grep_r(self, pathutils): pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])] self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), []) self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
Make custom patch in package to test
Make custom patch in package to test
Python
mit
blitzrk/sublime_libsass,blitzrk/sublime_libsass
--- +++ @@ -1,24 +1,41 @@ -from os.path import join +from os.path import join, realpath +import os import sublime import sys from unittest import TestCase +from functools import wraps -version = sublime.version() -try: - from libsass import pathutils -except ImportError: - from sublime_libsass.libsass import pathutils +def subl_patch(pkg, obj=None): + def subl_deco(fn): + @wraps(fn) + def wrap(*args): + nonlocal pkg + o = [] + if obj != None: + o += [obj] + pkg = pkg + '.' + obj + try: + mock = __import__(pkg, globals(), locals(), o, 0) + except ImportError: + pkg = realpath(__file__).split(os.sep)[-3] + '.' + pkg + mock = __import__(pkg, globals(), locals(), o, 0) + args += (mock,) + fn(*args) + return wrap + return subl_deco class TestPathutils(TestCase): - def test_subpaths(self): + @subl_patch('libsass', 'pathutils') + def test_subpaths(self, pathutils): path = join('/foo','bar','baz') exprmt = pathutils.subpaths(path) expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ] self.assertEqual(exprmt, expect) - def test_grep_r(self): + @subl_patch('libsass', 'pathutils') + def test_grep_r(self, pathutils): pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])] self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
9eddd3b5c4635637faead9d7eae73efd2e324bdb
recipes/tests/test_views.py
recipes/tests/test_views.py
from django.core.urlresolvers import resolve from django.http import HttpRequest from django.template.loader import render_to_string from django.test import TestCase from recipes.views import home_page from recipes.models import Recipe class HomePageViewTest(TestCase): def test_root_url_resolves_to_home_page_view(self): found = resolve('/') self.assertEqual(found.func, home_page) def test_home_page_inherits_from_base_template(self): response = self.client.get('/') self.assertTemplateUsed(response, 'rotd/base.html') def test_home_page_uses_correct_template(self): request = HttpRequest() response = home_page(request) expected = render_to_string('recipes/home.html') self.assertEqual(response.content.decode(), expected) def test_home_page_has_recipe(self): Recipe.objects.create(name='test') response = self.client.get('/') self.assertIsInstance(response.context['recipe'], Recipe) def test_home_page_shows_any_recipe_name(self): Recipe.objects.create(name='test recipe') request = HttpRequest() response = home_page(request).content.decode() self.assertTrue(any([(recipe.name in response) for recipe in Recipe.objects.all()]))
from django.core.urlresolvers import resolve from django.http import HttpRequest from django.template.loader import render_to_string from django.test import TestCase from recipes.views import home_page from recipes.models import Recipe class HomePageViewTest(TestCase): def test_root_url_resolves_to_home_page_view(self): found = resolve('/') self.assertEqual(found.func, home_page) def test_home_page_inherits_from_base_template(self): response = self.client.get('/') self.assertTemplateUsed(response, 'rotd/base.html') def test_home_page_uses_correct_template(self): response = self.client.get('/') self.assertTemplateUsed(response, 'recipes/home.html') def test_home_page_has_recipe(self): Recipe.objects.create(name='test') response = self.client.get('/') self.assertIsInstance(response.context['recipe'], Recipe) def test_home_page_shows_any_recipe_name(self): Recipe.objects.create(name='test recipe') request = HttpRequest() response = home_page(request).content.decode() self.assertTrue(any([(recipe.name in response) for recipe in Recipe.objects.all()]))
Use the test client to check all templates for correctness
Use the test client to check all templates for correctness
Python
agpl-3.0
XeryusTC/rotd,XeryusTC/rotd,XeryusTC/rotd
--- +++ @@ -16,10 +16,8 @@ self.assertTemplateUsed(response, 'rotd/base.html') def test_home_page_uses_correct_template(self): - request = HttpRequest() - response = home_page(request) - expected = render_to_string('recipes/home.html') - self.assertEqual(response.content.decode(), expected) + response = self.client.get('/') + self.assertTemplateUsed(response, 'recipes/home.html') def test_home_page_has_recipe(self): Recipe.objects.create(name='test')
c1edc666630c03b6d85d9749e0695a9f19dd7c13
src/collectd_scripts/handle_collectd_notification.py
src/collectd_scripts/handle_collectd_notification.py
#!/usr/bin/python import sys import os import salt.client def getNotification(): notification_dict = {} isEndOfDictionary = False for line in sys.stdin: if not line.strip(): isEndOfDictionary = True continue if isEndOfDictionary: break key, value = line.split(':') notification_dict[key] = value.lstrip()[:-1] return notification_dict, line def postTheNotificationToSaltMaster(): salt_payload = {} threshold_dict = {} caller = salt.client.Caller() threshold_dict['tags'], threshold_dict['message'] = getNotification() tag = "skyring/collectd/node/{0}/threshold/{1}/{2}".format( threshold_dict['tags']["Host"], threshold_dict['tags']["Plugin"], threshold_dict['tags']["Severity"]) caller.sminion.functions['event.send'](tag, threshold_dict) if __name__ == '__main__': postTheNotificationToSaltMaster()
#!/usr/bin/python import sys import os import salt.client def getNotification(): notification_dict = {} isEndOfDictionary = False for line in sys.stdin: if not line.strip(): isEndOfDictionary = True continue if isEndOfDictionary: break key, value = line.split(':') notification_dict[key] = value.lstrip()[:-1] return notification_dict, line def postTheNotificationToSaltMaster(): salt_payload = {} threshold_dict = {} caller = salt.client.Caller() threshold_dict['tags'], threshold_dict['message'] = getNotification() threshold_dict['severity'] = threshold_dict['tags']["Severity"] tag = "skyring/collectd/node/{0}/threshold/{1}/{2}".format( threshold_dict['tags']["Host"], threshold_dict['tags']["Plugin"], threshold_dict['tags']["Severity"]) caller.sminion.functions['event.send'](tag, threshold_dict) if __name__ == '__main__': postTheNotificationToSaltMaster()
Fix in collectd event notifier script.
Skynet: Fix in collectd event notifier script. This patch fixes the collectd event notifier script by providing a value for the "severity" field in the event that it sends to the salt-master event bus. Without that, the event listener in the skyring server will fail to process it. Change-Id: I20b738468c8022a25024e4327434ae6dab43a123 Signed-off-by: nnDarshan <[email protected]>
Python
apache-2.0
skyrings/skynet,skyrings/skynet
--- +++ @@ -23,6 +23,7 @@ threshold_dict = {} caller = salt.client.Caller() threshold_dict['tags'], threshold_dict['message'] = getNotification() + threshold_dict['severity'] = threshold_dict['tags']["Severity"] tag = "skyring/collectd/node/{0}/threshold/{1}/{2}".format( threshold_dict['tags']["Host"], threshold_dict['tags']["Plugin"],
545812b5e31b4894c600b2b172640bea27947db8
ecmd-core/pyecmd/test_api.py
ecmd-core/pyecmd/test_api.py
from pyecmd import * with Ecmd(fapi2="ver1"): t = loopTargets("pu", ECMD_SELECTED_TARGETS_LOOP)[0] data = t.getScom(0x1234) t.putScom(0x1234, 0x10100000) # These interfaces may not be defined for some plugins # Pull them to prevent compile issues #core_id, thread_id = t.targetToSequenceId() #unit_id_string = unitIdToString(2) #clock_state = t.queryClockState("SOMECLOCK") t.relatedTargets("pu.c") retval = t.queryFileLocationHidden2(ECMD_FILE_SCANDEF, "") for loc in retval.fileLocations: testval = loc.textFile + loc.hashFile + retval.version try: t.fapi2GetAttr("ATTR_DOES_NOT_EXIST") assert(""=="That was supposed to throw!") except KeyError: pass t.fapi2SetAttr("ATTR_CHIP_ID", 42) assert(42 == t.fapi2GetAttr("ATTR_CHIP_ID"))
from pyecmd import * extensions = {} if hasattr(ecmd, "fapi2InitExtension"): extensions["fapi2"] = "ver1" with Ecmd(**extensions): t = loopTargets("pu", ECMD_SELECTED_TARGETS_LOOP)[0] data = t.getScom(0x1234) t.putScom(0x1234, 0x10100000) # These interfaces may not be defined for some plugins # Pull them to prevent compile issues #core_id, thread_id = t.targetToSequenceId() #unit_id_string = unitIdToString(2) #clock_state = t.queryClockState("SOMECLOCK") t.relatedTargets("pu.c") retval = t.queryFileLocationHidden2(ECMD_FILE_SCANDEF, "") for loc in retval.fileLocations: testval = loc.textFile + loc.hashFile + retval.version if "fapi2" in extensions: try: t.fapi2GetAttr("ATTR_DOES_NOT_EXIST") assert(""=="That was supposed to throw!") except KeyError: pass t.fapi2SetAttr("ATTR_CHIP_ID", 42) assert(42 == t.fapi2GetAttr("ATTR_CHIP_ID"))
Make fapi2 test conditional on fapi2 being built into ecmd
pyecmd: Make fapi2 test conditional on fapi2 being built into ecmd
Python
apache-2.0
mklight/eCMD,mklight/eCMD,mklight/eCMD,mklight/eCMD,open-power/eCMD,open-power/eCMD,open-power/eCMD,open-power/eCMD,open-power/eCMD,mklight/eCMD
--- +++ @@ -1,6 +1,10 @@ from pyecmd import * -with Ecmd(fapi2="ver1"): +extensions = {} +if hasattr(ecmd, "fapi2InitExtension"): + extensions["fapi2"] = "ver1" + +with Ecmd(**extensions): t = loopTargets("pu", ECMD_SELECTED_TARGETS_LOOP)[0] data = t.getScom(0x1234) t.putScom(0x1234, 0x10100000) @@ -13,11 +17,13 @@ retval = t.queryFileLocationHidden2(ECMD_FILE_SCANDEF, "") for loc in retval.fileLocations: testval = loc.textFile + loc.hashFile + retval.version - try: - t.fapi2GetAttr("ATTR_DOES_NOT_EXIST") - assert(""=="That was supposed to throw!") - except KeyError: - pass - t.fapi2SetAttr("ATTR_CHIP_ID", 42) - assert(42 == t.fapi2GetAttr("ATTR_CHIP_ID")) + if "fapi2" in extensions: + try: + t.fapi2GetAttr("ATTR_DOES_NOT_EXIST") + assert(""=="That was supposed to throw!") + except KeyError: + pass + + t.fapi2SetAttr("ATTR_CHIP_ID", 42) + assert(42 == t.fapi2GetAttr("ATTR_CHIP_ID"))
01e9df01bc17561d0f489f1647ce5e0c566372e5
flocker/provision/__init__.py
flocker/provision/__init__.py
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Provisioning for acceptance tests. """ from ._common import PackageSource from ._install import provision from ._rackspace import rackspace_provisioner from ._aws import aws_provisioner # import digitalocean_provisioner __all__ = [ 'PackageSource', 'provision', 'rackspace_provisioner', 'aws_provisioner' # digitalocean_provisioner ]
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Provisioning for acceptance tests. """ from ._common import PackageSource from ._install import provision from ._rackspace import rackspace_provisioner from ._aws import aws_provisioner from ._digitalocean import digitalocean_provisioner __all__ = [ 'PackageSource', 'provision', 'rackspace_provisioner', 'aws_provisioner', 'digitalocean_provisioner' ]
Make the digitalocean provisioner public
Make the digitalocean provisioner public
Python
apache-2.0
wallnerryan/flocker-profiles,1d4Nf6/flocker,hackday-profilers/flocker,moypray/flocker,mbrukman/flocker,hackday-profilers/flocker,agonzalezro/flocker,1d4Nf6/flocker,w4ngyi/flocker,moypray/flocker,agonzalezro/flocker,mbrukman/flocker,adamtheturtle/flocker,moypray/flocker,AndyHuu/flocker,achanda/flocker,lukemarsden/flocker,LaynePeng/flocker,lukemarsden/flocker,wallnerryan/flocker-profiles,Azulinho/flocker,achanda/flocker,adamtheturtle/flocker,jml/flocker,runcom/flocker,w4ngyi/flocker,agonzalezro/flocker,Azulinho/flocker,LaynePeng/flocker,w4ngyi/flocker,1d4Nf6/flocker,LaynePeng/flocker,runcom/flocker,runcom/flocker,mbrukman/flocker,Azulinho/flocker,lukemarsden/flocker,AndyHuu/flocker,hackday-profilers/flocker,wallnerryan/flocker-profiles,achanda/flocker,adamtheturtle/flocker,AndyHuu/flocker,jml/flocker,jml/flocker
--- +++ @@ -8,10 +8,9 @@ from ._install import provision from ._rackspace import rackspace_provisioner from ._aws import aws_provisioner -# import digitalocean_provisioner +from ._digitalocean import digitalocean_provisioner __all__ = [ 'PackageSource', 'provision', - 'rackspace_provisioner', 'aws_provisioner' - # digitalocean_provisioner + 'rackspace_provisioner', 'aws_provisioner', 'digitalocean_provisioner' ]
1ba88cf7d087c2783306854ea3fbc16c5fe17df4
wagtail/utils/compat.py
wagtail/utils/compat.py
def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. return getattr(rel, 'related_model', rel.model)
import django def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. if django.VERSION >= (1, 8): return rel.related_model else: return rel.model
Check Django version instead of hasattr
Check Django version instead of hasattr
Python
bsd-3-clause
mayapurmedia/wagtail,chrxr/wagtail,darith27/wagtail,mjec/wagtail,rv816/wagtail,rsalmaso/wagtail,stevenewey/wagtail,KimGlazebrook/wagtail-experiment,kurtw/wagtail,serzans/wagtail,m-sanders/wagtail,KimGlazebrook/wagtail-experiment,JoshBarr/wagtail,JoshBarr/wagtail,inonit/wagtail,kaedroho/wagtail,zerolab/wagtail,FlipperPA/wagtail,m-sanders/wagtail,wagtail/wagtail,mikedingjan/wagtail,janusnic/wagtail,takeshineshiro/wagtail,rv816/wagtail,inonit/wagtail,WQuanfeng/wagtail,iansprice/wagtail,rsalmaso/wagtail,serzans/wagtail,inonit/wagtail,Toshakins/wagtail,nimasmi/wagtail,nutztherookie/wagtail,rjsproxy/wagtail,bjesus/wagtail,Tivix/wagtail,marctc/wagtail,Tivix/wagtail,mikedingjan/wagtail,gasman/wagtail,gasman/wagtail,kaedroho/wagtail,bjesus/wagtail,quru/wagtail,timorieber/wagtail,iho/wagtail,stevenewey/wagtail,mephizzle/wagtail,taedori81/wagtail,nimasmi/wagtail,Klaudit/wagtail,mikedingjan/wagtail,FlipperPA/wagtail,timorieber/wagtail,nrsimha/wagtail,thenewguy/wagtail,takeflight/wagtail,tangentlabs/wagtail,bjesus/wagtail,JoshBarr/wagtail,mayapurmedia/wagtail,FlipperPA/wagtail,Pennebaker/wagtail,mixxorz/wagtail,kurtrwall/wagtail,nilnvoid/wagtail,kurtw/wagtail,nrsimha/wagtail,takeflight/wagtail,takeflight/wagtail,darith27/wagtail,Klaudit/wagtail,kurtrwall/wagtail,rjsproxy/wagtail,marctc/wagtail,m-sanders/wagtail,nilnvoid/wagtail,bjesus/wagtail,thenewguy/wagtail,mephizzle/wagtail,jordij/wagtail,hanpama/wagtail,takeshineshiro/wagtail,hamsterbacke23/wagtail,quru/wagtail,FlipperPA/wagtail,hanpama/wagtail,nimasmi/wagtail,zerolab/wagtail,stevenewey/wagtail,taedori81/wagtail,darith27/wagtail,m-sanders/wagtail,torchbox/wagtail,kurtw/wagtail,hanpama/wagtail,nealtodd/wagtail,taedori81/wagtail,mikedingjan/wagtail,nrsimha/wagtail,Pennebaker/wagtail,mjec/wagtail,wagtail/wagtail,zerolab/wagtail,nilnvoid/wagtail,hanpama/wagtail,timorieber/wagtail,Klaudit/wagtail,quru/wagtail,mephizzle/wagtail,serzans/wagtail,Toshakins/wagtail,Pennebaker/wagtail,mixxorz/wagtail,jordij/wagtail,tangentlabs/wagtail,mjec/wagtail,WQuanfeng/wagtail,janusnic/wagtail,rsalmaso/wagtail,nealtodd/wagtail,thenewguy/wagtail,chrxr/wagtail,KimGlazebrook/wagtail-experiment,taedori81/wagtail,davecranwell/wagtail,Tivix/wagtail,nilnvoid/wagtail,torchbox/wagtail,hamsterbacke23/wagtail,nealtodd/wagtail,nutztherookie/wagtail,marctc/wagtail,KimGlazebrook/wagtail-experiment,janusnic/wagtail,nrsimha/wagtail,torchbox/wagtail,gogobook/wagtail,hamsterbacke23/wagtail,davecranwell/wagtail,rjsproxy/wagtail,torchbox/wagtail,takeshineshiro/wagtail,kurtrwall/wagtail,davecranwell/wagtail,jordij/wagtail,iho/wagtail,taedori81/wagtail,nimasmi/wagtail,rsalmaso/wagtail,mayapurmedia/wagtail,mixxorz/wagtail,wagtail/wagtail,chrxr/wagtail,gogobook/wagtail,thenewguy/wagtail,quru/wagtail,darith27/wagtail,thenewguy/wagtail,iansprice/wagtail,iho/wagtail,kaedroho/wagtail,gasman/wagtail,jordij/wagtail,hamsterbacke23/wagtail,WQuanfeng/wagtail,marctc/wagtail,rsalmaso/wagtail,Toshakins/wagtail,tangentlabs/wagtail,nutztherookie/wagtail,gasman/wagtail,nutztherookie/wagtail,takeshineshiro/wagtail,janusnic/wagtail,rv816/wagtail,gogobook/wagtail,rjsproxy/wagtail,kurtrwall/wagtail,kaedroho/wagtail,mjec/wagtail,mephizzle/wagtail,nealtodd/wagtail,WQuanfeng/wagtail,iansprice/wagtail,Tivix/wagtail,wagtail/wagtail,JoshBarr/wagtail,stevenewey/wagtail,Pennebaker/wagtail,Klaudit/wagtail,wagtail/wagtail,iansprice/wagtail,tangentlabs/wagtail,chrxr/wagtail,timorieber/wagtail,rv816/wagtail,mayapurmedia/wagtail,jnns/wagtail,zerolab/wagtail,jnns/wagtail,zerolab/wagtail,serzans/wagtail,gasman/wagtail,mixxorz/w
agtail,inonit/wagtail,davecranwell/wagtail,jnns/wagtail,Toshakins/wagtail,kaedroho/wagtail,jnns/wagtail,mixxorz/wagtail,iho/wagtail,gogobook/wagtail,takeflight/wagtail,kurtw/wagtail
--- +++ @@ -1,5 +1,11 @@ +import django + + def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. - return getattr(rel, 'related_model', rel.model) + if django.VERSION >= (1, 8): + return rel.related_model + else: + return rel.model
feecf02f831394747cdee63892d12811f137938c
elevator/api.py
elevator/api.py
#!/usr/bin/env python # -*- coding: utf-8 -*- class Handler(object): def __init__(self, db): # Each handlers is formatted following # the pattern : [ command, # default return value, # raised error ] self.handles = { 'GET': (db.Get, "", KeyError), 'PUT': (db.Put, "True", TypeError), 'DELETE': (db.Delete, ""), } def command(self, message): op_code = message.op_code args = message.data if op_code in self.handles: if len(self.handles[op_code]) == 2: return self.handles[op_code](*args) else: # FIXME # global except catching is a total # performance killer. Should enhance # the handles attributes to link possible # exceptions with leveldb methods. try: value = self.handles[op_code][0](*args) except self.handles[op_code][2]: return "" else: raise KeyError("op_code not handle") return value if value else self.handles[op_code][1]
#!/usr/bin/env python # -*- coding: utf-8 -*- class Handler(object): def __init__(self, db): # Each handlers is formatted following # the pattern : [ command, # default return value, # raised error ] self.handles = { 'GET': (db.Get, "", KeyError), 'PUT': (db.Put, "True", TypeError), 'DELETE': (db.Delete, ""), } def command(self, message): op_code = message.op_code args = message.data if op_code in self.handles: if len(self.handles[op_code]) == 2: value = self.handles[op_code][0](*args) else: # FIXME # global except catching is a total # performance killer. Should enhance # the handles attributes to link possible # exceptions with leveldb methods. try: value = self.handles[op_code][0](*args) except self.handles[op_code][2]: return "" else: raise KeyError("op_code not handle") return value if value else self.handles[op_code][1]
Fix: delete command call badly formatted in handler
Fix: delete command call badly formatted in handler
Python
mit
oleiade/Elevator
--- +++ @@ -20,7 +20,7 @@ if op_code in self.handles: if len(self.handles[op_code]) == 2: - return self.handles[op_code](*args) + value = self.handles[op_code][0](*args) else: # FIXME # global except catching is a total
c6f2ff563c08eb43ba3f33bc9aaa2647e78701d2
fenced_code_plus/__init__.py
fenced_code_plus/__init__.py
from fenced_code_plus import FencedCodePlusExtension from fenced_code_plus import makeExtension
from __future__ import absolute_import from fenced_code_plus.fenced_code_plus import FencedCodePlusExtension from fenced_code_plus.fenced_code_plus import makeExtension
Make import compatible with python3.5
Make import compatible with python3.5
Python
bsd-3-clause
amfarrell/fenced-code-plus
--- +++ @@ -1,2 +1,4 @@ -from fenced_code_plus import FencedCodePlusExtension -from fenced_code_plus import makeExtension +from __future__ import absolute_import + +from fenced_code_plus.fenced_code_plus import FencedCodePlusExtension +from fenced_code_plus.fenced_code_plus import makeExtension
8b127a3d934470aa20fbff83d06ded2e37d00579
deferrable/delay.py
deferrable/delay.py
"""This may seem like a silly module right now, but we had to separate this out so that deferrable.py and its sub-modules could all import it without circular imports.""" MAXIMUM_DELAY_SECONDS = 900
"""This may seem like a silly module right now, but we had to separate this out so that deferrable.py and its sub-modules could all import it without circular imports.""" # SQS has a hard limit of 900 seconds, and Dockets # delay queues incur heavy performance penalties, # so this seems like a reasonable limit for all MAXIMUM_DELAY_SECONDS = 900
Add back some reasoning on the 900 number
Add back some reasoning on the 900 number
Python
mit
gamechanger/deferrable
--- +++ @@ -2,4 +2,7 @@ separate this out so that deferrable.py and its sub-modules could all import it without circular imports.""" +# SQS has a hard limit of 900 seconds, and Dockets +# delay queues incur heavy performance penalties, +# so this seems like a reasonable limit for all MAXIMUM_DELAY_SECONDS = 900
e2909520e93e85286bd4393426377e48db243615
hastexo_social_auth/oauth2.py
hastexo_social_auth/oauth2.py
from social.backends.oauth import BaseOAuth2 class HastexoOAuth2(BaseOAuth2): """Hastexo OAuth2 authentication backend""" name = 'hastexo' AUTHORIZATION_URL = 'https://store.hastexo.com/o/authorize/' ACCESS_TOKEN_URL = 'https://store.hastexo.com/o/token/' ACCESS_TOKEN_METHOD = 'POST' SCOPE_SEPARATOR = ' ' def get_user_details(self, response): """Return user details from hastexo account""" return { 'username': response['username'], 'email': response.get('email', ''), 'first_name': '', 'last_name': '', } def user_data(self, access_token, *args, **kwargs): """Loads user data from service""" return self.get_json('https://store.hastexo.com/api/users/', params={ 'access_token': access_token })
from social.backends.oauth import BaseOAuth2 class HastexoOAuth2(BaseOAuth2): """Hastexo OAuth2 authentication backend""" name = 'hastexo' AUTHORIZATION_URL = 'https://store.hastexo.com/o/authorize/' ACCESS_TOKEN_URL = 'https://store.hastexo.com/o/token/' ACCESS_TOKEN_METHOD = 'POST' SCOPE_SEPARATOR = ' ' def get_user_details(self, response): """Return user details from hastexo account""" return { 'username': response.get('username'), 'email': response.get('email', ''), 'first_name': response.get('first_name', ''), 'last_name': response.get('last_name', '') } def user_data(self, access_token, *args, **kwargs): """Loads user data from service""" return self.get_json('https://store.hastexo.com/api/login/', params={ 'access_token': access_token })
Update user details API call
Update user details API call
Python
bsd-3-clause
hastexo/python-social-auth-hastexo,arbrandes/python-social-auth-hastexo
--- +++ @@ -13,14 +13,14 @@ def get_user_details(self, response): """Return user details from hastexo account""" return { - 'username': response['username'], + 'username': response.get('username'), 'email': response.get('email', ''), - 'first_name': '', - 'last_name': '', + 'first_name': response.get('first_name', ''), + 'last_name': response.get('last_name', '') } def user_data(self, access_token, *args, **kwargs): """Loads user data from service""" - return self.get_json('https://store.hastexo.com/api/users/', params={ + return self.get_json('https://store.hastexo.com/api/login/', params={ 'access_token': access_token })
52cb80dd92ceabd7d2efe67c0a89f76cd701283b
statirator/main.py
statirator/main.py
import os import sys def main(): # init is a special case, cause we want to add statirator.core to # INSTALLED_APPS, and have the command picked up. we'll handle it in here if 'init' in sys.argv: from django.conf import settings settings.configure(INSTALLED_APPS=('statirator.core', )) elif 'test' in sys.argv: os.environ.setdefault( "DJANGO_SETTINGS_MODULE", "statirator.test_settings") from django.core import management management.execute_from_command_line() if __name__ == '__main__': main()
import os import sys def main(): if 'test' in sys.argv: os.environ.setdefault( "DJANGO_SETTINGS_MODULE", "statirator.test_settings") else: from django.conf import settings settings.configure(INSTALLED_APPS=('statirator.core', )) from django.core import management management.execute_from_command_line() if __name__ == '__main__': main()
Add statirator.core for all commands except test
Add statirator.core for all commands except test
Python
mit
MeirKriheli/statirator,MeirKriheli/statirator,MeirKriheli/statirator
--- +++ @@ -3,15 +3,12 @@ def main(): - # init is a special case, cause we want to add statirator.core to - # INSTALLED_APPS, and have the command picked up. we'll handle it in here - - if 'init' in sys.argv: + if 'test' in sys.argv: + os.environ.setdefault( + "DJANGO_SETTINGS_MODULE", "statirator.test_settings") + else: from django.conf import settings settings.configure(INSTALLED_APPS=('statirator.core', )) - elif 'test' in sys.argv: - os.environ.setdefault( - "DJANGO_SETTINGS_MODULE", "statirator.test_settings") from django.core import management management.execute_from_command_line()
c7ec2805d1c3dde9ff3bf8caacf0bac474a1d468
cybox/utils.py
cybox/utils.py
#Common utility methods #Test if a dictionary value is not None and has a length greater than 0 def test_value(value): if value.get('value') is not None: if value.get('value') is not None and len(str(value.get('value'))) > 0: return True else: return False else: return False
"""Common utility methods""" def test_value(value): """ Test if a dictionary contains a "value" key whose value is not None and has a length greater than 0. We explicitly want to return True even if the value is False or 0, since some parts of the standards are boolean or allow a 0 value, and we want to distinguish the case where the "value" key is omitted entirely. """ v = value.get('value', None) return (v is not None) and (len(str(v)) > 0)
Clean up and document 'test_value' function.
Clean up and document 'test_value' function.
Python
bsd-3-clause
CybOXProject/python-cybox
--- +++ @@ -1,11 +1,13 @@ -#Common utility methods +"""Common utility methods""" -#Test if a dictionary value is not None and has a length greater than 0 def test_value(value): - if value.get('value') is not None: - if value.get('value') is not None and len(str(value.get('value'))) > 0: - return True - else: - return False - else: - return False + """ + Test if a dictionary contains a "value" key whose value is not None + and has a length greater than 0. + + We explicitly want to return True even if the value is False or 0, since + some parts of the standards are boolean or allow a 0 value, and we want to + distinguish the case where the "value" key is omitted entirely. + """ + v = value.get('value', None) + return (v is not None) and (len(str(v)) > 0)
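The rewritten docstring above stresses that `False` and `0` must still count as present values. A small standalone check, reusing the same logic, shows how the helper separates those cases from a missing or empty value:

```python
def test_value(value):
    # Same logic as the cleaned-up helper above.
    v = value.get('value', None)
    return (v is not None) and (len(str(v)) > 0)

print(test_value({'value': False}))  # True  -- False is a real value
print(test_value({'value': 0}))      # True  -- 0 is a real value
print(test_value({'value': None}))   # False -- explicit None treated as absent
print(test_value({}))                # False -- 'value' key omitted entirely
print(test_value({'value': ''}))     # False -- empty string has length 0
```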
1e690994c89b61f3331f6ccacff1dbc822a224a1
homedisplay/display/views.py
homedisplay/display/views.py
from django.http import HttpResponseRedirect, HttpResponse, Http404 from django.shortcuts import render_to_response from django.template import RequestContext from django.views.generic import View from homedisplay.utils import publish_ws class Wrapped(View): def get(self, request, *args, **kwargs): return render_to_response("index_frame.html", {"frame_src": "/homecontroller/display/content/%s" % kwargs.get("view") }, context_instance=RequestContext(request))
from django.http import HttpResponseRedirect, HttpResponse, Http404 from django.shortcuts import render_to_response from django.template import RequestContext from django.views.generic import View from homedisplay.utils import publish_ws class Wrapped(View): def get(self, request, *args, **kwargs): return render_to_response("frame.html", {"frame_src": "/homecontroller/display/content/%s" % kwargs.get("view") }, context_instance=RequestContext(request))
Use renamed frame template file
Use renamed frame template file
Python
bsd-3-clause
ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display
--- +++ @@ -6,4 +6,4 @@ class Wrapped(View): def get(self, request, *args, **kwargs): - return render_to_response("index_frame.html", {"frame_src": "/homecontroller/display/content/%s" % kwargs.get("view") }, context_instance=RequestContext(request)) + return render_to_response("frame.html", {"frame_src": "/homecontroller/display/content/%s" % kwargs.get("view") }, context_instance=RequestContext(request))
7cf32e0300735af93dfe0d90ac1e9c20adbc5cf5
tests/b009_b010.py
tests/b009_b010.py
""" Should emit: B009 - Line 17, 18, 19 B010 - Line 28, 29, 30 """ # Valid getattr usage getattr(foo, bar) getattr(foo, "bar", None) getattr(foo, "bar{foo}".format(foo="a"), None) getattr(foo, "bar{foo}".format(foo="a")) getattr(foo, bar, None) getattr(foo, "123abc") getattr(foo, "except") # Invalid usage getattr(foo, "bar") getattr(foo, "_123abc") getattr(foo, "abc123") # Valid setattr usage setattr(foo, bar, None) setattr(foo, "bar{foo}".format(foo="a"), None) setattr(foo, "123abc", None) getattr(foo, "except", None) # Invalid usage setattr(foo, "bar", None) setattr(foo, "_123abc", None) setattr(foo, "abc123", None)
""" Should emit: B009 - Line 17, 18, 19 B010 - Line 28, 29, 30 """ # Valid getattr usage getattr(foo, bar) getattr(foo, "bar", None) getattr(foo, "bar{foo}".format(foo="a"), None) getattr(foo, "bar{foo}".format(foo="a")) getattr(foo, bar, None) getattr(foo, "123abc") getattr(foo, "except") # Invalid usage getattr(foo, "bar") getattr(foo, "_123abc") getattr(foo, "abc123") # Valid setattr usage setattr(foo, bar, None) setattr(foo, "bar{foo}".format(foo="a"), None) setattr(foo, "123abc", None) setattr(foo, "except", None) # Invalid usage setattr(foo, "bar", None) setattr(foo, "_123abc", None) setattr(foo, "abc123", None)
Fix wrong call method in test (GH-179)
Fix wrong call method in test (GH-179)
Python
mit
ambv/flake8-bugbear,PyCQA/flake8-bugbear
--- +++ @@ -22,7 +22,7 @@ setattr(foo, bar, None) setattr(foo, "bar{foo}".format(foo="a"), None) setattr(foo, "123abc", None) -getattr(foo, "except", None) +setattr(foo, "except", None) # Invalid usage setattr(foo, "bar", None)
3fc94b4cffcfd08b439386fb2b01aa1e12fec6d5
iati/core/tests/test_data.py
iati/core/tests/test_data.py
"""A module containing tests for the library representation of IATI data.""" import iati.core.data class TestDatasets(object): """A container for tests relating to Datasets""" pass
"""A module containing tests for the library representation of IATI data.""" import iati.core.data class TestDatasets(object): """A container for tests relating to Datasets""" def test_dataset_no_params(self): """Test Dataset creation with no parameters.""" pass def test_dataset_valid_xml_string(self): """Test Dataset creation with a valid XML string that is not IATI data.""" pass def test_dataset_valid_iati_string(self): """Test Dataset creation with a valid IATI XML string.""" pass def test_dataset_invalid_xml_string(self): """Test Dataset creation with a string that is not valid XML.""" pass def test_dataset_tree(self): """Test Dataset creation with an etree that is not valid IATI data.""" pass def test_dataset_iati_tree(self): """Test Dataset creation with a valid IATI etree.""" pass def test_dataset_no_params_strict(self): """Test Dataset creation with no parameters. Strict IATI checks are enabled. """ pass def test_dataset_valid_xml_string_strict(self): """Test Dataset creation with a valid XML string that is not IATI data. Strict IATI checks are enabled. """ pass def test_dataset_valid_iati_string_strict(self): """Test Dataset creation with a valid IATI XML string. Strict IATI checks are enabled. """ pass def test_dataset_invalid_xml_string_strict(self): """Test Dataset creation with a string that is not valid XML. Strict IATI checks are enabled. """ pass def test_dataset_tree_strict(self): """Test Dataset creation with an etree that is not valid IATI data. Strict IATI checks are enabled. """ pass def test_dataset_iati_tree_strict(self): """Test Dataset creation with a valid IATI etree. Strict IATI checks are enabled. """ pass
Test stubs for dataset creation
Test stubs for dataset creation
Python
mit
IATI/iati.core,IATI/iati.core
--- +++ @@ -5,4 +5,62 @@ class TestDatasets(object): """A container for tests relating to Datasets""" - pass + def test_dataset_no_params(self): + """Test Dataset creation with no parameters.""" + pass + + def test_dataset_valid_xml_string(self): + """Test Dataset creation with a valid XML string that is not IATI data.""" + pass + + def test_dataset_valid_iati_string(self): + """Test Dataset creation with a valid IATI XML string.""" + pass + + def test_dataset_invalid_xml_string(self): + """Test Dataset creation with a string that is not valid XML.""" + pass + + def test_dataset_tree(self): + """Test Dataset creation with an etree that is not valid IATI data.""" + pass + + def test_dataset_iati_tree(self): + """Test Dataset creation with a valid IATI etree.""" + pass + + def test_dataset_no_params_strict(self): + """Test Dataset creation with no parameters. + Strict IATI checks are enabled. + """ + pass + + def test_dataset_valid_xml_string_strict(self): + """Test Dataset creation with a valid XML string that is not IATI data. + Strict IATI checks are enabled. + """ + pass + + def test_dataset_valid_iati_string_strict(self): + """Test Dataset creation with a valid IATI XML string. + Strict IATI checks are enabled. + """ + pass + + def test_dataset_invalid_xml_string_strict(self): + """Test Dataset creation with a string that is not valid XML. + Strict IATI checks are enabled. + """ + pass + + def test_dataset_tree_strict(self): + """Test Dataset creation with an etree that is not valid IATI data. + Strict IATI checks are enabled. + """ + pass + + def test_dataset_iati_tree_strict(self): + """Test Dataset creation with a valid IATI etree. + Strict IATI checks are enabled. + """ + pass
7f3476ec37fcea4195f1a180c1515fd321d78697
tests/run_tests.py
tests/run_tests.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys import nose import logging logging.disable(logging.DEBUG) # Disable debug logging when running the test suite. def start(argv=None): sys.exitfunc = lambda: sys.stderr.write('Shutting down...\n') if argv is None: argv = [ 'nosetests', '--verbose', '--with-coverage', '--cover-html', '--cover-html-dir=.htmlcov', '--cover-erase', '--cover-branches', '--cover-package=chemtrails', ] nose.run_exit(argv=argv, defaultTest=os.path.abspath(os.path.dirname(__file__))) if __name__ == '__main__': start(sys.argv)
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys import nose import logging logging.disable(logging.INFO) # Disable debug logging when running the test suite. def start(argv=None): sys.exitfunc = lambda: sys.stderr.write('Shutting down...\n') if argv is None: argv = [ 'nosetests', '--verbose', '--with-coverage', '--cover-html', '--cover-html-dir=.htmlcov', '--cover-erase', '--cover-branches', '--cover-package=chemtrails', ] nose.run_exit(argv=argv, defaultTest=os.path.abspath(os.path.dirname(__file__))) if __name__ == '__main__': start(sys.argv)
Disable INFO-level messages and below when running the test suite
Disable INFO-level messages and below when running the test suite
Python
mit
inonit/django-chemtrails,inonit/django-chemtrails,inonit/django-chemtrails
--- +++ @@ -6,7 +6,7 @@ import nose import logging -logging.disable(logging.DEBUG) # Disable debug logging when running the test suite. +logging.disable(logging.INFO) # Disable debug logging when running the test suite. def start(argv=None):
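The change above raises the disabled threshold from DEBUG to INFO. For reference, `logging.disable(level)` suppresses records at that severity and below; a stdlib-only sketch:

```python
import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
log = logging.getLogger("demo")

logging.disable(logging.INFO)    # suppress INFO and everything less severe
log.debug("hidden")
log.info("hidden too")
log.warning("still visible")     # WARNING and above pass through

logging.disable(logging.NOTSET)  # restore normal behaviour
log.info("visible again")
```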
da314ab34cb13c1de66b96da2eab1484639e124b
fiona/compat.py
fiona/compat.py
try: from collections import OrderedDict except ImportError: from ordereddict import OrderedDict
import collections from six.moves import UserDict try: from collections import OrderedDict except ImportError: from ordereddict import OrderedDict # Users can pass in objects that subclass a few different objects # More specifically, rasterio has a CRS() class that subclasses UserDict() # In Python 2 UserDict() is in its own module and does not subclass Mapping() DICT_TYPES = (dict, collections.Mapping, UserDict)
Add a DICT_TYPES variable so we can do isinstance() checks against all the builtin dict-like objects
Add a DICT_TYPES variable so we can do isinstance() checks against all the builtin dict-like objects
Python
bsd-3-clause
Toblerity/Fiona,rbuffat/Fiona,rbuffat/Fiona,Toblerity/Fiona
--- +++ @@ -1,4 +1,12 @@ +import collections +from six.moves import UserDict try: from collections import OrderedDict except ImportError: from ordereddict import OrderedDict + + +# Users can pass in objects that subclass a few different objects +# More specifically, rasterio has a CRS() class that subclasses UserDict() +# In Python 2 UserDict() is in its own module and does not subclass Mapping() +DICT_TYPES = (dict, collections.Mapping, UserDict)
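A quick sketch of how such a tuple is used with `isinstance()`. Note this uses the Python 3 locations (`collections.abc.Mapping`, `collections.UserDict`) rather than the Python 2 / `six` spellings in the record, and `FakeCRS` is just a stand-in for a `UserDict` subclass like the one mentioned in the comment:

```python
import collections.abc
from collections import UserDict

# Python 3 equivalent of the tuple defined in fiona.compat above.
DICT_TYPES = (dict, collections.abc.Mapping, UserDict)

class FakeCRS(UserDict):
    """Stand-in for a dict-like object that subclasses UserDict."""

print(isinstance({}, DICT_TYPES))          # True
print(isinstance(FakeCRS(), DICT_TYPES))   # True
print(isinstance([("a", 1)], DICT_TYPES))  # False -- a list of pairs is not dict-like
```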
1c2a981e007b9a205db1302370dff6a6ea15bf8c
iati/versions.py
iati/versions.py
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of).""" import re import iati.constants class Version(object): """Representation of an IATI Standard Version Number.""" def __init__(self, version_string): """Initialise a Version Number. Args: version_string (str): A string representation of an IATI version number. Raises: TypeError: If an attempt to pass something that is not a string is made. ValueError: If a provided string is not a version number. """ if not isinstance(version_string, str): raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string))) iati_version_re = re.compile(r'^((1\.0[1-9])|(((1\d+)|([2-9](\d+)?))\.0[1-9](\d+)?))$') if not iati_version_re.match(version_string): raise ValueError('A valid version number must be specified.')
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of).""" import re import iati.constants class Version(object): """Representation of an IATI Standard Version Number.""" def __init__(self, version_string): """Initialise a Version Number. Args: version_string (str): A string representation of an IATI version number. Raises: TypeError: If an attempt to pass something that is not a string is made. ValueError: If a provided string is not a version number. """ if not isinstance(version_string, str): raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string))) # a regex for what makes a valid IATIver Version Number format string iativer_re = re.compile(r'^((1\.0[1-9])|(((1\d+)|([2-9](\d+)?))\.0[1-9](\d+)?))$') if not iativer_re.match(version_string): raise ValueError('A valid version number must be specified.')
Add a comment to make some sense of a regex
Add a comment to make some sense of a regex
Python
mit
IATI/iati.core,IATI/iati.core
--- +++ @@ -20,7 +20,8 @@ if not isinstance(version_string, str): raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string))) - iati_version_re = re.compile(r'^((1\.0[1-9])|(((1\d+)|([2-9](\d+)?))\.0[1-9](\d+)?))$') + # a regex for what makes a valid IATIver Version Number format string + iativer_re = re.compile(r'^((1\.0[1-9])|(((1\d+)|([2-9](\d+)?))\.0[1-9](\d+)?))$') - if not iati_version_re.match(version_string): + if not iativer_re.match(version_string): raise ValueError('A valid version number must be specified.')
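The newly commented regex above encodes the IATIver rule that the decimal component must start with 0. A quick check of which strings the pattern accepts (expected output shown in the trailing comment):

```python
import re

# The IATIver pattern from the module above.
iativer_re = re.compile(r'^((1\.0[1-9])|(((1\d+)|([2-9](\d+)?))\.0[1-9](\d+)?))$')

for candidate in ('1.01', '1.05', '2.03', '105.01', '1.5', '2.0', '1.10'):
    print(candidate, bool(iativer_re.match(candidate)))
# 1.01 True, 1.05 True, 2.03 True, 105.01 True, 1.5 False, 2.0 False, 1.10 False
```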
0ab048e8363a60d47ba780cb622a72343aaf65f2
tests/test_urls.py
tests/test_urls.py
# -*- coding: utf-8 -*- from django.conf.urls import include, url from django.contrib import admin from django.http.response import HttpResponse admin.autodiscover() def empty_view(request): return HttpResponse() urlpatterns = [ url(r'^home/', empty_view, name="home"), url(r'^admin/', admin.site.urls), url(r'^djstripe/', include("djstripe.urls", namespace="djstripe")), url(r'^testapp/', include('tests.apps.testapp.urls')), url(r'^__debug__/', include('tests.apps.testapp.urls')), url( r'^testapp_namespaced/', include('tests.apps.testapp_namespaced.urls', namespace="testapp_namespaced") ), # Represents protected content url(r'^testapp_content/', include('tests.apps.testapp_content.urls')), # For testing fnmatches url(r"test_fnmatch/extra_text/$", empty_view, name="test_fnmatch"), # Default for DJSTRIPE_SUBSCRIPTION_REDIRECT url(r"subscribe/$", empty_view, name="test_url_subscribe") ]
# -*- coding: utf-8 -*- from django.conf.urls import include, url from django.contrib import admin from django.http.response import HttpResponse admin.autodiscover() def empty_view(request): return HttpResponse() urlpatterns = [ url(r'^home/', empty_view, name="home"), url(r'^admin/', admin.site.urls), url(r'^djstripe/', include("djstripe.urls", namespace="djstripe")), url(r'^testapp/', include('tests.apps.testapp.urls')), url( r'^testapp_namespaced/', include('tests.apps.testapp_namespaced.urls', namespace="testapp_namespaced") ), # Represents protected content url(r'^testapp_content/', include('tests.apps.testapp_content.urls')), # For testing fnmatches url(r"test_fnmatch/extra_text/$", empty_view, name="test_fnmatch"), # Default for DJSTRIPE_SUBSCRIPTION_REDIRECT url(r"subscribe/$", empty_view, name="test_url_subscribe") ]
Remove useless url from test urls
Remove useless url from test urls
Python
mit
pydanny/dj-stripe,kavdev/dj-stripe,dj-stripe/dj-stripe,kavdev/dj-stripe,pydanny/dj-stripe,dj-stripe/dj-stripe
--- +++ @@ -16,7 +16,6 @@ url(r'^admin/', admin.site.urls), url(r'^djstripe/', include("djstripe.urls", namespace="djstripe")), url(r'^testapp/', include('tests.apps.testapp.urls')), - url(r'^__debug__/', include('tests.apps.testapp.urls')), url( r'^testapp_namespaced/', include('tests.apps.testapp_namespaced.urls', namespace="testapp_namespaced")
305d04fc0841035bf744480261017c14ae3045b0
syntax_makefile.py
syntax_makefile.py
import wx.stc ident = "makefile" name = "Makefile" extensions = ["Makefile", "*.mk"] lexer = wx.stc.STC_LEX_MAKEFILE indent = 8 use_tabs = True stylespecs = ( (wx.stc.STC_STYLE_DEFAULT, ""), ) keywords = ""
import wx.stc ident = "makefile" name = "Makefile" extensions = ["*Makefile", "*makefile", "*.mk"] lexer = wx.stc.STC_LEX_MAKEFILE indent = 8 use_tabs = True stylespecs = ( (wx.stc.STC_STYLE_DEFAULT, ""), ) keywords = ""
Match files ending in makefile or Makefile.
Match files ending in makefile or Makefile.
Python
mit
shaurz/devo
--- +++ @@ -2,7 +2,7 @@ ident = "makefile" name = "Makefile" -extensions = ["Makefile", "*.mk"] +extensions = ["*Makefile", "*makefile", "*.mk"] lexer = wx.stc.STC_LEX_MAKEFILE indent = 8 use_tabs = True
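The widened patterns above look like shell-style globs. Assuming they are interpreted with fnmatch-style matching (an assumption -- the editor may use its own matcher), this is what they accept; `fnmatchcase` keeps the check case-sensitive on every platform:

```python
import fnmatch

patterns = ["*Makefile", "*makefile", "*.mk"]

for name in ("Makefile", "GNUmakefile", "build.mk", "Makefile.am"):
    hit = any(fnmatch.fnmatchcase(name, p) for p in patterns)
    print(name, hit)
# Makefile True, GNUmakefile True, build.mk True, Makefile.am False
```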
19ac41a14875c6df2ed9ddf7b7b315ffb5c70819
tests/specs/test_yaml_file.py
tests/specs/test_yaml_file.py
import unittest try: from unittest import mock except ImportError: import mock from conda_env import env from conda_env.specs.yaml_file import YamlFileSpec class TestYAMLFile(unittest.TestCase): def test_no_environment_file(self): spec = YamlFileSpec(name=None, filename='not-a-file') self.assertEqual(spec.can_handle(), False) def test_environment_file_exist(self): with mock.patch.object(env, 'from_file', return_value={}): spec = YamlFileSpec(name=None, filename='environment.yaml') self.assertTrue(spec.can_handle()) def test_get_environment(self): with mock.patch.object(env, 'from_file', return_value={}): spec = YamlFileSpec(name=None, filename='environment.yaml') self.assertIsInstance(spec.environment, dict)
import unittest import random try: from unittest import mock except ImportError: import mock from conda_env import env from conda_env.specs.yaml_file import YamlFileSpec class TestYAMLFile(unittest.TestCase): def test_no_environment_file(self): spec = YamlFileSpec(name=None, filename='not-a-file') self.assertEqual(spec.can_handle(), False) def test_environment_file_exist(self): with mock.patch.object(env, 'from_file', return_value={}): spec = YamlFileSpec(name=None, filename='environment.yaml') self.assertTrue(spec.can_handle()) def test_get_environment(self): r = random.randint(100, 200) with mock.patch.object(env, 'from_file', return_value=r): spec = YamlFileSpec(name=None, filename='environment.yaml') self.assertEqual(spec.environment, r) def test_filename(self): filename = "filename_{}".format(random.randint(100, 200)) with mock.patch.object(env, 'from_file') as from_file: spec = YamlFileSpec(filename=filename) spec.environment from_file.assert_called_with(filename)
Add more tests to YamlFile class
Add more tests to YamlFile class
Python
bsd-3-clause
ESSS/conda-env,phobson/conda-env,conda/conda-env,asmeurer/conda-env,conda/conda-env,mikecroucher/conda-env,isaac-kit/conda-env,ESSS/conda-env,isaac-kit/conda-env,dan-blanchard/conda-env,phobson/conda-env,nicoddemus/conda-env,dan-blanchard/conda-env,asmeurer/conda-env,nicoddemus/conda-env,mikecroucher/conda-env
--- +++ @@ -1,4 +1,5 @@ import unittest +import random try: from unittest import mock except ImportError: @@ -19,6 +20,14 @@ self.assertTrue(spec.can_handle()) def test_get_environment(self): - with mock.patch.object(env, 'from_file', return_value={}): + r = random.randint(100, 200) + with mock.patch.object(env, 'from_file', return_value=r): spec = YamlFileSpec(name=None, filename='environment.yaml') - self.assertIsInstance(spec.environment, dict) + self.assertEqual(spec.environment, r) + + def test_filename(self): + filename = "filename_{}".format(random.randint(100, 200)) + with mock.patch.object(env, 'from_file') as from_file: + spec = YamlFileSpec(filename=filename) + spec.environment + from_file.assert_called_with(filename)
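The added tests lean on random sentinel values plus `mock.patch.object(..., return_value=...)` and `assert_called_with`. A self-contained stdlib sketch of the same pattern (the `Loader`/`read` names are made up for illustration):

```python
import random
from unittest import mock

class Loader:
    def from_file(self, filename):
        raise RuntimeError("should not run in tests")

def read(loader, filename):
    return loader.from_file(filename)

loader = Loader()
expected = random.randint(100, 200)
with mock.patch.object(loader, "from_file", return_value=expected) as from_file:
    result = read(loader, "environment.yaml")

assert result == expected
from_file.assert_called_with("environment.yaml")
```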
ecc3a9c90d20699c6f0bf18600cf9bd755b56d65
rollbar/contrib/fastapi/utils.py
rollbar/contrib/fastapi/utils.py
import logging log = logging.getLogger(__name__) class FastAPIVersionError(Exception): def __init__(self, version, reason=''): err_msg = f'FastAPI {version}+ is required' if reason: err_msg += f' {reason}' log.error(err_msg) return super().__init__(err_msg)
import functools import logging import fastapi log = logging.getLogger(__name__) class FastAPIVersionError(Exception): def __init__(self, version, reason=''): err_msg = f'FastAPI {version}+ is required' if reason: err_msg += f' {reason}' log.error(err_msg) return super().__init__(err_msg) class fastapi_min_version: def __init__(self, min_version): self.min_version = min_version def __call__(self, func): @functools.wraps(func) def wrapper(*args, **kwargs): if fastapi.__version__ < self.min_version: raise FastAPIVersionError( '0.41.0', reason=f'to use {func.__name__}() function' ) return func(*args, **kwargs) return wrapper
Add decorator to check minimum required FastAPI version
Add decorator to check minimum required FastAPI version
Python
mit
rollbar/pyrollbar
--- +++ @@ -1,4 +1,7 @@ +import functools import logging + +import fastapi log = logging.getLogger(__name__) @@ -11,3 +14,20 @@ log.error(err_msg) return super().__init__(err_msg) + + +class fastapi_min_version: + def __init__(self, min_version): + self.min_version = min_version + + def __call__(self, func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + if fastapi.__version__ < self.min_version: + raise FastAPIVersionError( + '0.41.0', reason=f'to use {func.__name__}() function' + ) + + return func(*args, **kwargs) + + return wrapper
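The record above implements a parameterised, class-based decorator that keeps the wrapped function's metadata via `functools.wraps`. A generic, stdlib-only sketch of the same shape (`LIB_VERSION` and the error type are made up; the naive string comparison mirrors the record, though `packaging.version` would be more robust for real version numbers):

```python
import functools

LIB_VERSION = "0.40.0"  # hypothetical installed version

class min_version:
    """Reject calls when the (hypothetical) dependency is too old."""
    def __init__(self, required):
        self.required = required

    def __call__(self, func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if LIB_VERSION < self.required:  # naive string compare, as in the record
                raise RuntimeError(
                    f"{self.required}+ is required to use {func.__name__}()")
            return func(*args, **kwargs)
        return wrapper

@min_version("0.41.0")
def feature():
    return "ok"

print(feature.__name__)   # "feature" -- preserved by functools.wraps
try:
    feature()
except RuntimeError as exc:
    print(exc)            # 0.41.0+ is required to use feature()
```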
e895795c6461da12f4824994168893ff131c0dd9
runtests.py
runtests.py
import sys import django from django.conf import settings APP_NAME = 'impersonate' settings.configure( DEBUG=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, USE_TZ=True, ROOT_URLCONF='{0}.tests'.format(APP_NAME), MIDDLEWARE_CLASSES=( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'impersonate.middleware.ImpersonateMiddleware', ), INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.admin', APP_NAME, ), # turn off for testing, override in logging-specific tests IMPERSONATE_SESSION_LOGGING=False, ) from django.test.utils import get_runner try: django.setup() except AttributeError: pass TestRunner = get_runner(settings) test_runner = TestRunner() failures = test_runner.run_tests([APP_NAME]) if failures: sys.exit(failures)
import sys import django from django.conf import settings APP_NAME = 'impersonate' settings.configure( DEBUG=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, USE_TZ=True, ROOT_URLCONF='{0}.tests'.format(APP_NAME), MIDDLEWARE_CLASSES=( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'impersonate.middleware.ImpersonateMiddleware', ), INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.admin', APP_NAME, ), # turn off for testing, override in logging-specific tests IMPERSONATE_DISABLE_LOGGING=True, ) from django.test.utils import get_runner try: django.setup() except AttributeError: pass TestRunner = get_runner(settings) test_runner = TestRunner() failures = test_runner.run_tests([APP_NAME]) if failures: sys.exit(failures)
Disable impersonation logging during testing
Disable impersonation logging during testing
Python
bsd-3-clause
Top20Talent/django-impersonate,Top20Talent/django-impersonate
--- +++ @@ -27,7 +27,7 @@ APP_NAME, ), # turn off for testing, override in logging-specific tests - IMPERSONATE_SESSION_LOGGING=False, + IMPERSONATE_DISABLE_LOGGING=True, ) from django.test.utils import get_runner
8582126efa9907b06e9f9b183a0919feba9fb6b0
indra/literature/dart_client.py
indra/literature/dart_client.py
import logging import requests from indra.config import CONFIG_DICT logger = logging.getLogger(__name__) dart_uname = CONFIG_DICT['DART_WM_USERNAME'] dart_pwd = CONFIG_DICT['DART_WM_PASSWORD'] dart_url = 'https://indra-ingest-pipeline-rest-1.prod.dart.worldmodelers.com' \ '/dart/api/v1/readers/query' def query_dart_notifications(readers=None, versions=None, document_ids=None, timestamp=None): """ Parameters ---------- readers : list versions : list document_ids : list timestamp : dict("on"|"before"|"after",str) Returns ------- dict """ if all(v is None for v in [readers, versions, document_ids, timestamp]): return {} pd = {} if readers: pd['readers'] = readers if versions: pd['versions'] = versions if document_ids: pd['document_ids'] = document_ids if isinstance(timestamp, dict): pass # Check res = requests.post( dart_url, data={'metadata': None }, auth=(dart_uname, dart_pwd) ) if res.status_code != 200: logger.warning(f'Dart Notifications Endpoint returned with status' f' {res.status_code}: {res.text}') return {} return res.json()
import logging import requests from indra.config import get_config logger = logging.getLogger(__name__) dart_uname = get_config('DART_WM_USERNAME') dart_pwd = get_config('DART_WM_PASSWORD') dart_url = 'https://indra-ingest-pipeline-rest-1.prod.dart.worldmodelers.com' \ '/dart/api/v1/readers/query' def query_dart_notifications(readers=None, versions=None, document_ids=None, timestamp=None): """ Parameters ---------- readers : list versions : list document_ids : list timestamp : dict("on"|"before"|"after",str) Returns ------- dict """ if all(v is None for v in [readers, versions, document_ids, timestamp]): return {} pd = {} if readers: pd['readers'] = readers if versions: pd['versions'] = versions if document_ids: pd['document_ids'] = document_ids if isinstance(timestamp, dict): pass # Check res = requests.post( dart_url, data={'metadata': None }, auth=(dart_uname, dart_pwd) ) if res.status_code != 200: logger.warning(f'Dart Notifications Endpoint returned with status' f' {res.status_code}: {res.text}') return {} return res.json()
Use get_config instead of CONFIG_DICT
Use get_config instead of CONFIG_DICT
Python
bsd-2-clause
johnbachman/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,bgyori/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/belpy,bgyori/indra,bgyori/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra
--- +++ @@ -1,13 +1,13 @@ import logging import requests -from indra.config import CONFIG_DICT +from indra.config import get_config logger = logging.getLogger(__name__) -dart_uname = CONFIG_DICT['DART_WM_USERNAME'] -dart_pwd = CONFIG_DICT['DART_WM_PASSWORD'] +dart_uname = get_config('DART_WM_USERNAME') +dart_pwd = get_config('DART_WM_PASSWORD') dart_url = 'https://indra-ingest-pipeline-rest-1.prod.dart.worldmodelers.com' \
33c595574921a64bec79de5ca72a62c22d09eb94
django_todolist/todo/models.py
django_todolist/todo/models.py
from django.db import models class Todo(models.Model): """ Todo Model: name, description, created """ name = models.CharField(max_length=100, unique=True) description = models.TextField() created = models.DateTimeField() def __unicode__(self): return self.name
from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class Todo(models.Model): """ Todo Model: name, description, created """ name = models.CharField(max_length=100, unique=True) description = models.TextField() created = models.DateTimeField() def __str__(self): return self.name
Add Python portability to model
Add Python portability to model
Python
bsd-3-clause
andresgz/django_todolist,andresgz/django_todolist,andresgz/django_todolist,andresgz/django_todolist
--- +++ @@ -1,6 +1,8 @@ from django.db import models +from django.utils.encoding import python_2_unicode_compatible +@python_2_unicode_compatible class Todo(models.Model): """ Todo Model: name, description, created @@ -9,5 +11,5 @@ description = models.TextField() created = models.DateTimeField() - def __unicode__(self): + def __str__(self): return self.name
16cca2bc9aa8d5ecf6eb4d829de00905d3d15759
conveyor/store.py
conveyor/store.py
class BaseStore(object): def set(self, key, value): raise NotImplementedError def get(self, key): raise NotImplementedError class InMemoryStore(BaseStore): def __init__(self, *args, **kwargs): super(InMemoryStore, self).__init__(*args, **kwargs) self._data = {} def set(self, key, value): self._data[key] = value def get(self, key): return self._data[key] class RedisStore(BaseStore): def __init__(self, connection=None, prefix=None, *args, **kwargs): super(RedisStore, self).__init__(*args, **kwargs) import redis self.redis = redis.StrictRedis(**connection) self.prefix = prefix def set(self, key, value): if self.prefix is not None: key = self.prefix + key self.redis.set(key, value) def get(self, key): if self.prefix is not None: key = self.prefix + key return self.redis.get(key)
class BaseStore(object): def set(self, key, value): raise NotImplementedError def get(self, key): raise NotImplementedError class InMemoryStore(BaseStore): def __init__(self, *args, **kwargs): super(InMemoryStore, self).__init__(*args, **kwargs) self._data = {} def set(self, key, value): self._data[key] = value def get(self, key): return self._data[key] class RedisStore(BaseStore): def __init__(self, url=None, prefix=None, *args, **kwargs): super(RedisStore, self).__init__(*args, **kwargs) import redis self.redis = redis.from_url(url) self.prefix = prefix def set(self, key, value): if self.prefix is not None: key = self.prefix + key self.redis.set(key, value) def get(self, key): if self.prefix is not None: key = self.prefix + key return self.redis.get(key)
Switch redis to use the from_url method
Switch redis to use the from_url method
Python
bsd-2-clause
crateio/carrier
--- +++ @@ -22,11 +22,11 @@ class RedisStore(BaseStore): - def __init__(self, connection=None, prefix=None, *args, **kwargs): + def __init__(self, url=None, prefix=None, *args, **kwargs): super(RedisStore, self).__init__(*args, **kwargs) import redis - self.redis = redis.StrictRedis(**connection) + self.redis = redis.from_url(url) self.prefix = prefix def set(self, key, value):
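The switch above moves from keyword arguments to a single connection URL. A brief sketch, assuming the `redis` (redis-py) package is installed; constructing a client is lazy, so no server is needed until a command is issued:

```python
import redis

# Equivalent clients; from_url keeps the configuration in one string.
r_kwargs = redis.StrictRedis(host="localhost", port=6379, db=0)
r_url = redis.from_url("redis://localhost:6379/0")

# Commands require a reachable server, e.g.:
# r_url.set("example:key", "value")
# r_url.get("example:key")
```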
ef42117ec2bd2a275dcea5f5a2d57322bbd21faa
wafer/talks/tests/fixtures.py
wafer/talks/tests/fixtures.py
from wafer.talks.models import Talk, TalkType from wafer.tests.utils import create_user def create_talk_type(name): """Create a talk type""" return TalkType.objects.create(name=name) def create_talk(title, status, username=None, user=None, talk_type=None): if username: user = create_user(username) talk = Talk.objects.create( title=title, status=status, corresponding_author_id=user.id) talk.authors.add(user) talk.notes = "Some notes for talk %s" % title talk.private_notes = "Some private notes for talk %s" % title talk.save() if talk_type: talk.talk_type = talk_type talk.save() return talk
from wafer.talks.models import Talk, TalkType from wafer.tests.utils import create_user def create_talk_type(name): """Create a talk type""" return TalkType.objects.create(name=name) def create_talk(title, status, username=None, user=None, talk_type=None): if sum((user is None, username is None)) != 1: raise ValueError('One of user OR username must be specified') if username: user = create_user(username) talk = Talk.objects.create( title=title, status=status, corresponding_author_id=user.id) talk.authors.add(user) talk.notes = "Some notes for talk %s" % title talk.private_notes = "Some private notes for talk %s" % title talk.save() if talk_type: talk.talk_type = talk_type talk.save() return talk
Check that user OR username is specified
Check that user OR username is specified
Python
isc
CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
--- +++ @@ -8,6 +8,8 @@ def create_talk(title, status, username=None, user=None, talk_type=None): + if sum((user is None, username is None)) != 1: + raise ValueError('One of user OR username must be specified') if username: user = create_user(username) talk = Talk.objects.create(
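The guard above enforces "exactly one of user or username" by summing booleans. A standalone illustration of the same check (the stub body is made up):

```python
def create_talk(title, username=None, user=None):
    # Exactly one of user / username must be given, mirroring the fixture above.
    if sum((user is None, username is None)) != 1:
        raise ValueError('One of user OR username must be specified')
    return user if user is not None else f"user created from {username!r}"

print(create_talk("Intro", username="alice"))
print(create_talk("Intro", user="existing-user-object"))
for bad_kwargs in ({}, {"username": "alice", "user": "existing-user-object"}):
    try:
        create_talk("Intro", **bad_kwargs)
    except ValueError as exc:
        print(exc)  # raised both when neither and when both are given
```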
2c41a2b37df94339074fdc04ccb3ef560d2e6cac
falmer/events/filters.py
falmer/events/filters.py
from django_filters import FilterSet, CharFilter, IsoDateTimeFilter, BooleanFilter, ModelChoiceFilter from falmer.events.models import Curator from . import models class EventFilterSet(FilterSet): class Meta: model = models.Event fields = ( 'title', 'venue', 'type', 'bundle', 'parent', 'brand', 'student_group', 'from_time', 'to_time', 'audience_just_for_pgs', 'audience_suitable_kids_families', 'audience_good_to_meet_people', 'is_over_18_only', 'cost', 'alcohol', 'type', 'ticket_level', 'curated_by' ) title = CharFilter(lookup_expr='icontains') brand = CharFilter(field_name='brand__slug') bundle = CharFilter(field_name='bundle__slug') to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte') from_time = IsoDateTimeFilter(field_name='end_time', lookup_expr='gte') uncurated = BooleanFilter(field_name='curated_by', lookup_expr='isnull') curated_by = ModelChoiceFilter(queryset=Curator.objects.all(), field_name='curated_by') # # class BrandingPeriodFilerSet(FilterSet): # class Meta: # model = BrandingPeriod
from django_filters import FilterSet, CharFilter, IsoDateTimeFilter, BooleanFilter, ModelChoiceFilter from falmer.events.models import Curator from . import models class EventFilterSet(FilterSet): class Meta: model = models.Event fields = ( 'title', 'venue', 'type', 'bundle', 'parent', 'brand', 'student_group', 'from_time', 'to_time', 'audience_just_for_pgs', 'audience_suitable_kids_families', 'audience_good_to_meet_people', 'is_over_18_only', 'cost', 'alcohol', 'type', 'ticket_level', 'curated_by' ) title = CharFilter(lookup_expr='icontains') brand = CharFilter(field_name='brand__slug') bundle = CharFilter(field_name='bundle__slug') student_group = CharFilter(field_name='student_group__slug') to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte') from_time = IsoDateTimeFilter(field_name='end_time', lookup_expr='gte') uncurated = BooleanFilter(field_name='curated_by', lookup_expr='isnull') curated_by = ModelChoiceFilter(queryset=Curator.objects.all(), field_name='curated_by') # # class BrandingPeriodFilerSet(FilterSet): # class Meta: # model = BrandingPeriod
Use SG slug for event filtering
Use SG slug for event filtering
Python
mit
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
--- +++ @@ -33,6 +33,7 @@ brand = CharFilter(field_name='brand__slug') bundle = CharFilter(field_name='bundle__slug') + student_group = CharFilter(field_name='student_group__slug') to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte')
2e4e9ab2bab2e0a4bd00e10dcf115b1f96d1c714
modules/urlparser/__init__.py
modules/urlparser/__init__.py
from modules import * import re import urllib2 import traceback try: import simplejson as json except ImportError: import json from unidecode import unidecode from twitter import Twitter from bitly import Bitly from youtube import Youtube class Urlparser(Module): """Checks incoming messages for possible urls. If a url is found then route the url to a corresponding module to handle. """ def __init__(self, *args, **kwargs): """Constructor.""" Module.__init__(self, kwargs=kwargs) self.url_patterns = [ Twitter, Youtube, Bitly, ] self.url_pattern = re.compile("http://(.*?)") def _register_events(self): self.add_event('pubmsg', 'parse_message') def parse_message(self, event): nick = event['nick'] # make sure the message contains a url before checking # the other handlers patterns try: for handler in self.url_patterns: m = handler.pattern.search(event['message']) if m: handler_instance = handler() msg = handler_instance.handle(event=event, match=m) if msg: self.server.privmsg(event['target'], msg.encode('ascii', 'ignore')) break except: print "<<Error>> in Urlparser" print traceback.print_exc()
from modules import * import re import urllib2 import traceback try: import simplejson as json except ImportError: import json from unidecode import unidecode from twitter import Twitter from bitly import Bitly from youtube import Youtube class Urlparser(Module): """Checks incoming messages for possible urls. If a url is found then route the url to a corresponding module to handle. """ def __init__(self, *args, **kwargs): """Constructor.""" Module.__init__(self, kwargs=kwargs) self.url_patterns = [ Twitter, Youtube, Bitly, ] self.url_pattern = re.compile("http://(.*?)") def _register_events(self): self.add_event('pubmsg', 'parse_message') def parse_message(self, event): nick = event['nick'] # make sure the message contains a url before checking # the other handlers patterns try: for handler in self.url_patterns: m = handler.pattern.search(event['message']) if m: handler_instance = handler() msg = handler_instance.handle(event=event, match=m) if msg: self.server.privmsg(event['target'], msg.encode('ascii', 'ignore')) break except: print "<<Error>> in Urlparser (%s)" % (event['message']) print traceback.print_exc()
Include the URL message when Urlparser fails to run
Include the URL message when Urlparser fails to run
Python
mit
billyvg/piebot
--- +++ @@ -51,6 +51,6 @@ self.server.privmsg(event['target'], msg.encode('ascii', 'ignore')) break except: - print "<<Error>> in Urlparser" + print "<<Error>> in Urlparser (%s)" % (event['message']) print traceback.print_exc()
a6d49059851450c7ea527941600564cb3f48cc72
flask_profiler/storage/base.py
flask_profiler/storage/base.py
class BaseStorage(object): """docstring for BaseStorage""" def __init__(self): super(BaseStorage, self).__init__() def filter(self, criteria): raise Exception("Not implemneted Error") def getSummary(self, criteria): raise Exception("Not implemneted Error") def insert(self, measurement): raise Exception("Not implemented Error") def delete(self, measurementId): raise Exception("Not imlemented Error")
class BaseStorage(object): """docstring for BaseStorage""" def __init__(self): super(BaseStorage, self).__init__() def filter(self, criteria): raise Exception("Not implemneted Error") def getSummary(self, criteria): raise Exception("Not implemneted Error") def insert(self, measurement): raise Exception("Not implemented Error") def delete(self, measurementId): raise Exception("Not imlemented Error") def truncate(self): raise Exception("Not imlemented Error")
Add truncate method to BaseStorage class
Add truncate method to BaseStorage class This will provide an interface for supporting any new database, thereby making the code more robust.
Python
mit
muatik/flask-profiler
--- +++ @@ -16,3 +16,6 @@ def delete(self, measurementId): raise Exception("Not imlemented Error") + + def truncate(self): + raise Exception("Not imlemented Error")
d7299fd931ae62cc661b48dbc84aa161a395f1fa
fermipy/__init__.py
fermipy/__init__.py
import os __version__ = "unknown" try: from version import get_git_version __version__ = get_git_version() except Exception as message: print(message) __author__ = "Matthew Wood" PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) PACKAGE_DATA = os.path.join(PACKAGE_ROOT,'data') os.environ['FERMIPY_ROOT'] = PACKAGE_ROOT os.environ['FERMIPY_DATA_DIR'] = PACKAGE_DATA
from __future__ import absolute_import, division, print_function import os __version__ = "unknown" try: from .version import get_git_version __version__ = get_git_version() except Exception as message: print(message) __author__ = "Matthew Wood" PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) PACKAGE_DATA = os.path.join(PACKAGE_ROOT,'data') os.environ['FERMIPY_ROOT'] = PACKAGE_ROOT os.environ['FERMIPY_DATA_DIR'] = PACKAGE_DATA
Fix version module import for Python 3
Fix version module import for Python 3
Python
bsd-3-clause
jefemagril/fermipy,jefemagril/fermipy,jefemagril/fermipy,fermiPy/fermipy
--- +++ @@ -1,9 +1,10 @@ +from __future__ import absolute_import, division, print_function import os __version__ = "unknown" try: - from version import get_git_version + from .version import get_git_version __version__ = get_git_version() except Exception as message: print(message)
67795baac1f7eb10fbfc90fda5a9f54949af6c24
ckanext/tayside/helpers.py
ckanext/tayside/helpers.py
from ckan import model from ckan.plugins import toolkit def _get_action(action, context_dict, data_dict): return toolkit.get_action(action)(context_dict, data_dict) def get_groups(): # Helper used on the homepage for showing groups data_dict = { 'sort': 'package_count', 'limit': 7, 'all_fields': True } groups = _get_action('group_list', {}, data_dict) return groups
from ckan import model from ckan.plugins import toolkit def _get_action(action, context_dict, data_dict): return toolkit.get_action(action)(context_dict, data_dict) def get_groups(): # Helper used on the homepage for showing groups data_dict = { 'sort': 'package_count', 'all_fields': True } groups = _get_action('group_list', {}, data_dict) return groups
Remove limit of 7 groups in homepage
Remove limit of 7 groups in homepage
Python
agpl-3.0
ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside
--- +++ @@ -11,7 +11,6 @@ data_dict = { 'sort': 'package_count', - 'limit': 7, 'all_fields': True } groups = _get_action('group_list', {}, data_dict)
46009d28e2b6285722287ccbeaa8d2f9c6c47fde
ldap_dingens/default_config.py
ldap_dingens/default_config.py
from datetime import timedelta class DefaultConfiguration: DEBUG = False MAIL_SERVER = "localhost" MAIL_PORT = 25 MAIL_USER = None MAIL_PASSWORD = None MAIL_CAFILE = None INVITATION_SUBJECT = "Invitation to join the FSFW!" TOKEN_BYTES = 5 TOKEN_LIFETIME = timedelta(days=7) LOGIN_LIFETIME = timedelta(days=2) #: Host name of the LDAP server LDAP_SERVER = "localhost" #: str.format string to create a DN which refers to a user with a given #: loginname LDAP_USER_DN_FORMAT = "uid={loginname},ou=Account,dc=fsfw-dresden,dc=de" #: the DN to bind to for admin activity (create new users, change user #: info) LDAP_ADMIN_DN = "cn=AuthManager,ou=Management,dc=fsfw-dresden,dc=de" #: set this to the password for the LDAP_ADMIN_DN above LDAP_ADMIN_PASSWORD = ...
from datetime import timedelta class DefaultConfiguration: DEBUG = False MAIL_SERVER = "localhost" MAIL_PORT = 25 MAIL_USER = None MAIL_PASSWORD = None MAIL_CAFILE = None INVITATION_SUBJECT = "Invitation to join the FSFW!" TOKEN_BYTES = 5 TOKEN_LIFETIME = timedelta(days=7) LOGIN_LIFETIME = timedelta(days=2) LDAP_SERVER = "localhost"
Remove pointless default values from DefaultConfiguration
Remove pointless default values from DefaultConfiguration
Python
agpl-3.0
fsfw-dresden/ldap-dingens,fsfw-dresden/ldap-dingens
--- +++ @@ -16,16 +16,4 @@ LOGIN_LIFETIME = timedelta(days=2) - #: Host name of the LDAP server LDAP_SERVER = "localhost" - - #: str.format string to create a DN which refers to a user with a given - #: loginname - LDAP_USER_DN_FORMAT = "uid={loginname},ou=Account,dc=fsfw-dresden,dc=de" - - #: the DN to bind to for admin activity (create new users, change user - #: info) - LDAP_ADMIN_DN = "cn=AuthManager,ou=Management,dc=fsfw-dresden,dc=de" - - #: set this to the password for the LDAP_ADMIN_DN above - LDAP_ADMIN_PASSWORD = ...
8fad8a4f1591fb4a7d7d1bdf932c5918197b475c
tests/client.py
tests/client.py
# -*- coding: utf-8 -*- """ Description: Client side of sanity check Author: Mike Ellis Copyright 2017 Owner """ from htmltree import * def start(): console.log("Starting") newcontent = H1("Sanity check PASS", _class='test', style=dict(color='green')) console.log(newcontent.render(0)) document.body.innerHTML = newcontent.render() console.log("Finished") document.addEventListener('DOMContentLoaded', start)
# -*- coding: utf-8 -*- """ Description: Client side of sanity check Uses JS functions insertAdjacentHTML, innerHTML and addEventListener. See https://developer.mozilla.org/en-US/docs/Web/API/Element/insertAdjacentHTML https://developer.mozilla.org/en-US/docs/Web/API/Element/innerHTML https://developer.mozilla.org/en-US/docs/Web/API/EventTarget/addEventListener Author: Mike Ellis Copyright 2017 Owner """ from htmltree import * def start(): console.log("Starting") ## insert a style element at the end of the <head? cssrules = {'.test':{'color':'green', 'text-align':'center'}} style = Style(**cssrules) document.head.insertAdjacentHTML('beforeend', style.render()) ## Replace the <body> content newcontent = Div(H1("Sanity check PASS", _class='test')) document.body.innerHTML = newcontent.render() console.log("Finished") ## JS is event driven. ## Wait for DOM load to complete before firing ## our start() function. document.addEventListener('DOMContentLoaded', start)
Fix <style> rendering under Transcrypt.
Fix <style> rendering under Transcrypt. The hasattr test in renderCss() was failing when it shouldn't have. Fixed by removal. Updated tests/client.py to create and append a style element to detect problems related to Style() on the client side.
Python
mit
Michael-F-Ellis/htmltree
--- +++ @@ -1,15 +1,29 @@ # -*- coding: utf-8 -*- """ Description: Client side of sanity check +Uses JS functions insertAdjacentHTML, innerHTML and addEventListener. +See https://developer.mozilla.org/en-US/docs/Web/API/Element/insertAdjacentHTML + https://developer.mozilla.org/en-US/docs/Web/API/Element/innerHTML + https://developer.mozilla.org/en-US/docs/Web/API/EventTarget/addEventListener + Author: Mike Ellis Copyright 2017 Owner """ from htmltree import * def start(): console.log("Starting") - newcontent = H1("Sanity check PASS", _class='test', style=dict(color='green')) - console.log(newcontent.render(0)) + ## insert a style element at the end of the <head? + cssrules = {'.test':{'color':'green', 'text-align':'center'}} + style = Style(**cssrules) + document.head.insertAdjacentHTML('beforeend', style.render()) + + ## Replace the <body> content + newcontent = Div(H1("Sanity check PASS", _class='test')) document.body.innerHTML = newcontent.render() console.log("Finished") + +## JS is event driven. +## Wait for DOM load to complete before firing +## our start() function. document.addEventListener('DOMContentLoaded', start)
e91eac0c667c74062672a1a2cdb7da2a910f8cf7
InvenTree/users/serializers.py
InvenTree/users/serializers.py
from rest_framework import serializers from django.contrib.auth.models import User class UserSerializer(serializers.HyperlinkedModelSerializer): """ Serializer for a User """ class Meta: model = User fields = ('username', 'first_name', 'last_name', 'email',)
from rest_framework import serializers from django.contrib.auth.models import User class UserSerializer(serializers.HyperlinkedModelSerializer): """ Serializer for a User """ class Meta: model = User fields = ('pk', 'username', 'first_name', 'last_name', 'email',)
Include PK in user serializer
Include PK in user serializer
Python
mit
inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree
--- +++ @@ -8,7 +8,8 @@ class Meta: model = User - fields = ('username', + fields = ('pk', + 'username', 'first_name', 'last_name', 'email',)
bf9866e2c337f024fcc02de69456a235dc7ac07c
labs/lab-6/common.py
labs/lab-6/common.py
#!/usr/bin/env python # Copyright 2014-2015 Boundary, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import time import sys import os from tspapi import API class Common(object): def __init__(self, ): self.api = API() self.usage_args = "" @staticmethod def usage(self, args): sys.stderr.write("usage: {0} {1}\n".format(os.path.basename(sys.argv[0]), args)) def send_measurements(self, measurements): """ Sends measurements using the Measurement API :param measurements: :return: None """ self.api.measurement_create_batch(measurements) def run(self): """ Main loop """ while True: print("Doing absolutely nothing") time.sleep(self.interval)
#!/usr/bin/env python # Copyright 2014-2015 Boundary, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import time import sys import os from tspapi import API class Common(object): def __init__(self, ): self.api = API() self.usage_args = "" # Set our application id from the environment variable self.appl_id = os.environ['TSI_APPL_ID'] @staticmethod def usage(args): sys.stderr.write("usage: {0} {1}\n".format(os.path.basename(sys.argv[0]), args)) def send_measurements(self, measurements): """ Sends measurements using the Measurement API :param measurements: :return: None """ self.api.measurement_create_batch(measurements) def run(self): """ Main loop """ while True: print("Doing absolutely nothing") time.sleep(self.interval)
Add application id and static method for usage
Add application id and static method for usage
Python
apache-2.0
jdgwartney/tsi-lab,jdgwartney/tsi-lab,jdgwartney/tsi-lab,jdgwartney/tsi-lab,boundary/tsi-lab,boundary/tsi-lab,boundary/tsi-lab,boundary/tsi-lab
--- +++ @@ -22,9 +22,11 @@ def __init__(self, ): self.api = API() self.usage_args = "" + # Set our application id from the environment variable + self.appl_id = os.environ['TSI_APPL_ID'] @staticmethod - def usage(self, args): + def usage(args): sys.stderr.write("usage: {0} {1}\n".format(os.path.basename(sys.argv[0]), args)) def send_measurements(self, measurements):
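The fix above drops `self` from a `@staticmethod`, since a static method receives only the arguments the caller passes. A tiny illustration:

```python
class Common:
    @staticmethod
    def usage(args):
        # No 'self': the single parameter is exactly what the caller passes.
        return "usage: prog " + args

print(Common.usage("--interval 30"))    # works on the class
print(Common().usage("--interval 30"))  # and on an instance, with the same signature
```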
eec24c2cff1b588b957215a867a85a148f4e71e9
tuneme/views.py
tuneme/views.py
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger from django.shortcuts import render from molo.core.models import ArticlePage from molo.commenting.models import MoloComment from wagtail.wagtailsearch.models import Query def search(request, results_per_page=10): search_query = request.GET.get('q', None) page = request.GET.get('p', 1) if search_query: results = ArticlePage.objects.live().search(search_query) Query.get(search_query).add_hit() else: results = ArticlePage.objects.none() paginator = Paginator(results, results_per_page) try: search_results = paginator.page(page) except PageNotAnInteger: search_results = paginator.page(1) except EmptyPage: search_results = paginator.page(paginator.num_pages) return render(request, 'search/search_results.html', { 'search_query': search_query, 'search_results': search_results, 'results': results, }) def report_response(request, comment_pk): comment = MoloComment.objects.get(pk=comment_pk) return render(request, 'comments/report_response.html', { 'article': comment.content_object, })
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger from django.shortcuts import render from django.utils.translation import get_language_from_request from molo.core.utils import get_locale_code from molo.core.models import ArticlePage from molo.commenting.models import MoloComment from wagtail.wagtailsearch.models import Query def search(request, results_per_page=10): search_query = request.GET.get('q', None) page = request.GET.get('p', 1) locale = get_locale_code(get_language_from_request(request)) if search_query: results = ArticlePage.objects.filter( languages__language__locale=locale).live().search(search_query) Query.get(search_query).add_hit() else: results = ArticlePage.objects.none() paginator = Paginator(results, results_per_page) try: search_results = paginator.page(page) except PageNotAnInteger: search_results = paginator.page(1) except EmptyPage: search_results = paginator.page(paginator.num_pages) return render(request, 'search/search_results.html', { 'search_query': search_query, 'search_results': search_results, 'results': results, }) def report_response(request, comment_pk): comment = MoloComment.objects.get(pk=comment_pk) return render(request, 'comments/report_response.html', { 'article': comment.content_object, })
Add multi-language support for search
Add multi-language support for search
Python
bsd-2-clause
praekelt/molo-tuneme,praekelt/molo-tuneme,praekelt/molo-tuneme,praekelt/molo-tuneme
--- +++ @@ -1,6 +1,8 @@ from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger from django.shortcuts import render +from django.utils.translation import get_language_from_request +from molo.core.utils import get_locale_code from molo.core.models import ArticlePage from molo.commenting.models import MoloComment from wagtail.wagtailsearch.models import Query @@ -9,9 +11,11 @@ def search(request, results_per_page=10): search_query = request.GET.get('q', None) page = request.GET.get('p', 1) + locale = get_locale_code(get_language_from_request(request)) if search_query: - results = ArticlePage.objects.live().search(search_query) + results = ArticlePage.objects.filter( + languages__language__locale=locale).live().search(search_query) Query.get(search_query).add_hit() else: results = ArticlePage.objects.none()
46fc6c7f8f63ce747a30a35bb5fb33ff2d53a2c0
mackerel/host.py
mackerel/host.py
# -*- coding: utf-8 -*- """ mackerel.host ~~~~~~~~~~~~~ Mackerel client implemented by Pyton. Ported from `mackerel-client-ruby`. <https://github.com/mackerelio/mackerel-client-ruby> :copyright: (c) 2014 Hatena, All rights reserved. :copyright: (c) 2015 Shinya Ohyanagi, All rights reserved. :license: BSD, see LICENSE for more details. """ import re class Host(object): MACKEREL_INTERFACE_NAME_PATTERN = re.compile(r'^eth\d') def __init__(self, **kwargs): self.args = kwargs self.name = kwargs.get('name') self.meta = kwargs.get('meta') self.type = kwargs.get('type') self.status = kwargs.get('status') self.memo = kwargs.get('memo') self.is_retired = kwargs.get('isRetired') self.id = kwargs.get('id') self.created_at = kwargs.get('createdAt') self.roles = kwargs.get('roles') self.interfaces = kwargs.get('interfaces') def ip_addr(self): pass def mac_addr(self): pass
# -*- coding: utf-8 -*- """ mackerel.host ~~~~~~~~~~~~~ Mackerel client implemented by Pyton. Ported from `mackerel-client-ruby`. <https://github.com/mackerelio/mackerel-client-ruby> :copyright: (c) 2014 Hatena, All rights reserved. :copyright: (c) 2015 Shinya Ohyanagi, All rights reserved. :license: BSD, see LICENSE for more details. """ import re class Host(object): MACKEREL_INTERFACE_NAME_PATTERN = re.compile(r'^eth\d') def __init__(self, **kwargs): self.args = kwargs self.name = kwargs.get('name', None) self.meta = kwargs.get('meta', None) self.type = kwargs.get('type', None) self.status = kwargs.get('status', None) self.memo = kwargs.get('memo', None) self.is_retired = kwargs.get('isRetired', None) self.id = kwargs.get('id', None) self.created_at = kwargs.get('createdAt', None) self.roles = kwargs.get('roles', None) self.interfaces = kwargs.get('interfaces', None) def ip_addr(self): pass def mac_addr(self): pass
Add None default when a kwargs key is missing.
Add None default when a kwargs key is missing.
Python
bsd-3-clause
heavenshell/py-mackerel-client
--- +++ @@ -20,16 +20,16 @@ def __init__(self, **kwargs): self.args = kwargs - self.name = kwargs.get('name') - self.meta = kwargs.get('meta') - self.type = kwargs.get('type') - self.status = kwargs.get('status') - self.memo = kwargs.get('memo') - self.is_retired = kwargs.get('isRetired') - self.id = kwargs.get('id') - self.created_at = kwargs.get('createdAt') - self.roles = kwargs.get('roles') - self.interfaces = kwargs.get('interfaces') + self.name = kwargs.get('name', None) + self.meta = kwargs.get('meta', None) + self.type = kwargs.get('type', None) + self.status = kwargs.get('status', None) + self.memo = kwargs.get('memo', None) + self.is_retired = kwargs.get('isRetired', None) + self.id = kwargs.get('id', None) + self.created_at = kwargs.get('createdAt', None) + self.roles = kwargs.get('roles', None) + self.interfaces = kwargs.get('interfaces', None) def ip_addr(self): pass
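Worth noting that the explicit `None` added above is the value `dict.get()` already returns for a missing key, so the change is for readability; the second argument only changes behaviour when a non-None fallback is wanted. A quick check:

```python
kwargs = {"name": "web01", "status": "working"}

# These two lookups are equivalent: dict.get() defaults to None.
assert kwargs.get("memo") is None
assert kwargs.get("memo", None) is None

# The explicit default only matters for a non-None fallback:
print(kwargs.get("roles", []))    # []
print(kwargs.get("name", "n/a"))  # web01
```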
63a893add1170c1e90cdb8eaea6c1e1c6a3a8e0a
9.py
9.py
"""Python challenge solution #9: http://www.pythonchallenge.com/pc/return/good.html""" def main(): pass if __name__ == "__main__": main()
"""Python challenge solution #9: http://www.pythonchallenge.com/pc/return/good.html""" import urllib import urllib2 from PIL import Image, ImageDraw un = 'huge' pw = 'file' url = 'http://www.pythonchallenge.com/pc/return/good.jpg' def setup_auth_handler(): password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() password_mgr.add_password(None, url, un, pw) handler = urllib2.HTTPBasicAuthHandler(password_mgr) opener = urllib2.build_opener(handler) opener.open(url) urllib2.install_opener(opener) def main(): setup_auth_handler() img = urllib2.urlopen('http://www.pythonchallenge.com/pc/return/good.jpg') im = Image.open(img) draw = ImageDraw.Draw(im) draw.line([(0, 0), im.size], fill=128) im.show() if __name__ == "__main__": main()
Add authentication handler for opening image.
Add authentication handler for opening image.
Python
mit
bm5w/pychal
--- +++ @@ -1,10 +1,30 @@ """Python challenge solution #9: http://www.pythonchallenge.com/pc/return/good.html""" +import urllib +import urllib2 +from PIL import Image, ImageDraw +un = 'huge' +pw = 'file' +url = 'http://www.pythonchallenge.com/pc/return/good.jpg' + + +def setup_auth_handler(): + password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm() + password_mgr.add_password(None, url, un, pw) + handler = urllib2.HTTPBasicAuthHandler(password_mgr) + opener = urllib2.build_opener(handler) + opener.open(url) + urllib2.install_opener(opener) def main(): - pass + setup_auth_handler() + img = urllib2.urlopen('http://www.pythonchallenge.com/pc/return/good.jpg') + im = Image.open(img) + draw = ImageDraw.Draw(im) + draw.line([(0, 0), im.size], fill=128) + im.show() if __name__ == "__main__": main()
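The solution above targets Python 2 (urllib2). For readers on Python 3, a rough equivalent of the same basic-auth fetch is sketched below; it is an untested illustration that assumes Pillow is installed, reuses the URL and credentials already shown in this row, and buffers the response in BytesIO because Image.open expects a seekable file object.

import io
import urllib.request

from PIL import Image, ImageDraw

URL = 'http://www.pythonchallenge.com/pc/return/good.jpg'

# Register HTTP basic auth with the credentials given in the solution above.
password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
password_mgr.add_password(None, URL, 'huge', 'file')
opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(password_mgr))
urllib.request.install_opener(opener)

# Fetch the image, buffer it so PIL can seek, and draw the same diagonal line.
data = urllib.request.urlopen(URL).read()
im = Image.open(io.BytesIO(data))
ImageDraw.Draw(im).line([(0, 0), im.size], fill=128)
im.show()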
f3bbfd5221a3d7b3e394a70853d0a7dc1b5eeeac
knights/base.py
knights/base.py
import ast from . import parse class Template: def __init__(self, raw): self.raw = raw self.nodelist = parse.parse(raw) code = ast.Expression( body=ast.GeneratorExp( elt=ast.Call( func=ast.Name(id='str', ctx=ast.Load()), args=[ ast.Call( func=ast.Attribute( value=ast.Name(id='x', ctx=ast.Load()), attr='render', ctx=ast.Load() ), args=[ast.Name(id='context', ctx=ast.Load())], keywords=[], starargs=None, kwargs=None ), ], keywords=[], starargs=None, kwargs=None ), generators=[ ast.comprehension( target=ast.Name(id='x', ctx=ast.Store()), iter=ast.Name(id='nodelist', ctx=ast.Load()), ifs=[] ), ] ) ) ast.fix_missing_locations(code) self.code = compile(code, filename='<template>', mode='eval') def render(self, context): global_ctx = { 'nodelist': self.nodelist, 'context': dict(context), } return ''.join(eval(self.code, global_ctx, {}))
import ast from . import parse class Template: def __init__(self, raw): self.raw = raw self.parser = parse.Parser(raw) self.nodelist = self.parser() code = ast.Expression( body=ast.GeneratorExp( elt=ast.Call( func=ast.Name(id='str', ctx=ast.Load()), args=[ ast.Call( func=ast.Attribute( value=ast.Name(id='x', ctx=ast.Load()), attr='render', ctx=ast.Load() ), args=[ast.Name(id='context', ctx=ast.Load())], keywords=[], starargs=None, kwargs=None ), ], keywords=[], starargs=None, kwargs=None ), generators=[ ast.comprehension( target=ast.Name(id='x', ctx=ast.Store()), iter=ast.Name(id='nodelist', ctx=ast.Load()), ifs=[] ), ] ) ) ast.fix_missing_locations(code) self.code = compile(code, filename='<template>', mode='eval') def render(self, context): global_ctx = { 'nodelist': self.nodelist, 'context': dict(context), } return ''.join(eval(self.code, global_ctx, {}))
Update Template for Parser class
Update Template for Parser class
Python
mit
funkybob/knights-templater,funkybob/knights-templater
--- +++ @@ -7,7 +7,8 @@ class Template: def __init__(self, raw): self.raw = raw - self.nodelist = parse.parse(raw) + self.parser = parse.Parser(raw) + self.nodelist = self.parser() code = ast.Expression( body=ast.GeneratorExp(
52d32849f4cd38ca7a0fcfc0418e9e9580dd426a
kimochiconsumer/views.py
kimochiconsumer/views.py
from pyramid.view import view_config from pyramid.httpexceptions import ( HTTPNotFound, ) @view_config(route_name='page', renderer='templates/page.mako') @view_config(route_name='page_view', renderer='templates/page.mako') def page_view(request): if 'page_id' in request.matchdict: data = request.kimochi.page(request.matchdict['page_id']) else: data = request.kimochi.page('1') return data @view_config(route_name='gallery_view', renderer='templates/gallery.mako') def gallery_view(request): data = request.kimochi.gallery(request.matchdict['gallery_id']) if 'gallery' not in data or not data['gallery']: raise HTTPNotFound return data @view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako') def gallery_image_view(request): data = request.kimochi.gallery(request.matchdict['gallery_id']) if 'gallery' not in data or not data['gallery']: raise HTTPNotFound return data
from pyramid.view import view_config from pyramid.httpexceptions import ( HTTPNotFound, ) @view_config(route_name='page', renderer='templates/page.mako') @view_config(route_name='page_view', renderer='templates/page.mako') def page_view(request): if 'page_id' in request.matchdict: data = request.kimochi.page(request.matchdict['page_id']) else: data = request.kimochi.page('1') return data @view_config(route_name='gallery_view', renderer='templates/gallery.mako') def gallery_view(request): data = request.kimochi.gallery(request.matchdict['gallery_id']) if 'gallery' not in data or not data['gallery']: raise HTTPNotFound return data @view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako') def gallery_image_view(request): data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id']) if 'gallery' not in data or not data['gallery']: raise HTTPNotFound return data
Use the gallery_image method to fetch the required image information
Use the gallery_image method to fetch the required image information
Python
mit
matslindh/kimochi-consumer
--- +++ @@ -25,7 +25,7 @@ @view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako') def gallery_image_view(request): - data = request.kimochi.gallery(request.matchdict['gallery_id']) + data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id']) if 'gallery' not in data or not data['gallery']: raise HTTPNotFound
cc6ce477550152135eed5a9e35bca8144be10111
groupmestats/plotly_helpers.py
groupmestats/plotly_helpers.py
import plotly def try_saving_plotly_figure(figure, filename): try: plotly.plotly.image.save_as(figure, filename) except plotly.exceptions.PlotlyError as e: if 'The response from plotly could not be translated.'in str(e): print("Failed to save plotly figure. <home>/.plotly/.credentials" " might not be configured correctly? " "Or you may have hit your plotly account's rate limit" " (http://help.plot.ly/api-rate-limits/)") else: raise # A green bar with slightly darker green line marker = dict( color='#4BB541', line=dict( color='#3A9931', width=1.5, ) )
import plotly def try_saving_plotly_figure(figure, filename): try: print("Saving plot to '%s'" % filename) plotly.plotly.image.save_as(figure, filename) except plotly.exceptions.PlotlyError as e: if 'The response from plotly could not be translated.'in str(e): print("Failed to save plotly figure. <home>/.plotly/.credentials" " might not be configured correctly? " "Or you may have hit your plotly account's rate limit" " (http://help.plot.ly/api-rate-limits/)") else: raise # A green bar with slightly darker green line marker = dict( color='#4BB541', line=dict( color='#3A9931', width=1.5, ) )
Print when saving plot to file
Print when saving plot to file
Python
mit
kjteske/groupmestats,kjteske/groupmestats
--- +++ @@ -2,6 +2,7 @@ def try_saving_plotly_figure(figure, filename): try: + print("Saving plot to '%s'" % filename) plotly.plotly.image.save_as(figure, filename) except plotly.exceptions.PlotlyError as e: if 'The response from plotly could not be translated.'in str(e):
e2ca99c9f3548fa0d4e46bdd3b309ebd0e658ffa
test/backend/wayland/conftest.py
test/backend/wayland/conftest.py
import contextlib import os from libqtile.backend.wayland.core import Core from test.helpers import Backend wlr_env = { "WLR_BACKENDS": "headless", "WLR_LIBINPUT_NO_DEVICES": "1", "WLR_RENDERER_ALLOW_SOFTWARE": "1", "WLR_RENDERER": "pixman", } @contextlib.contextmanager def wayland_environment(outputs): """This backend just needs some environmental variables set""" env = wlr_env.copy() env["WLR_HEADLESS_OUTPUTS"] = str(outputs) yield env class WaylandBackend(Backend): def __init__(self, env, args=()): self.env = env self.args = args self.core = Core self.manager = None def create(self): """This is used to instantiate the Core""" os.environ.update(self.env) return self.core(*self.args) def configure(self, manager): """This backend needs to get WAYLAND_DISPLAY variable.""" success, display = manager.c.eval("self.core.display_name") assert success self.env["WAYLAND_DISPLAY"] = display def fake_click(self, x, y): """Click at the specified coordinates""" raise NotImplementedError def get_all_windows(self): """Get a list of all windows in ascending order of Z position""" raise NotImplementedError
import contextlib import os import textwrap from libqtile.backend.wayland.core import Core from test.helpers import Backend wlr_env = { "WLR_BACKENDS": "headless", "WLR_LIBINPUT_NO_DEVICES": "1", "WLR_RENDERER_ALLOW_SOFTWARE": "1", "WLR_RENDERER": "pixman", } @contextlib.contextmanager def wayland_environment(outputs): """This backend just needs some environmental variables set""" env = wlr_env.copy() env["WLR_HEADLESS_OUTPUTS"] = str(outputs) yield env class WaylandBackend(Backend): def __init__(self, env, args=()): self.env = env self.args = args self.core = Core self.manager = None def create(self): """This is used to instantiate the Core""" os.environ.update(self.env) return self.core(*self.args) def configure(self, manager): """This backend needs to get WAYLAND_DISPLAY variable.""" success, display = manager.c.eval("self.core.display_name") assert success self.env["WAYLAND_DISPLAY"] = display def fake_click(self, x, y): """Click at the specified coordinates""" self.manager.c.eval(textwrap.dedent(""" self.core._focus_by_click() self.core._process_cursor_button(1, True) """)) def get_all_windows(self): """Get a list of all windows in ascending order of Z position""" success, result = self.manager.c.eval(textwrap.dedent(""" [win.wid for win in self.core.mapped_windows] """)) assert success return eval(result)
Add Wayland Backend.fake_click and Backend.get_all_windows methods
Add Wayland Backend.fake_click and Backend.get_all_windows methods

These work by eval-ing in the test Qtile instance. It might be nicer to instead make these cmd_ methods on the Core if/when we expose cmd_ methods from the Core.
Python
mit
ramnes/qtile,ramnes/qtile,qtile/qtile,qtile/qtile
--- +++ @@ -1,5 +1,6 @@ import contextlib import os +import textwrap from libqtile.backend.wayland.core import Core from test.helpers import Backend @@ -40,8 +41,15 @@ def fake_click(self, x, y): """Click at the specified coordinates""" - raise NotImplementedError + self.manager.c.eval(textwrap.dedent(""" + self.core._focus_by_click() + self.core._process_cursor_button(1, True) + """)) def get_all_windows(self): """Get a list of all windows in ascending order of Z position""" - raise NotImplementedError + success, result = self.manager.c.eval(textwrap.dedent(""" + [win.wid for win in self.core.mapped_windows] + """)) + assert success + return eval(result)
33c26aab9ff4e391f9dde2bfe873f86db4ce126e
opal/tests/test_user_profile.py
opal/tests/test_user_profile.py
""" Tests for opal.models.UserProfile """ from django.test import TestCase from django.contrib.auth.models import User from opal.models import UserProfile, Team class UserProfileTest(TestCase): def setUp(self): self.user = User(username='testing') self.user.save() self.profile, _ = UserProfile.objects.get_or_create(user=self.user) def test_get_roles(self): self.assertEqual({'default': []}, self.profile.get_roles()) def test_get_teams(self): teams = list(Team.objects.filter(active=True, restricted=False)) user_teams = self.profile.get_teams() for t in teams: self.assertIn(t, user_teams)
""" Tests for opal.models.UserProfile """ from django.contrib.auth.models import User from django.test import TestCase from mock import patch from opal.models import UserProfile, Team class UserProfileTest(TestCase): def setUp(self): self.user = User(username='testing') self.user.save() self.profile, _ = UserProfile.objects.get_or_create(user=self.user) def test_get_roles(self): self.assertEqual({'default': []}, self.profile.get_roles()) def test_get_teams(self): teams = list(Team.objects.filter(active=True, restricted=False)) user_teams = self.profile.get_teams() for t in teams: self.assertIn(t, user_teams) def test_can_see_pid(self): with patch.object(UserProfile, 'get_roles') as mock_roles: mock_roles.return_value = dict(default=['scientist']) self.assertEqual(False, self.profile.can_see_pid) def test_explicit_access_only(self): with patch.object(UserProfile, 'get_roles') as mock_roles: mock_roles.return_value = dict(default=['scientist']) self.assertEqual(True, self.profile.explicit_access_only)
Add tests for userprofile properties
Add tests for userprofile properties
Python
agpl-3.0
khchine5/opal,khchine5/opal,khchine5/opal
--- +++ @@ -1,9 +1,9 @@ """ Tests for opal.models.UserProfile """ +from django.contrib.auth.models import User from django.test import TestCase - -from django.contrib.auth.models import User +from mock import patch from opal.models import UserProfile, Team @@ -21,4 +21,14 @@ teams = list(Team.objects.filter(active=True, restricted=False)) user_teams = self.profile.get_teams() for t in teams: - self.assertIn(t, user_teams) + self.assertIn(t, user_teams) + + def test_can_see_pid(self): + with patch.object(UserProfile, 'get_roles') as mock_roles: + mock_roles.return_value = dict(default=['scientist']) + self.assertEqual(False, self.profile.can_see_pid) + + def test_explicit_access_only(self): + with patch.object(UserProfile, 'get_roles') as mock_roles: + mock_roles.return_value = dict(default=['scientist']) + self.assertEqual(True, self.profile.explicit_access_only)
50fd28e67109f47893a6d38ede7f64758f3fe618
consts/auth_type.py
consts/auth_type.py
class AuthType(object): """ An auth type defines what write privileges an authenticated agent has. """ EVENT_DATA = 0 MATCH_VIDEO = 1 EVENT_TEAMS = 2 EVENT_MATCHES = 3 EVENT_RANKINGS = 4 EVENT_ALLIANCES = 5 EVENT_AWARDS = 6 type_names = { EVENT_DATA: "event data", MATCH_VIDEO: "match video", EVENT_TEAMS: "event teams", EVENT_MATCHES: "event matches", EVENT_RANKINGS: "event rankings", EVENT_ALLIANCES: "event alliances", EVENT_AWARDS: "event awrads" }
class AuthType(object): """ An auth type defines what write privileges an authenticated agent has. """ EVENT_DATA = 0 # DEPRECATED - USE FINER PERMISSIONS INSTEAD MATCH_VIDEO = 1 EVENT_TEAMS = 2 EVENT_MATCHES = 3 EVENT_RANKINGS = 4 EVENT_ALLIANCES = 5 EVENT_AWARDS = 6 type_names = { EVENT_DATA: "event data", MATCH_VIDEO: "match video", EVENT_TEAMS: "event teams", EVENT_MATCHES: "event matches", EVENT_RANKINGS: "event rankings", EVENT_ALLIANCES: "event alliances", EVENT_AWARDS: "event awrads" }
Add deprecation notice for AuthType.EVENT_DATA
Add deprecation notice for AuthType.EVENT_DATA
Python
mit
tsteward/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,bvisness/the-blue-alliance,verycumbersome/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,bvisness/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance
--- +++ @@ -2,7 +2,7 @@ """ An auth type defines what write privileges an authenticated agent has. """ - EVENT_DATA = 0 + EVENT_DATA = 0 # DEPRECATED - USE FINER PERMISSIONS INSTEAD MATCH_VIDEO = 1 EVENT_TEAMS = 2 EVENT_MATCHES = 3
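The deprecation recorded above is comment-only. Purely as an illustration (this is not how the-blue-alliance codebase handles it), a runtime signal could be emitted with Python's standard warnings module:

import warnings

EVENT_DATA = 0  # deprecated constant, kept for backwards compatibility

def deprecated_event_data():
    """Hypothetical accessor that warns callers still relying on EVENT_DATA."""
    warnings.warn(
        "AuthType.EVENT_DATA is deprecated; use the finer-grained event permissions instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return EVENT_DATA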
5c40cbfcb89649738945eda02c1bfb804e2ecdae
us_ignite/mailinglist/views.py
us_ignite/mailinglist/views.py
import hashlib import mailchimp from django.contrib import messages from django.conf import settings from django.shortcuts import redirect from django.template.response import TemplateResponse from us_ignite.mailinglist.forms import EmailForm def subscribe_email(email): master = mailchimp.Mailchimp(settings.MAILCHIMP_API_KEY) mailing_list = mailchimp.Lists(master) uid = hashlib.md5(email).hexdigest() email_data = { 'email': email, 'euid': uid, 'leid': uid, } return mailing_list.subscribe( settings.MAILCHIMP_LIST, email_data) def mailing_subscribe(request): """Handles MailChimp email registration.""" if request.method == 'POST': form = EmailForm(request.POST) if form.is_valid(): try: subscribe_email(form.cleaned_data['email']) messages.success(request, 'Successfully subscribed.') redirect_to = 'home' except mailchimp.ListAlreadySubscribedError: messages.error(request, 'Already subscribed.') redirect_to = 'mailing_subscribe' return redirect(redirect_to) else: form = EmailForm() context = { 'form': form, } return TemplateResponse(request, 'mailinglist/form.html', context)
import hashlib import logging import mailchimp from django.contrib import messages from django.conf import settings from django.shortcuts import redirect from django.template.response import TemplateResponse from us_ignite.mailinglist.forms import EmailForm logger = logging.getLogger('us_ignite.mailinglist.views') def subscribe_email(email): master = mailchimp.Mailchimp(settings.MAILCHIMP_API_KEY) mailing_list = mailchimp.Lists(master) uid = hashlib.md5(email).hexdigest() email_data = { 'email': email, 'euid': uid, 'leid': uid, } return mailing_list.subscribe( settings.MAILCHIMP_LIST, email_data) def mailing_subscribe(request): """Handles MailChimp email registration.""" if request.method == 'POST': form = EmailForm(request.POST) if form.is_valid(): try: subscribe_email(form.cleaned_data['email']) messages.success(request, 'Successfully subscribed.') redirect_to = 'home' except mailchimp.ListAlreadySubscribedError: messages.error(request, 'Already subscribed.') redirect_to = 'mailing_subscribe' except Exception, e: logger.exception(e) msg = (u'There is a problem with the maling list. ' 'Please try again later.') messages.error(request, msg) redirect_to = 'mailing_subscribe' return redirect(redirect_to) else: form = EmailForm() context = { 'form': form, } return TemplateResponse(request, 'mailinglist/form.html', context)
Improve handling of errors during mailing list subscription.
Improve handling of errors during mailing list subscription.

https://github.com/madewithbytes/us_ignite/issues/209

Any exception thrown by the mailchimp component will be handled gracefully and logged.
Python
bsd-3-clause
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
--- +++ @@ -1,4 +1,5 @@ import hashlib +import logging import mailchimp from django.contrib import messages @@ -7,6 +8,8 @@ from django.template.response import TemplateResponse from us_ignite.mailinglist.forms import EmailForm + +logger = logging.getLogger('us_ignite.mailinglist.views') def subscribe_email(email): @@ -34,6 +37,12 @@ except mailchimp.ListAlreadySubscribedError: messages.error(request, 'Already subscribed.') redirect_to = 'mailing_subscribe' + except Exception, e: + logger.exception(e) + msg = (u'There is a problem with the maling list. ' + 'Please try again later.') + messages.error(request, msg) + redirect_to = 'mailing_subscribe' return redirect(redirect_to) else: form = EmailForm()