commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
42221c7b858951376ba59385fa42cac11d542fdd | plugin/script/sphinxexampleae.py | plugin/script/sphinxexampleae.py |
def process( node_name, handle ):
handle.editorTemplate( beginScrollLayout=True )
handle.editorTemplate( beginLayout="Float Attributes" )
handle.editorTemplate( "floatAttr", addControl=True )
handle.editorTemplate( endLayout=True )
handle.editorTemplate( addExtraControls=True )
handle.editorTemplate( endScrollLayout=True )
handle.editorTemplate( suppress="caching" )
handle.editorTemplate( suppress="nodeState" )
def ae_template( node_name ):
from maya import cmds
maya_handle = MayaHandle( cmds )
process( node_name, maya_handle )
|
float_attr_help = """
This is the *annotation* for the floatAttr attribute
Here are some bullet points pertaining to this attribute
- The help is written in rst
- I don't know what else to put in the list
"""
string_attr_help = """
This is the *annotation* for the stringAttr attribute
"""
def process( node_name, handle ):
handle.editorTemplate( beginScrollLayout=True )
handle.editorTemplate( beginLayout="Float Attributes" )
handle.editorTemplate( "floatAttr", addControl=True, annotation=float_attr_help )
handle.editorTemplate( endLayout=True )
handle.editorTemplate( beginLayout="String Attributes" )
handle.editorTemplate( "stringAttr", addControl=True, annotation=string_attr_help )
handle.editorTemplate( endLayout=True )
handle.editorTemplate( addExtraControls=True )
handle.editorTemplate( endScrollLayout=True )
handle.editorTemplate( suppress="caching" )
handle.editorTemplate( suppress="nodeState" )
def ae_template( node_name ):
from maya import cmds
maya_handle = MayaHandle( cmds )
process( node_name, maya_handle )
| Add another attribute and some annotations | Add another attribute and some annotations
We write the annotations in rst for the moment.
| Python | bsd-3-clause | michaeljones/sphinx-maya-node |
59030daa60a4d2006cae6192219071e2a8017364 | test/conftest.py | test/conftest.py | from os.path import join, dirname, abspath
default_base_dir = join(dirname(abspath(__file__)), 'completion')
import run
def pytest_addoption(parser):
parser.addoption(
"--base-dir", default=default_base_dir,
help="Directory in which integration test case files locate.")
parser.addoption(
"--thirdparty",
help="Include integration tests that requires third party modules.")
def pytest_generate_tests(metafunc):
"""
:type metafunc: _pytest.python.Metafunc
"""
if 'case' in metafunc.fixturenames:
base_dir = metafunc.config.option.base_dir
test_files = {}
thirdparty = metafunc.config.option.thirdparty
metafunc.parametrize(
'case',
run.collect_dir_tests(base_dir, test_files, thirdparty))
| from os.path import join, dirname, abspath
default_base_dir = join(dirname(abspath(__file__)), 'completion')
import run
def pytest_addoption(parser):
parser.addoption(
"--base-dir", default=default_base_dir,
help="Directory in which integration test case files locate.")
parser.addoption(
"--test-files", "-T", default=[], action='append',
help=(
"Specify test files using FILE_NAME[:LINE[,LINE[,...]]]. "
"For example: -T generators.py:10,13,19. "
"Note that you can use -m to specify the test case by id."))
parser.addoption(
"--thirdparty",
help="Include integration tests that requires third party modules.")
def parse_test_files_option(opt):
"""
Parse option passed to --test-files into a key-value pair.
>>> parse_test_files_option('generators.py:10,13,19')
('generators.py', [10, 13, 19])
"""
opt = str(opt)
if ':' in opt:
(f_name, rest) = opt.split(':', 1)
return (f_name, list(map(int, rest.split(','))))
else:
return (opt, [])
def pytest_generate_tests(metafunc):
"""
:type metafunc: _pytest.python.Metafunc
"""
if 'case' in metafunc.fixturenames:
base_dir = metafunc.config.option.base_dir
test_files = dict(map(parse_test_files_option,
metafunc.config.option.test_files))
thirdparty = metafunc.config.option.thirdparty
metafunc.parametrize(
'case',
run.collect_dir_tests(base_dir, test_files, thirdparty))
| Add --test-files option to py.test | Add --test-files option to py.test
At this point, py.test should be equivalent to test/run.py
| Python | mit | tjwei/jedi,jonashaag/jedi,mfussenegger/jedi,jonashaag/jedi,dwillmer/jedi,WoLpH/jedi,tjwei/jedi,mfussenegger/jedi,dwillmer/jedi,flurischt/jedi,WoLpH/jedi,flurischt/jedi |
4aa1623e08519127a06f49060d546c5ef18e906c | vcs/models.py | vcs/models.py | from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
| from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.OneToOneField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.OneToOneField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
| Use a OneToMany field for the activity joiner. | Use a OneToMany field for the activity joiner.
| Python | bsd-3-clause | AeroNotix/django-timetracker,AeroNotix/django-timetracker,AeroNotix/django-timetracker |
d43750206ef97a39f4bb7cd7d4e69d4f634c13e1 | api/runserver.py | api/runserver.py | import os
from ricardo_api import app
isDebug = False
if os.environ['FLASK_ENV'] == "development":
isDebug = True
app.run(host= '0.0.0.0', debug=isDebug)
| import os
from ricardo_api import app
isDebug = False
if 'FLASK_ENV' in os.environ and os.environ['FLASK_ENV'] == "development":
isDebug = True
app.run(host= '0.0.0.0', debug=isDebug)
| Correct debug mode with env. | [api]: Correct debug mode with env.
| Python | agpl-3.0 | medialab/ricardo,medialab/ricardo,medialab/ricardo,medialab/ricardo |
98e824af43b729eb5b5737597506a5ca87312814 | apps/polls/tests.py | apps/polls/tests.py | """
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.assertEqual(1 + 1, 2)
| import datetime
from django.test import TestCase
from django.utils import timezone
from apps.polls.models import Poll
class PollMethoTests(TestCase):
def test_was_published_recently_with_future_poll(self):
"""
was_published_recently() should return False for polls whose
pub_date is in the future
"""
future_poll = Poll(pub_date=timezone.now() + datetime.timedelta(days=30))
self.assertEqual(future_poll.was_published_recently(),False)
| Create a test to expose the bug | Create a test to expose the bug
| Python | bsd-3-clause | hoale/teracy-tutorial,hoale/teracy-tutorial |
990008a6fb2788d25445ee9ec21375515527bdc8 | nodeconductor/backup/utils.py | nodeconductor/backup/utils.py | import pkg_resources
from django.utils import six
from django.utils.lru_cache import lru_cache
@lru_cache()
def get_backup_strategies():
entry_points = pkg_resources.get_entry_map('nodeconductor').get('backup_strategies', {})
strategies = dict((name.upper(), entry_point.load()) for name, entry_point in entry_points.iteritems())
return strategies
def has_object_backup_strategy(obj):
strategies = get_backup_strategies()
return obj.__class__.__name__.upper() in strategies
def get_object_backup_strategy(obj):
strategies = get_backup_strategies()
return strategies[obj.__class__.__name__.upper()]
def get_backupable_models():
strategies = get_backup_strategies()
return [strategy.get_model() for strategy in six.itervalues(strategies)]
| import pkg_resources
from django.utils import six
from django.utils.lru_cache import lru_cache
@lru_cache()
def get_backup_strategies():
entry_points = pkg_resources.get_entry_map('nodeconductor').get('backup_strategies', {})
strategies = {name.upper(): entry_point.load() for name, entry_point in six.iteritems(entry_points)}
return strategies
def has_object_backup_strategy(obj):
strategies = get_backup_strategies()
return obj.__class__.__name__.upper() in strategies
def get_object_backup_strategy(obj):
strategies = get_backup_strategies()
return strategies[obj.__class__.__name__.upper()]
def get_backupable_models():
strategies = get_backup_strategies()
return [strategy.get_model() for strategy in six.itervalues(strategies)]
| Use new comprehension syntax and six (nc-263) | Use new comprehension syntax and six (nc-263)
| Python | mit | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor |
f376eb478783448b5e372c2c4a7f7ee0e4891e88 | examples/python/values.py | examples/python/values.py | #! /usr/bin/env python
#
# values.py
#
"""
An example of using values via Python API
"""
from opencog.atomspace import AtomSpace, TruthValue
from opencog.type_constructors import *
a = AtomSpace()
set_type_ctor_atomspace(a)
a = FloatValue([1.0, 2.0, 3.0])
b = FloatValue([1.0, 2.0, 3.0])
c = FloatValue(1.0)
print('{} == {}: {}'.format(a, b, a == b))
print('{} == {}: {}'.format(a, c, a == c))
featureValue = FloatValue([1.0, 2])
print('new value created: {}'.format(featureValue))
boundingBox = ConceptNode('boundingBox')
featureKey = PredicateNode('features')
boundingBox.set_value(featureKey, featureValue)
print('set value to atom: {}'.format(boundingBox))
print('get value from atom: {}'.format(boundingBox.get_value(featureKey)))
| #! /usr/bin/env python
#
# values.py
#
"""
An example of using values via Python API
"""
from opencog.atomspace import AtomSpace, TruthValue
from opencog.type_constructors import *
from opencog.scheme_wrapper import scheme_eval_v
atomspace = AtomSpace()
set_type_ctor_atomspace(atomspace)
a = FloatValue([1.0, 2.0, 3.0])
b = FloatValue([1.0, 2.0, 3.0])
c = FloatValue(1.0)
print('{} == {}: {}'.format(a, b, a == b))
print('{} == {}: {}'.format(a, c, a == c))
featureValue = FloatValue([1.0, 2])
print('new value created: {}'.format(featureValue))
boundingBox = ConceptNode('boundingBox')
featureKey = PredicateNode('features')
boundingBox.set_value(featureKey, featureValue)
print('set value to atom: {}'.format(boundingBox))
print('get value from atom: {}'.format(boundingBox.get_value(featureKey)))
value = scheme_eval_v(atomspace, '(ValueOf (ConceptNode "boundingBox") '
'(PredicateNode "features"))')
value = boundingBox.get_value(featureKey)
print('get value from atom using Scheme program: {}'.format(value))
| Add example of scheme_eval_v usage | Add example of scheme_eval_v usage
| Python | agpl-3.0 | rTreutlein/atomspace,rTreutlein/atomspace,rTreutlein/atomspace,AmeBel/atomspace,rTreutlein/atomspace,AmeBel/atomspace,AmeBel/atomspace,AmeBel/atomspace,rTreutlein/atomspace,AmeBel/atomspace |
d76c2764d5489308064011e291f9181eac4a3fd6 | feed_sources/NJTransit.py | feed_sources/NJTransit.py | """Fetch NJ TRANSIT bus and rail feeds.
Requires username and password to log in first.
Cannot check for whether a feed is new or not, so only call to fetch this one once
an email is sent to the developer account saying new feeds are available.
"""
import logging
import requests
from FeedSource import FeedSource
LOG = logging.getLogger(__name__)
LOGIN_URL = 'https://www.njtransit.com/mt/mt_servlet.srv?hdnPageAction=MTDevLoginSubmitTo'
URL = 'https://www.njtransit.com/mt/mt_servlet.srv?hdnPageAction=MTDevResourceDownloadTo&Category='
class NJTransit(FeedSource):
"""Create session to fetch NJ TRANSIT feed bus and rail feeds."""
def __init__(self):
super(NJTransit, self).__init__()
self.urls = {'nj_bus.zip': URL + 'bus', 'nj_rail.zip': URL + 'rail'}
self.nj_payload = {} # need to set username and password in this to log in
def fetch(self):
"""Fetch NJ TRANSIT bus and rail feeds.
First logs on to create session before fetching and validating downloads.
"""
session = requests.Session()
session.post(LOGIN_URL, data=self.nj_payload)
for filename in self.urls:
url = self.urls.get(filename)
if self.fetchone(filename, url, session=session):
self.write_status()
session.close()
| """Fetch NJ TRANSIT bus and rail feeds.
Requires username and password to log in first.
Cannot check for whether a feed is new or not, so only call to fetch this one once
an email is sent to the developer account saying new feeds are available.
"""
import logging
import requests
from FeedSource import FeedSource
LOG = logging.getLogger(__name__)
LOGIN_URL = 'https://www.njtransit.com/mt/mt_servlet.srv?hdnPageAction=MTDevLoginSubmitTo'
URL = 'https://www.njtransit.com/mt/mt_servlet.srv?hdnPageAction=MTDevResourceDownloadTo&Category='
class NJTransit(FeedSource):
"""Create session to fetch NJ TRANSIT feed bus and rail feeds."""
def __init__(self):
super(NJTransit, self).__init__()
self.urls = {'nj_bus.zip': URL + 'bus', 'nj_rail.zip': URL + 'rail'}
self.nj_payload = {} # need to set username and password in this to log in
def fetch(self):
"""Fetch NJ TRANSIT bus and rail feeds.
First logs on to create session before fetching and validating downloads.
"""
session = requests.Session()
login = session.post(LOGIN_URL, data=self.nj_payload)
if login.ok:
LOG.debug('Logged in to NJ TRANSIT successfully.')
for filename in self.urls:
url = self.urls.get(filename)
if self.fetchone(filename, url, session=session):
self.write_status()
else:
LOG.error('Failed to log in to NJ TRANSIT. Response status: %s: %s.',
login.status_code, login.reason)
session.close()
| Add logging for NJ TRANSIT login status | Add logging for NJ TRANSIT login status
| Python | mit | flibbertigibbet/gtfs-feed-fetcher,azavea/gtfs-feed-fetcher |
385d7a5734e91217e9d8c0464327dedb30a69621 | profile_python.py | profile_python.py | # coding: utf8
# Copyright 2013-2015 Vincent Jacques <[email protected]>
import cProfile as profile
import pstats
import cairo
from DrawTurksHead import TurksHead
stats_filename = "profiling/profile_python.stats"
img = cairo.ImageSurface(cairo.FORMAT_RGB24, 3200, 2400)
ctx = cairo.Context(img)
ctx.set_source_rgb(1, 1, 0xBF / 255.)
ctx.paint()
ctx.translate(1600, 1200)
ctx.scale(1, -1)
profile.run("TurksHead(24, 18, 190, 1190, 20).draw(ctx)", stats_filename)
img.write_to_png("profiling/reference.png")
p = pstats.Stats(stats_filename)
p.strip_dirs().sort_stats("cumtime").print_stats().print_callees()
| # coding: utf8
# Copyright 2013-2015 Vincent Jacques <[email protected]>
import cProfile as profile
import pstats
import cairo
from DrawTurksHead import TurksHead
stats_filename = "/tmp/profile.stats"
img = cairo.ImageSurface(cairo.FORMAT_RGB24, 3200, 2400)
ctx = cairo.Context(img)
ctx.set_source_rgb(1, 1, 0xBF / 255.)
ctx.paint()
ctx.translate(1600, 1200)
ctx.scale(1, -1)
profile.run("TurksHead(24, 18, 190, 1190, 20).draw(ctx)", stats_filename)
img.write_to_png("profiling/reference.png")
p = pstats.Stats(stats_filename)
p.strip_dirs().sort_stats("cumtime").print_stats().print_callees()
| Change name of stats file | Change name of stats file
| Python | mit | jacquev6/DrawTurksHead,jacquev6/DrawTurksHead,jacquev6/DrawTurksHead |
dcd36fab023ac2530cbfa17449e3ce8f61ad6bdc | ssl-cert-parse.py | ssl-cert-parse.py | #!/usr/bin/env python3
import datetime
import ssl
import OpenSSL
def GetCert(SiteName, Port):
return ssl.get_server_certificate((SiteName, Port))
def ParseCert(CertRaw):
Cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CertRaw)
print(str(Cert.get_subject())[18:-2])
print(datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
'%Y%m%d%H%M%SZ'))
print(datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
'%Y%m%d%H%M%SZ'))
print(str(Cert.get_issuer())[18:-2])
CertRaw = GetCert('some.domain.tld', 443)
print(CertRaw)
ParseCert(CertRaw)
| #!/usr/bin/env python3
import datetime
import ssl
import OpenSSL
def GetCert(SiteName, Port):
return ssl.get_server_certificate((SiteName, Port))
def ParseCert(CertRaw):
Cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, CertRaw)
CertSubject = str(Cert.get_subject())[18:-2]
CertStartDate = datetime.datetime.strptime(str(Cert.get_notBefore())[2:-1],
'%Y%m%d%H%M%SZ')
CertEndDate = datetime.datetime.strptime(str(Cert.get_notAfter())[2:-1],
'%Y%m%d%H%M%SZ')
CertIssuer = str(Cert.get_issuer())[18:-2]
return {'CertSubject': CertIssuer, 'CertStartDate': CertStartDate,
'CertEndDate': CertEndDate, 'CertIssuer': CertIssuer}
CertRaw = GetCert('some.domain.tld', 443)
print(CertRaw)
Out = ParseCert(CertRaw)
print(Out)
print(Out['CertSubject'])
print(Out['CertStartDate'])
| Fix ParseCert() function, add variables, add a return statement | Fix ParseCert() function, add variables, add a return statement
| Python | apache-2.0 | ivuk/ssl-cert-parse |
5cb7aaac10c6c8cf818c46b2d0f47fe707e2b93c | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/config/urls.py | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/config/urls.py | import django.views.static
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
admin.autodiscover()
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='base.html')),
url(r'^admin/', include(admin.site.urls)),
{%- if cookiecutter.use_djangocms == 'y' %}
url(r'^', include('cms.urls')),
{%- endif %}
]
if settings.DEBUG:
import debug_toolbar
urlpatterns = [
url(r'^media/(?P<path>.*)$', django.views.static.serve,
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
url(r'^__debug__/', include(debug_toolbar.urls)),
] + staticfiles_urlpatterns() + urlpatterns
| import django.views.static
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
admin.autodiscover()
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='base.html')),
url(r'^admin/', admin.site.urls),
{%- if cookiecutter.use_djangocms == 'y' %}
url(r'^', include('cms.urls')),
{%- endif %}
]
if settings.DEBUG:
import debug_toolbar
urlpatterns = [
url(r'^media/(?P<path>.*)$', django.views.static.serve,
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
url(r'^__debug__/', include(debug_toolbar.urls)),
] + staticfiles_urlpatterns() + urlpatterns
| Update url includes for django 2.0 | Update url includes for django 2.0
| Python | mit | r0x73/django-template,r0x73/django-template,r0x73/django-template |
572dca82aab583e91e5b8402d1334bae55244d16 | hs_tracking/middleware.py | hs_tracking/middleware.py | from .models import Session
class Tracking(object):
"""The default tracking middleware logs all successful responses as a 'visit' variable with
the URL path as its value."""
def process_response(self, request, response):
if response.status_code == 200:
session = Session.objects.for_request(request)
session.record("visit", request.path)
return response
| from .models import Session
class Tracking(object):
"""The default tracking middleware logs all successful responses as a 'visit' variable with
the URL path as its value."""
def process_response(self, request, response):
if request.path.startswith('/heartbeat/'):
return response
if response.status_code == 200:
session = Session.objects.for_request(request)
session.record("visit", request.path)
return response
| Disable use tracking of all heartbeat app urls. | Disable use tracking of all heartbeat app urls.
| Python | bsd-3-clause | RENCI/xDCIShare,FescueFungiShare/hydroshare,ResearchSoftwareInstitute/MyHPOM,RENCI/xDCIShare,FescueFungiShare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,FescueFungiShare/hydroshare,ResearchSoftwareInstitute/MyHPOM,FescueFungiShare/hydroshare,hydroshare/hydroshare,RENCI/xDCIShare,RENCI/xDCIShare,ResearchSoftwareInstitute/MyHPOM,ResearchSoftwareInstitute/MyHPOM,RENCI/xDCIShare,ResearchSoftwareInstitute/MyHPOM,hydroshare/hydroshare,FescueFungiShare/hydroshare,hydroshare/hydroshare |
93d3a2f19cfb3ef9ae62d04ce24901df81bafc3e | luigi/rfam/families_csv.py | luigi/rfam/families_csv.py | # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import attr
import luigi
from rfam import utils
from rfam.csv_writer import CsvWriter
class FamiliesCSV(CsvWriter):
headers = [
'id',
'name',
'description',
'clan',
'seed_count',
'full_count',
'length',
'domain',
'is_supressed',
'rna_type',
]
def data(self):
for family in utils.load_families():
data = attr.asdict(family)
data['is_suppressed'] = int(family.is_suppressed)
data['rna_type'] = family.guess_insdc()
yield data
if __name__ == '__main__':
luigi.run(main_task_cls=FamiliesCSV)
| # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import attr
import luigi
from rfam import utils
from rfam.csv_writer import CsvWriter
class FamiliesCSV(CsvWriter):
headers = [
'id',
'name',
'description',
'clan',
'seed_count',
'full_count',
'length',
'domain',
'is_suppressed',
'rna_type',
]
def data(self):
for family in utils.load_families():
data = attr.asdict(family)
data['name'] = family.pretty_name
data['is_suppressed'] = int(family.is_suppressed)
data['rna_type'] = family.guess_insdc()
yield data
if __name__ == '__main__':
luigi.run(main_task_cls=FamiliesCSV)
| Fix typo and use correct name | Fix typo and use correct name
We want to use the pretty name, not the standard one for import. In
addition, I fix a typo in the name of the the is_suppressed column.
| Python | apache-2.0 | RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline |
5aa809aa36bbc32610b9b7998b8606bbfc65508a | swift/__init__.py | swift/__init__.py | import gettext
class Version(object):
def __init__(self, canonical_version, final):
self.canonical_version = canonical_version
self.final = final
@property
def pretty_version(self):
if self.final:
return self.canonical_version
else:
return '%s-dev' % (self.canonical_version,)
_version = Version('1.4.0', False)
__version__ = _version.pretty_version
__canonical_version__ = _version.canonical_version
gettext.install('swift')
| import gettext
class Version(object):
def __init__(self, canonical_version, final):
self.canonical_version = canonical_version
self.final = final
@property
def pretty_version(self):
if self.final:
return self.canonical_version
else:
return '%s-dev' % (self.canonical_version,)
_version = Version('1.4.1', False)
__version__ = _version.pretty_version
__canonical_version__ = _version.canonical_version
gettext.install('swift')
| Switch trunk to 1.4.1, now that the 1.4.0 release branch is branched out | Switch trunk to 1.4.1, now that the 1.4.0 release branch is branched out | Python | apache-2.0 | mjzmjz/swift,iostackproject/IO-Bandwidth-Differentiation,Triv90/SwiftUml,VictorLowther/swift,mjwtom/swift,psachin/swift,hurricanerix/swift,levythu/swift,smerritt/swift,zackmdavis/swift,prashanthpai/swift,IPVL/swift-kilo,zaitcev/swift-lfs,hbhdytf/mac2,Khushbu27/Tutorial,clayg/swift,larsbutler/swift,nadeemsyed/swift,Mirantis/swift-encrypt,matthewoliver/swift,hbhdytf/mac,hbhdytf/mac2,mja054/swift_plugin,anishnarang/gswift,eatbyte/Swift,revoer/keystone-8.0.0,aerwin3/swift,NewpTone/StackLab-swift,maginatics/swift,NeCTAR-RC/swift,notmyname/swift,takeshineshiro/swift,nadeemsyed/swift,tipabu/swift,scality/ScalitySproxydSwift,williamthegrey/swift,smerritt/swift,bradleypj823/swift,levythu/swift,AfonsoFGarcia/swift,rackerlabs/swift,mjzmjz/swift,houseurmusic/my-swift,houseurmusic/my-swift,scality/ScalitySproxydSwift,daasbank/swift,Intel-bigdata/swift,swiftstack/swift,clayg/swift,xiaoguoai/ec-dev-swift,openstack/swift,citrix-openstack-build/swift,NeCTAR-RC/swift,psachin/swift,Seagate/swift,tsli/test,tipabu/swift,larsbutler/swift,NewpTone/StackLab-swift,notmyname/swift,Intel-bigdata/swift,Em-Pan/swift,revoer/keystone-8.0.0,psachin/swift,citrix-openstack/build-swift,dencaval/swift,zackmdavis/swift,prashanthpai/swift,sarvesh-ranjan/swift,nadeemsyed/swift,matthewoliver/swift,shibaniahegde/OpenStak_swift,orion/swift-config,JioCloud/swift,wenhuizhang/swift,ceph/swift,JioCloud/swift,tsli/test,Khushbu27/Tutorial,openstack/swift,SUSE/swift,redhat-openstack/swift,sarvesh-ranjan/swift,mja054/swift_plugin,openstack/swift,redbo/swift,hbhdytf/mac,redbo/swift,zaitcev/swift-lfs,VictorLowther/swift,psachin/swift,bouncestorage/swift,Akanoa/swift,anishnarang/gswift,takeshineshiro/swift,AfonsoFGarcia/swift,gold3bear/swift,hbhdytf/mac2,tipabu/swift,Triv90/SwiftUml,redhat-openstack/swift,Akanoa/swift,clayg/swift,aerwin3/swift,dpg
oetz/swift,bkolli/swift,bradleypj823/swift,Seagate/swift,dencaval/swift,SUSE/swift,xiaoguoai/ec-dev-swift,daasbank/swift,openstack/swift,ceph/swift,Mirantis/swift-encrypt,maginatics/swift,matthewoliver/swift,tipabu/swift,hbhdytf/mac2,clayg/swift,smerritt/swift,wenhuizhang/swift,hurricanerix/swift,citrix-openstack-build/swift,rackerlabs/swift,mjwtom/swift,citrix-openstack/build-swift,notmyname/swift,iostackproject/IO-Bandwidth-Differentiation,nadeemsyed/swift,thiagodasilva/swift,eatbyte/Swift,bouncestorage/swift,hurricanerix/swift,gold3bear/swift,shibaniahegde/OpenStak_swift,notmyname/swift,matthewoliver/swift,swiftstack/swift,williamthegrey/swift,hurricanerix/swift,smerritt/swift,orion/swift-config,dpgoetz/swift,IPVL/swift-kilo,swiftstack/swift,thiagodasilva/swift,mja054/swift_plugin,bkolli/swift,Em-Pan/swift |
49155373b9eea3812c295c9d89c40a7c9c1c1c13 | migrations/versions/20170214191843_pubmed_rename_identifiers_list_to_article_ids.py | migrations/versions/20170214191843_pubmed_rename_identifiers_list_to_article_ids.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from alembic import op
# revision identifiers, used by Alembic.
revision = '3dbb46f23ed7'
down_revision = u'0087dc1eb534'
branch_labels = None
depends_on = None
def upgrade():
op.alter_column('pubmed', 'identifiers_list', new_column_name='article_ids')
def downgrade():
op.alter_column('pubmed', 'article_ids', new_column_name='identifiers_list')
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from alembic import op
# revision identifiers, used by Alembic.
revision = '3dbb46f23ed7'
down_revision = u'b32475938a2d'
branch_labels = None
depends_on = None
def upgrade():
op.alter_column('pubmed', 'identifiers_list', new_column_name='article_ids')
def downgrade():
op.alter_column('pubmed', 'article_ids', new_column_name='identifiers_list')
| Fix migrations to have a single path | Fix migrations to have a single path
As it took us a while to merge some PRs, the migrations ended branching in two
parts. This commit fixes to use a single path. It shouldn't cause any issues, as
we're only messing with the `down` migrations and the migrations aren't
dependent on each other.
| Python | mit | opentrials/scraper,opentrials/collectors |
4ccf9226466b716cf1cbe6bdcfd13d7dfe66501e | megalist_dataflow/setup.py | megalist_dataflow/setup.py | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
name='megalist_dataflow',
version='0.1',
author='Alvaro Stivi',
author_email='[email protected]',
url='https://cse.googlesource.com/solutions/megalist',
install_requires=['googleads==20.0.0', 'google-api-python-client==1.7.9',
'bloom-filter==1.3', 'google-cloud-core==1.0.2',
'google-cloud-datastore==1.9.0', 'aiohttp==3.6.2'],
packages=setuptools.find_packages(),
)
| # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
name='megalist_dataflow',
version='0.1',
author='Alvaro Stivi',
author_email='[email protected]',
url='https://cse.googlesource.com/solutions/megalist',
install_requires=['googleads==24.1.0', 'google-api-python-client==1.10.0',
'bloom-filter==1.3', 'google-cloud-core==1.3.0', 'google-cloud-bigquery==1.26.0',
'google-cloud-datastore==1.13.1', 'aiohttp==3.6.2'],
packages=setuptools.find_packages(),
)
| Update Apache Beam runtime dependencies | Update Apache Beam runtime dependencies
| Python | apache-2.0 | google/megalista,google/megalista |
60f9acad7610ee8bed324d1e142cc4801a9e3713 | ibmcnx/doc/DataSources.py | ibmcnx/doc/DataSources.py | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: [email protected]
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
ds = AdminConfig.list('DataSource')
dsArray = ds.splitlines()
AdminControl.getPropertiesForDataSource(dsArray[0]) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: [email protected]
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
ds = AdminConfig.list('DataSource')
dsArray = ds.splitlines()
test = AdminControl.getPropertiesForDataSource(dsArray[0])
print dsArray
print '\n'
print test | Create script to save documentation to a file | 4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4 | Python | apache-2.0 | stoeps13/ibmcnx2,stoeps13/ibmcnx2 |
2cbffa60c0b12a268e0347a6a4ecfc6d5acb29e3 | lamor_flexbe_states/src/lamor_flexbe_states/detect_person_state.py | lamor_flexbe_states/src/lamor_flexbe_states/detect_person_state.py | #!/usr/bin/env python
from flexbe_core import EventState, Logger
from flexbe_core.proxy import ProxySubscriberCached
from geometry_msgs.msg import PoseStamped
class DetectPersonState(EventState):
'''
Detects the nearest person and provides their pose.
-- wait_timeout float Time (seconds) to wait for a person before giving up.
#> person_pose PoseStamped Pose of the nearest person if one is detected, else None.
<= detected Detected a person.
<= not_detected No person detected, but time ran out.
'''
def __init__(self, wait_timeout):
super(MetricSweepState, self).__init__(outcomes = ['detected', 'not_detected']
output_keys = ['person_pose'])
self._wait_timeout = rospy.Duration(wait_timeout)
self._topic = '/people_tracker/pose'
self._sub = ProxySubscriberCached({self._topic: PoseStamped})
self._start_waiting_time = None
def execute(self, userdata):
if rospy.Time.now() > self._start_waiting_time + self._wait_timeout:
return 'not_detected'
if self._sub.has_msgs(self._topic):
userdata.person_pose = self._sub.get_last_msg(self._topic)
return 'detected'
def on_enter(self, userdata):
self._start_waiting_time = rospy.Time.now()
| #!/usr/bin/env python
from flexbe_core import EventState, Logger
from flexbe_core.proxy import ProxySubscriberCached
from geometry_msgs.msg import PoseStamped
class DetectPersonState(EventState):
'''
Detects the nearest person and provides their pose.
-- wait_timeout float Time (seconds) to wait for a person before giving up.
#> person_pose PoseStamped Pose of the nearest person if one is detected, else None.
<= detected Detected a person.
<= not_detected No person detected, but time ran out.
'''
def __init__(self, wait_timeout):
super(MetricSweepState, self).__init__(outcomes = ['detected', 'not_detected'],
output_keys = ['person_pose'])
self._wait_timeout = rospy.Duration(wait_timeout)
self._topic = '/people_tracker/pose'
self._sub = ProxySubscriberCached({self._topic: PoseStamped})
self._start_waiting_time = None
def execute(self, userdata):
if rospy.Time.now() > self._start_waiting_time + self._wait_timeout:
userdata.person_pose = None
return 'not_detected'
if self._sub.has_msgs(self._topic):
userdata.person_pose = self._sub.get_last_msg(self._topic)
return 'detected'
def on_enter(self, userdata):
self._start_waiting_time = rospy.Time.now()
| Set person pose to None if no person is present | [lamor_flexbe_state] Set person pose to None if no person is present
| Python | mit | marinaKollmitz/lamor15,pschillinger/lamor15,pschillinger/lamor15,marinaKollmitz/lamor15,pschillinger/lamor15,marinaKollmitz/lamor15,pschillinger/lamor15,marinaKollmitz/lamor15,marinaKollmitz/lamor15,pschillinger/lamor15 |
206a59c838623aae5e0b0f91f8089ffc13e2cfd0 | pipenv/vendor/pythonfinder/environment.py | pipenv/vendor/pythonfinder/environment.py | # -*- coding=utf-8 -*-
from __future__ import print_function, absolute_import
import os
import platform
import sys
def is_type_checking():
from typing import TYPE_CHECKING
return TYPE_CHECKING
PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool(
os.environ.get("PYENV_ROOT")
)
ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR"))
PYENV_ROOT = os.path.expanduser(
os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv"))
)
ASDF_DATA_DIR = os.path.expanduser(
os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf"))
)
IS_64BIT_OS = None
SYSTEM_ARCH = platform.architecture()[0]
if sys.maxsize > 2 ** 32:
IS_64BIT_OS = platform.machine() == "AMD64"
else:
IS_64BIT_OS = False
IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False))
MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
| # -*- coding=utf-8 -*-
from __future__ import print_function, absolute_import
import os
import platform
import sys
def is_type_checking():
try:
from typing import TYPE_CHECKING
except ImportError:
return False
return TYPE_CHECKING
PYENV_INSTALLED = bool(os.environ.get("PYENV_SHELL")) or bool(
os.environ.get("PYENV_ROOT")
)
ASDF_INSTALLED = bool(os.environ.get("ASDF_DIR"))
PYENV_ROOT = os.path.expanduser(
os.path.expandvars(os.environ.get("PYENV_ROOT", "~/.pyenv"))
)
ASDF_DATA_DIR = os.path.expanduser(
os.path.expandvars(os.environ.get("ASDF_DATA_DIR", "~/.asdf"))
)
IS_64BIT_OS = None
SYSTEM_ARCH = platform.architecture()[0]
if sys.maxsize > 2 ** 32:
IS_64BIT_OS = platform.machine() == "AMD64"
else:
IS_64BIT_OS = False
IGNORE_UNSUPPORTED = bool(os.environ.get("PYTHONFINDER_IGNORE_UNSUPPORTED", False))
MYPY_RUNNING = os.environ.get("MYPY_RUNNING", is_type_checking())
| Fix typing check for pythonfinder | Fix typing check for pythonfinder
Signed-off-by: Dan Ryan <[email protected]>
| Python | mit | kennethreitz/pipenv |
c8a1b25c1579eba5cb68c1a4cdd60116b3496429 | pre_commit_robotframework_tidy/rf_tidy.py | pre_commit_robotframework_tidy/rf_tidy.py | from __future__ import print_function
import argparse
from robot.errors import DataError
from robot.tidy import Tidy
def main(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Filenames to run')
parser.add_argument('--use-pipes', action='store_true', dest='use_pipes',
default=False)
parser.add_argument('--space-count', type=int, dest='space_count',
default=4)
args = parser.parse_args(argv)
tidier = Tidy(use_pipes=args.use_pipes, space_count=args.space_count)
for filename in args.filenames:
try:
tidier.inplace(filename)
except DataError:
pass
return 0
if __name__ == '__main__':
exit(main())
| from __future__ import print_function
import argparse
from robot.errors import DataError
from robot.tidy import Tidy
def main(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Filenames to run')
parser.add_argument('--use-pipes', action='store_true', dest='use_pipes',
default=False)
parser.add_argument('--space-count', type=int, dest='space_count',
default=4)
args = parser.parse_args(argv)
tidier = Tidy(use_pipes=args.use_pipes,
space_count=args.space_count,
format='robot')
for filename in args.filenames:
try:
tidier.inplace(filename)
except DataError:
pass
return 0
if __name__ == '__main__':
exit(main())
| Format results as robot files | Format results as robot files
| Python | mit | guykisel/pre-commit-robotframework-tidy |
ff011f280f6f6aaf74dd2f4ff3cdfb3831aa147c | ideskeleton/builder.py | ideskeleton/builder.py | def build(source_path, overwrite = True):
pass | import os.path
def build(source_path, overwrite = True):
if not os.path.exists(source_path):
raise IOError("source_path does not exist so not skeleton can be built")
'''
for root, dirs, files in os.walk("."):
path = root.split('/')
print (len(path) - 1) *'---' , os.path.basename(root)
for file in files:
print len(path)*'---', file
''' | Make the first test pass by checking if source_path argument exists | Make the first test pass by checking if source_path argument exists
| Python | mit | jruizaranguren/ideskeleton |
3bbe101f609349c2475079f052d5400e77822237 | common/my_filters.py | common/my_filters.py | from google.appengine.ext import webapp
import re
# More info on custom Django template filters here:
# https://docs.djangoproject.com/en/dev/howto/custom-template-tags/#registering-custom-filters
register = webapp.template.create_template_register()
@register.filter
def digits(value):
return re.sub('[^0-9]', '', value)
@register.filter
def mul(value, arg):
return value * arg
@register.filter
def yt_start(value):
return value.replace("?t=", "?start=")
| from google.appengine.ext import webapp
from helpers.youtube_video_helper import YouTubeVideoHelper
import re
# More info on custom Django template filters here:
# https://docs.djangoproject.com/en/dev/howto/custom-template-tags/#registering-custom-filters
register = webapp.template.create_template_register()
@register.filter
def digits(value):
return re.sub('[^0-9]', '', value)
@register.filter
def mul(value, arg):
return value * arg
@register.filter
def yt_start(value):
if '?t=' in value: # Treat ?t= the same as #t=
value = value.replace('?t=', '#t=')
if '#t=' in value:
sp = value.split('#t=')
video_id = sp[0]
old_ts = sp[1]
total_seconds = YouTubeVideoHelper.time_to_seconds(old_ts)
value = '%s?start=%i' % (video_id, total_seconds)
return value
| Fix video suggestion review showing wrong time | Fix video suggestion review showing wrong time
| Python | mit | nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance |
34575124ea6b16f7a7d4f2ae5e073a87709843d2 | engine/meta.py | engine/meta.py | registered = {}
class GObjectMeta(type):
def __new__(cls, name, bases, dict):
c = super().__new__(cls, name, bases, dict)
qualname = '{}.{}'.format(c.__module__, c.__qualname__)
if qualname in registered:
print(cls, qualname)
c = type(name, (registered[qualname], c), {})
return c
def register(name):
def decorator(cls):
registered[name] = cls
return cls
return decorator
| registered = {}
created = {}
class GObjectMeta(type):
def __new__(cls, name, bases, dict):
c = super().__new__(cls, name, bases, dict)
# Do not handle classes that are already decorated
if c.__module__.startswith('<meta>'):
return c
# Fullname of the class (base module + qualified name)
fullname = '{}.{}'.format(c.__module__, c.__qualname__)
# Decorate registered classes
if fullname in registered:
print(cls, fullname)
c = type(name,
(registered[fullname], c),
{'__module__': '<meta>.{}'.format(fullname)})
# Set fullname, save class and return
c.__fullname__ = fullname
created[fullname] = c
return c
def register(name):
def decorator(cls):
registered[name] = cls
return cls
return decorator
| Add __fullname__ attribute on all game classes | Add __fullname__ attribute on all game classes
| Python | bsd-3-clause | entwanne/NAGM |
95ccab69cfff30c24932c4cd156983a29639435d | nginxauthdaemon/crowdauth.py | nginxauthdaemon/crowdauth.py | import crowd
from auth import Authenticator
class CrowdAuthenticator(Authenticator):
"""Atlassian Crowd authenticator. Requires configuration options CROWD_URL, CROWD_APP_NAME, CROWD_APP_PASSWORD"""
def __init__(self, config):
super(CrowdAuthenticator, self).__init__(config)
app_url = config['CROWD_URL']
app_user = config['CROWD_APP_NAME']
app_pass = config['CROWD_APP_PASSWORD']
self._cs = crowd.CrowdServer(app_url, app_user, app_pass)
def authenticate(self, username, password):
result = self._cs.auth_user(username, password)
return result.get('name') == username
| import crowd
from auth import Authenticator
class CrowdAuthenticator(Authenticator):
"""Atlassian Crowd authenticator. Requires configuration options CROWD_URL, CROWD_APP_NAME, CROWD_APP_PASSWORD"""
def __init__(self, config):
super(CrowdAuthenticator, self).__init__(config)
app_url = config['CROWD_URL']
app_user = config['CROWD_APP_NAME']
app_pass = config['CROWD_APP_PASSWORD']
self._cs = crowd.CrowdServer(app_url, app_user, app_pass)
def authenticate(self, username, password):
result = self._cs.auth_user(username, password)
if result == None:
# auth failed
return False
# auth succeeded
return result.get('name') == username
| Fix 500 error when Crowd auth is failed | Fix 500 error when Crowd auth is failed
| Python | mit | akurdyukov/nginxauthdaemon,akurdyukov/nginxauthdaemon |
6c2a154bf902b5f658b2c2cbf4b65c6ed33e6c1b | pywineds/utils.py | pywineds/utils.py |
"""
Exposes utility functions.
"""
from contextlib import contextmanager
import logging
import timeit
log = logging.getLogger("wineds")
@contextmanager
def time_it(task_desc):
"""
A context manager for timing chunks of code and logging it.
Arguments:
task_desc: task description for logging purposes
"""
start_time = timeit.default_timer()
yield
elapsed = timeit.default_timer() - start_time
log.info("elapsed (%s): %.4f seconds" % (task_desc, elapsed))
|
"""
Exposes utility functions.
"""
from contextlib import contextmanager
import logging
import timeit
REPORTING_TYPE_ALL = ""
REPORTING_TYPE_ELD = "TC-Election Day Reporting"
REPORTING_TYPE_VBM = "TC-VBM Reporting"
REPORTING_KEYS_SIMPLE = (REPORTING_TYPE_ALL, )
REPORTING_KEYS_COMPLETE = (REPORTING_TYPE_ELD, REPORTING_TYPE_VBM)
REPORTING_INDICES = {
REPORTING_TYPE_ALL: 0,
REPORTING_TYPE_ELD: 1,
REPORTING_TYPE_VBM: 2,
}
log = logging.getLogger("wineds")
@contextmanager
def time_it(task_desc):
"""
A context manager for timing chunks of code and logging it.
Arguments:
task_desc: task description for logging purposes
"""
start_time = timeit.default_timer()
yield
elapsed = timeit.default_timer() - start_time
log.info("elapsed (%s): %.4f seconds" % (task_desc, elapsed))
| Add some reporting_type global variables. | Add some reporting_type global variables.
| Python | bsd-3-clause | cjerdonek/wineds-converter |
febd7d8b113b8ef3ac9a843b873f4be5b203b53c | apps/bluebottle_utils/models.py | apps/bluebottle_utils/models.py | from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
line1 = models.CharField(max_length=100, blank=True)
line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
| from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
line1 = models.CharField(max_length=100, blank=True)
line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.line1[:80]
class Meta:
abstract = True
| Update __unicode__ method in Address model for previous changes. | Update __unicode__ method in Address model for previous changes.
| Python | bsd-3-clause | onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site |
3b7658dd7783b789cd4632caa94f0ba044305418 | bugsnag/handlers.py | bugsnag/handlers.py | from __future__ import division, print_function, absolute_import
import logging
import bugsnag
class BugsnagHandler(logging.Handler):
def __init__(self, api_key=None):
super(BugsnagHandler, self).__init__()
# Check if API key has been specified.
if not bugsnag.configuration.api_key and not api_key:
raise Exception, "API key has not been configured or specified"
# If API key specified in logger, always use this value, even if another
# value was configured previously
if api_key:
bugsnag.configuration.api_key = api_key
def emit(self, record):
if record.levelname.lower() in ['error', 'critical']:
severity = 'error'
elif record.levelname.lower() in ['warning', ]:
severity = 'warning'
else:
severity = 'info'
if record.exc_info:
bugsnag.notify(record.exc_info, severity=severity)
else:
exc = Exception(record.message)
bugsnag.notify(exc, severity=severity) | from __future__ import division, print_function, absolute_import
import logging
import bugsnag
class BugsnagHandler(logging.Handler):
def __init__(self, api_key=None):
super(BugsnagHandler, self).__init__()
# Check if API key has been specified.
if not bugsnag.configuration.api_key and not api_key:
raise Exception, "API key has not been configured or specified"
# If API key specified in logger, always use this value, even if another
# value was configured previously
if api_key:
bugsnag.configuration.api_key = api_key
def emit(self, record):
if record.levelname.lower() in ['error', 'critical']:
severity = 'error'
elif record.levelname.lower() in ['warning', ]:
severity = 'warning'
else:
severity = 'info'
if record.exc_info:
bugsnag.notify(record.exc_info, severity=severity)
else:
# Only extract a few specific fields, as we don't want to
# repeat data already being sent over the wire (such as exc)
record_fields = ['asctime', 'created', 'levelname', 'levelno', 'msecs',
'name', 'process', 'processName', 'relativeCreated', 'thread',
'threadName', ]
extra_data = dict([ ( field, getattr(record, field) )
for field in record_fields ])
# Create exception type dynamically, to prevent bugsnag.handlers
# being prepended to the exception name due to class name
# detection in utils. Because we are messing with the module
# internals, we don't really want to expose this class anywhere
exc_type = type('LogMessage', (Exception, ), {})
exc = exc_type(record.message)
exc.__module__ = '__main__'
bugsnag.notify(exc, severity=severity, extra_data=extra_data) | Send partial LogRecord event in extra ata | Send partial LogRecord event in extra ata
| Python | mit | overplumbum/bugsnag-python,overplumbum/bugsnag-python,bugsnag/bugsnag-python,bugsnag/bugsnag-python |
5cc0931b0b636c627efa3ada9abae29528799554 | machine/pentium/translator.py | machine/pentium/translator.py | '''Module for handling assembly language code.'''
import sys
import traceback
from transf import transformation
from transf import parse
from machine.pentium.data import *
from machine.pentium.binary import *
from machine.pentium.logical import *
from machine.pentium.shift import *
from machine.pentium.control import *
from machine.pentium.flag import *
from machine.pentium.misc import *
from machine.pentium.simplify import simplify
class OpcodeDispatch(transformation.Transformation):
"""Transformation to quickly dispatch the transformation to the appropriate
transformation."""
def apply(self, trm, ctx):
if not trm.rmatch('Asm(_, [*])'):
raise exception.Failure
opcode, operands = trm.args
opcode = opcode.value
try:
trf = eval("asm" + opcode.upper())
except NameError:
sys.stderr.write("warning: don't now how to translate opcode '%s'\n" % opcode)
raise transf.exception.Failure
try:
return trf.apply(operands, ctx)
except exception.Failure:
sys.stderr.write("warning: failed to translate opcode '%s'\n" % opcode)
traceback.print_exc()
raise
parse.Transfs('''
doStmt =
?Asm(opcode, _) & (
OpcodeDispatch() +
![<id>]
) ;
Try(simplify)
+ ![<id>]
doModule =
~Module(<lists.MapConcat(doStmt)>)
''')
| '''Module for handling assembly language code.'''
import sys
import traceback
from transf import transformation
from transf import parse
import ir.check
from machine.pentium.data import *
from machine.pentium.binary import *
from machine.pentium.logical import *
from machine.pentium.shift import *
from machine.pentium.control import *
from machine.pentium.flag import *
from machine.pentium.misc import *
from machine.pentium.simplify import simplify
class OpcodeDispatch(transformation.Transformation):
"""Transformation to quickly dispatch the transformation to the appropriate
transformation."""
def apply(self, trm, ctx):
if not trm.rmatch('Asm(_, [*])'):
raise exception.Failure
opcode, operands = trm.args
opcode = opcode.value
try:
trf = eval("asm" + opcode.upper())
except NameError:
sys.stderr.write("warning: don't now how to translate opcode '%s'\n" % opcode)
raise transf.exception.Failure
try:
return trf.apply(operands, ctx)
except exception.Failure:
sys.stderr.write("warning: failed to translate opcode '%s'\n" % opcode)
traceback.print_exc()
raise
parse.Transfs('''
doStmt =
?Asm(opcode, _) & (
OpcodeDispatch() & Map(ir.check.stmt) +
![<id>]
) ;
Try(simplify)
+ ![<id>]
doModule =
~Module(<lists.MapConcat(doStmt)>)
''', debug=False)
| Check AST after translating statements. | Check AST after translating statements.
| Python | lgpl-2.1 | mewbak/idc,mewbak/idc |
21e9254abeebb7260f74db9c94e480cc2b5bbcc9 | tests/conftest.py | tests/conftest.py | import pytest
@pytest.fixture(scope='session')
def base_url(base_url, request):
return base_url or 'https://developer.allizom.org'
| import pytest
VIEWPORT = {
'large': {'width': 1201, 'height': 1024}, # also nav-break-ends
'desktop': {'width': 1025, 'height': 1024},
'tablet': {'width': 851, 'height': 1024}, # also nav-block-ends
'mobile': {'width': 481, 'height': 1024},
'small': {'width': 320, 'height': 480}}
@pytest.fixture(scope='session')
def base_url(base_url, request):
return base_url or 'https://developer.allizom.org'
@pytest.fixture
def selenium(request, selenium):
viewport = VIEWPORT['large']
if request.keywords.get('viewport') is not None:
viewport = VIEWPORT[request.keywords.get('viewport').args[0]]
selenium.set_window_size(viewport['width'], viewport['height'])
return selenium
| Add viewport sizes fixture to tests. | Add viewport sizes fixture to tests.
| Python | mpl-2.0 | safwanrahman/kuma,Elchi3/kuma,mozilla/kuma,jwhitlock/kuma,SphinxKnight/kuma,SphinxKnight/kuma,Elchi3/kuma,mozilla/kuma,SphinxKnight/kuma,a2sheppy/kuma,safwanrahman/kuma,Elchi3/kuma,mozilla/kuma,yfdyh000/kuma,yfdyh000/kuma,SphinxKnight/kuma,safwanrahman/kuma,a2sheppy/kuma,yfdyh000/kuma,yfdyh000/kuma,safwanrahman/kuma,SphinxKnight/kuma,jwhitlock/kuma,safwanrahman/kuma,SphinxKnight/kuma,a2sheppy/kuma,a2sheppy/kuma,escattone/kuma,Elchi3/kuma,Elchi3/kuma,jwhitlock/kuma,jwhitlock/kuma,safwanrahman/kuma,escattone/kuma,escattone/kuma,a2sheppy/kuma,jwhitlock/kuma,yfdyh000/kuma,mozilla/kuma,mozilla/kuma,yfdyh000/kuma |
8691191000af0ad736f6b4bedf720e972ab962a8 | tests/conftest.py | tests/conftest.py | # Do not run actual tests in linter environments.
def pytest_collection_modifyitems(session, config, items):
for linter in ["flake8", "black", "isort"]:
try:
if config.getoption("--" + linter):
items[:] = [item for item in items if item.get_marker(linter)]
except ValueError:
# An old python version without installed linter.
pass
| # Do not run actual tests in linter environments.
def pytest_collection_modifyitems(session, config, items):
for linter in ["flake8", "black", "isort"]:
try:
if config.getoption("--" + linter):
items[:] = [item for item in items if item.get_closest_marker(linter)]
except ValueError:
# An old python version without installed linter.
pass
| Correct pytest linter test collection. | Correct pytest linter test collection.
| Python | bsd-2-clause | proofit404/dependencies,proofit404/dependencies,proofit404/dependencies,proofit404/dependencies |
534633d078fe6f81e67ead075ac31faac0c3c60d | tests/__init__.py | tests/__init__.py | import pycurl
def setup_package():
print('Testing %s' % pycurl.version)
| def setup_package():
# import here, not globally, so that running
# python -m tests.appmanager
# to launch the app manager is possible without having pycurl installed
# (as the test app does not depend on pycurl)
import pycurl
print('Testing %s' % pycurl.version)
| Make it possible to run test app without pycurl being installed | Make it possible to run test app without pycurl being installed
| Python | lgpl-2.1 | pycurl/pycurl,pycurl/pycurl,pycurl/pycurl |
0909837d93c44acc7374a7dee50e7eb869999e92 | geotrek/settings/dev.py | geotrek/settings/dev.py | from .default import * # NOQA
#
# Django Development
# ..........................
DEBUG = True
TEMPLATE_DEBUG = True
SOUTH_TESTS_MIGRATE = False # Tested at settings.tests
#
# Developper additions
# ..........................
INSTALLED_APPS = (
'debug_toolbar',
'django_extensions',
) + INSTALLED_APPS
INTERNAL_IPS = (
'127.0.0.1', # localhost default
'10.0.3.1', # lxc default
)
#
# Use some default tiles
# ..........................
LEAFLET_CONFIG['TILES'] = [
(gettext_noop('Scan'), 'http://{s}.tile.osm.org/{z}/{x}/{y}.png', '(c) OpenStreetMap Contributors'),
(gettext_noop('Ortho'), 'http://{s}.tiles.mapbox.com/v3/openstreetmap.map-4wvf9l0l/{z}/{x}/{y}.jpg', '(c) MapBox'),
]
LEAFLET_CONFIG['OVERLAYS'] = [
(gettext_noop('Coeur de parc'), 'http://{s}.tilestream.makina-corpus.net/v2/coeur-ecrins/{z}/{x}/{y}.png', 'Ecrins'),
]
LEAFLET_CONFIG['SRID'] = 3857
LOGGING['loggers']['geotrek']['level'] = 'DEBUG'
LOGGING['loggers']['']['level'] = 'DEBUG'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
| from .default import * # NOQA
#
# Django Development
# ..........................
DEBUG = True
TEMPLATE_DEBUG = True
SOUTH_TESTS_MIGRATE = False # Tested at settings.tests
#
# Developper additions
# ..........................
INSTALLED_APPS = (
# 'debug_toolbar',
'django_extensions',
) + INSTALLED_APPS
INTERNAL_IPS = (
'127.0.0.1', # localhost default
'10.0.3.1', # lxc default
)
#
# Use some default tiles
# ..........................
LEAFLET_CONFIG['TILES'] = [
(gettext_noop('Scan'), 'http://{s}.tile.osm.org/{z}/{x}/{y}.png', '(c) OpenStreetMap Contributors'),
(gettext_noop('Ortho'), 'http://{s}.tiles.mapbox.com/v3/openstreetmap.map-4wvf9l0l/{z}/{x}/{y}.jpg', '(c) MapBox'),
]
LEAFLET_CONFIG['OVERLAYS'] = [
(gettext_noop('Coeur de parc'), 'http://{s}.tilestream.makina-corpus.net/v2/coeur-ecrins/{z}/{x}/{y}.png', 'Ecrins'),
]
LEAFLET_CONFIG['SRID'] = 3857
LOGGING['loggers']['geotrek']['level'] = 'DEBUG'
LOGGING['loggers']['']['level'] = 'DEBUG'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
| Disable debug toolbar. It creates import problems | Disable debug toolbar. It creates import problems
| Python | bsd-2-clause | makinacorpus/Geotrek,Anaethelion/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,Anaethelion/Geotrek,Anaethelion/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,Anaethelion/Geotrek,GeotrekCE/Geotrek-admin |
f4479b3aa7828e646e3a26493a1b8dfe9174e209 | betty/conf/urls.py | betty/conf/urls.py | try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
from .app import settings
from django.conf.urls.static import static
try:
from django.conf.urls import include, patterns, url
except ImportError:
# django < 1.5 compat
from django.conf.urls.defaults import include, patterns, url # noqa
image_path = urlparse(settings.BETTY_IMAGE_URL).path
if image_path.startswith("/"):
image_path = image_path[1:]
urlpatterns = patterns('',
url(r'^{0}'.format(image_path), include("betty.cropper.urls")), # noqa
url(r'browser/', include("betty.image_browser.urls")),
url(r'login/', "django.contrib.auth.views.login")
)
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
from .app import settings
from django.conf.urls.static import static
try:
from django.conf.urls import include, patterns, url
except ImportError:
# django < 1.5 compat
from django.conf.urls.defaults import include, patterns, url # noqa
image_path = urlparse(settings.BETTY_IMAGE_URL).path
if image_path.startswith("/"):
image_path = image_path[1:]
if not image_path.endswith("/"):
image_path += "/"
urlpatterns = patterns('',
url(r'^{0}'.format(image_path), include("betty.cropper.urls")), # noqa
url(r'browser/', include("betty.image_browser.urls")),
url(r'login/', "django.contrib.auth.views.login")
)
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| Append trailing slash to BETTY_IMAGE_URL if not present | Append trailing slash to BETTY_IMAGE_URL if not present
| Python | mit | theonion/betty-cropper,theonion/betty-cropper,theonion/betty-cropper,theonion/betty-cropper |
1b9622cedecef0c6c45c11a84bd178adcff752e2 | squadron/exthandlers/download.py | squadron/exthandlers/download.py | import urllib
from extutils import get_filename
from template import render
import requests
import yaml
import jsonschema
SCHEMA = {
'$schema': 'http://json-schema.org/draft-04/schema#',
'description': 'Describes the extract extension handler input',
'type':'object',
'properties': {
'url': {
'description': 'Where to download the tarball/zip/etc from',
'type':'string'
},
'username': {
'description': 'Username to login with BASIC Auth',
'type':'string'
},
'password': {
'description': 'Password to use with BASIC Auth',
'type':'string'
}
},
'required': ['url']
}
def _download_file(url, handle, auth=None):
r = requests.get(url, auth=auth, stream=True)
for chunk in r.iter_content(chunk_size=4096):
if chunk: # filter out keep-alive new chunks
handle.write(chunk)
handle.close()
def ext_download(loader, inputhash, abs_source, dest, **kwargs):
""" Downloads a ~download file"""
contents = yaml.load(render(abs_source, inputhash, loader))
jsonschema.validate(contents, SCHEMA)
finalfile = get_filename(dest)
handle = open(finalfile, 'w')
auth = None
if 'username' in contents and 'password' in contents:
auth = (contents['username'], contents['password'])
_download_file(contents['url'], handle, auth)
return finalfile
| import urllib
from extutils import get_filename
from template import render
import requests
import yaml
import jsonschema
SCHEMA = {
'$schema': 'http://json-schema.org/draft-04/schema#',
'description': 'Describes the extract extension handler input',
'type':'object',
'properties': {
'url': {
'description': 'Where to download the tarball/zip/etc from',
'type':'string'
},
'username': {
'description': 'Username to login with BASIC Auth',
'type':'string'
},
'password': {
'description': 'Password to use with BASIC Auth',
'type':'string'
}
},
'required': ['url']
}
def _download_file(url, handle, auth=None):
r = requests.get(url, auth=auth, stream=True)
r.raise_for_status()
for chunk in r.iter_content(chunk_size=4096):
if chunk: # filter out keep-alive new chunks
handle.write(chunk)
handle.close()
def ext_download(loader, inputhash, abs_source, dest, **kwargs):
""" Downloads a ~download file"""
contents = yaml.load(render(abs_source, inputhash, loader))
jsonschema.validate(contents, SCHEMA)
finalfile = get_filename(dest)
handle = open(finalfile, 'w')
auth = None
if 'username' in contents and 'password' in contents:
auth = (contents['username'], contents['password'])
_download_file(contents['url'], handle, auth)
return finalfile
| Raise Exception when there's an HTTP error | Raise Exception when there's an HTTP error
| Python | mit | gosquadron/squadron,gosquadron/squadron |
7e51d073952d10d3994fb93458e60c03b6746099 | app/services/g6importService.py | app/services/g6importService.py | import json
import jsonschema
from flask import current_app
from jsonschema import validate
with open("schemata/g6-scs-schema.json") as json_file1:
G6_SCS_SCHEMA = json.load(json_file1)
with open("schemata/g6-saas-schema.json") as json_file2:
G6_SAAS_SCHEMA = json.load(json_file2)
with open("schemata/g6-iaas-schema.json") as json_file3:
G6_IAAS_SCHEMA = json.load(json_file3)
with open("schemata/g6-paas-schema.json") as json_file4:
G6_PAAS_SCHEMA = json.load(json_file4)
def validate_json(submitted_json):
#current_app.logger.info('Validating JSON:' + str(submitted_json))
try:
validate(submitted_json, G6_SCS_SCHEMA)
return 'G6-SCS'
except jsonschema.ValidationError as e1:
try:
validate(submitted_json, G6_SAAS_SCHEMA)
return 'G6-SaaS'
except jsonschema.ValidationError as e2:
try:
validate(submitted_json, G6_IAAS_SCHEMA)
return 'G6-IaaS'
except jsonschema.ValidationError as e3:
try:
validate(submitted_json, G6_PAAS_SCHEMA)
return 'G6-PaaS'
except jsonschema.ValidationError as e4:
print e4.message
print 'Failed validation'
return False
else:
return True
| import json
import jsonschema
from jsonschema import validate
with open("schemata/g6-scs-schema.json") as json_file1:
G6_SCS_SCHEMA = json.load(json_file1)
with open("schemata/g6-saas-schema.json") as json_file2:
G6_SAAS_SCHEMA = json.load(json_file2)
with open("schemata/g6-iaas-schema.json") as json_file3:
G6_IAAS_SCHEMA = json.load(json_file3)
with open("schemata/g6-paas-schema.json") as json_file4:
G6_PAAS_SCHEMA = json.load(json_file4)
def validate_json(submitted_json):
if validates_against_schema(G6_SCS_SCHEMA,submitted_json):
return 'G6-SCS'
elif validates_against_schema(G6_SAAS_SCHEMA,submitted_json):
return 'G6-SaaS'
elif validates_against_schema(G6_PAAS_SCHEMA,submitted_json):
return 'G6-PaaS'
elif validates_against_schema(G6_IAAS_SCHEMA,submitted_json):
return 'G6-IaaS'
else:
print 'Failed validation'
return False
def validates_against_schema(schema, submitted_json):
    """Return True when submitted_json satisfies schema, else False."""
    try:
        validate(submitted_json, schema)
        return True
    except jsonschema.ValidationError:
        return False
| Improve code by avoiding flow through exception handling | Improve code by avoiding flow through exception handling
| Python | mit | RichardKnop/digitalmarketplace-api,mtekel/digitalmarketplace-api,mtekel/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,alphagov/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,mtekel/digitalmarketplace-api,RichardKnop/digitalmarketplace-api,mtekel/digitalmarketplace-api |
b2aa4e4375dfea9c20a8242edeed495e649acc1a | vroom/graphics.py | vroom/graphics.py | import pygame
class Graphic:
    """Draws simulation objects (cars, roads) onto a pygame surface.

    Rendering is dispatched on the object's class name via draw_methods;
    objects of unregistered classes are silently ignored.
    """
    car_color = (255, 50, 50)
    car_width = 3
    road_color = (255, 255, 255)
    road_width = 6
    # Maps an object's class name to the method that draws it.
    draw_methods = {
        'Car': 'draw_car',
        'Road': 'draw_road',
    }
    def __init__(self, surface):
        # pygame surface that all draw_* methods render onto.
        self.surface = surface
    def draw(self, obj):
        # Dispatch on the object's class name; unknown classes are a no-op.
        object_class = obj.__class__.__name__
        method_name = self.draw_methods.get(object_class, None)
        if method_name:
            method = getattr(self, method_name)
            method(obj)
    def draw_car(self, car):
        # Cars are drawn as small squares at their coordinates.
        coord = car.coordinates
        acceleration_rate = car.acceleration_rate
        rect = pygame.Rect(coord.x, coord.y,
                           self.car_width, self.car_width)
        # Change car color depending on acceleration
        if acceleration_rate > 0:
            # Accelerating: blue channel scales with the rate, capped at 1.
            rate = min(1, acceleration_rate)
            color = (50, 50, int(rate * 255))
        else:
            # Braking/coasting: red channel scales with the rate, capped at -1.
            rate = max(-1, acceleration_rate)
            color = (int(-rate * 255), 50, 50)
        pygame.draw.rect(self.surface, color, rect, 0)
    def draw_road(self, road):
        # Roads are drawn as open polylines through their points.
        pointlist = road.pointlist()
        closed = False
        pygame.draw.lines(self.surface, self.road_color, closed, pointlist,
                          self.road_width)
| import pygame
class Graphic:
    """Draws simulation objects (cars, roads) onto a pygame surface.

    Rendering is dispatched on the object's class name via draw_methods;
    objects of unregistered classes are silently ignored.
    """
    car_color = (255, 50, 50)
    car_width = 3
    road_color = (255, 255, 255)
    road_width = 6
    # Maps an object's class name to the method that draws it.
    draw_methods = {
        'Car': 'draw_car',
        'Road': 'draw_road',
    }
    def __init__(self, surface):
        self.surface = surface
    def draw(self, obj):
        """Render obj when a drawing method is registered for its class."""
        handler_name = self.draw_methods.get(obj.__class__.__name__, None)
        if handler_name:
            getattr(self, handler_name)(obj)
    def draw_car(self, car):
        """Draw a car as a small square tinted by its acceleration."""
        position = car.coordinates
        rect = pygame.Rect(position.x, position.y,
                           self.car_width, self.car_width)
        accel = car.acceleration_rate
        if accel > 0:
            # Accelerating: blue channel scales with the rate, capped at 1.
            color = (0, 0, int(min(1, accel) * 255))
        else:
            # Braking/coasting: red channel scales with the rate, capped at -1.
            color = (int(-max(-1, accel) * 255), 0, 0)
        pygame.draw.rect(self.surface, color, rect, 0)
    def draw_road(self, road):
        """Draw a road as an open polyline through its points."""
        pygame.draw.lines(self.surface, self.road_color, False,
                          road.pointlist(), self.road_width)
| Make car colors easier to read | Make car colors easier to read
| Python | mit | thibault/vroom |
135d4ff79a9a650442548fa5acf44f2dbcd20c0e | voltron/common.py | voltron/common.py | import os
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = os.path.expanduser('~/.voltron/')
VOLTRON_CONFIG = os.path.join(VOLTRON_DIR, 'config')
def configure_logging():
    """Install LOG_CONFIG into the logging machinery and return the
    'voltron' logger."""
    logging.config.dictConfig(LOG_CONFIG)
    return logging.getLogger('voltron')
| import os
import logging
import logging.config
LOG_CONFIG = {
'version': 1,
'formatters': {
'standard': {'format': 'voltron: [%(levelname)s] %(message)s'}
},
'handlers': {
'default': {
'class': 'logging.StreamHandler',
'formatter': 'standard'
}
},
'loggers': {
'voltron': {
'handlers': ['default'],
'level': 'INFO',
'propogate': True,
}
}
}
VOLTRON_DIR = os.path.expanduser('~/.voltron/')
VOLTRON_CONFIG = os.path.join(VOLTRON_DIR, 'config')
def configure_logging():
logging.config.dictConfig(LOG_CONFIG)
log = logging.getLogger('voltron')
return log
# Python 3 shims
try:
    # Probe the name directly: `hasattr(__builtins__, ...)` is unreliable
    # because __builtins__ is a plain dict in imported modules.
    xrange
except NameError:
    # Python 3 removed xrange; range has the same lazy-sequence semantics.
    xrange = range
| Create some shims for py3k | Create some shims for py3k
| Python | mit | snare/voltron,snare/voltron,snare/voltron,snare/voltron |
05ed4d54d48ddf6540f8dc0d162e4fc95337dbb6 | blah/commands.py | blah/commands.py | import os
import subprocess
import sys
def find_command(name):
return commands[name]
def what_is_this_command():
repository = find_current_repository()
if repository is None:
print "Could not find source control repository"
else:
print "{0}+file://{1}".format(repository.type, repository.path)
def find_current_repository():
directory = os.getcwd()
while directory is not None:
files = os.listdir(directory)
if ".git" in files:
return Repository(os.path.join(directory, ".git"), "git")
directory = parent(directory)
return None
class Repository(object):
    """A discovered source-control repository.

    `path` locates the repository metadata directory and `type` names
    the version-control system (e.g. "git").
    """
    def __init__(self, repo_path, repo_type):
        self.path, self.type = repo_path, repo_type
def parent(file_path):
    """Return the directory containing file_path, or None once dirname
    reaches a fixed point (the filesystem root)."""
    enclosing = os.path.dirname(file_path)
    if enclosing == file_path:
        return None
    return enclosing
commands = {
"whatisthis": what_is_this_command,
"what-is-this": what_is_this_command
}
| import os
import subprocess
import sys
def find_command(name):
return commands[name]
def what_is_this_command():
    """Print the repository URL for a directory.

    Uses the directory given on the command line (sys.argv[2]) when
    present, otherwise the current working directory.
    """
    directory = sys.argv[2] if len(sys.argv) > 2 else os.getcwd()
    repository = find_repository(directory)
    if repository is None:
        print "Could not find source control repository"
    else:
        # e.g. "git+file:///path/to/project/.git"
        print "{0}+file://{1}".format(repository.type, repository.path)
def find_repository(directory):
    """Walk upwards from directory looking for a repository.

    Returns a Repository for the first ancestor (including directory
    itself) containing a ".git" entry, or None if the walk reaches the
    filesystem root without finding one.
    """
    directory = os.path.abspath(directory)
    while directory is not None:
        files = os.listdir(directory)
        if ".git" in files:
            return Repository(os.path.join(directory, ".git"), "git")
        # parent() yields None at the filesystem root, ending the loop.
        directory = parent(directory)
    return None
class Repository(object):
def __init__(self, repo_path, repo_type):
self.path = repo_path
self.type = repo_type
def parent(file_path):
parent = os.path.dirname(file_path)
if file_path == parent:
return None
else:
return parent
commands = {
"whatisthis": what_is_this_command,
"what-is-this": what_is_this_command
}
| Allow path to be explicitly set when using what-is-this | Allow path to be explicitly set when using what-is-this
| Python | bsd-2-clause | mwilliamson/mayo |
675364683c5415f1db7a5599d8ad97f72f69aaf0 | buckets/utils.py | buckets/utils.py | import string
import random
from django.conf import settings
def validate_settings():
    """Assert that the AWS settings block and its required keys are set.

    NOTE(review): these checks use `assert`, which is stripped when
    Python runs with -O -- consider raising explicitly instead.
    """
    assert settings.AWS, \
        "No AWS settings found"
    assert settings.AWS.get('ACCESS_KEY'), \
        "AWS access key is not set in settings"
    assert settings.AWS.get('SECRET_KEY'), \
        "AWS secret key is not set in settings"
    assert settings.AWS.get('BUCKET'), \
        "AWS bucket name is not set in settings"
ID_FIELD_LENGTH = 24
alphabet = string.ascii_lowercase + string.digits
for loser in 'l1o0':
i = alphabet.index(loser)
alphabet = alphabet[:i] + alphabet[i + 1:]
def byte_to_base32_chr(byte):
return alphabet[byte & 31]
def random_id():
rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)]
return ''.join(map(byte_to_base32_chr, rand_id))
| import string
import random
from django.conf import settings
def validate_settings():
assert settings.AWS, \
"No AWS settings found"
assert settings.AWS.get('ACCESS_KEY'), \
"AWS access key is not set in settings"
assert settings.AWS.get('SECRET_KEY'), \
"AWS secret key is not set in settings"
assert settings.AWS.get('BUCKET'), \
"AWS bucket name is not set in settings"
ID_FIELD_LENGTH = 24
alphabet = string.ascii_lowercase + string.digits
alphabet0 = string.ascii_lowercase + string.ascii_lowercase
for loser in 'l1o0':
i = alphabet.index(loser)
alphabet = alphabet[:i] + alphabet[i + 1:]
for loser in 'lo':
i = alphabet0.index(loser)
alphabet0 = alphabet0[:i] + alphabet0[i + 1:]
def byte_to_base32_chr(byte):
return alphabet[byte & 31]
def byte_to_letter(byte):
return alphabet0[byte & 31]
def random_id():
    """Return a random ID of ID_FIELD_LENGTH characters.

    The first character is drawn from the letters-only alphabet0, so IDs
    always begin with a letter; the rest use the base32-style alphabet.
    """
    rand_id = [random.randint(0, 0xFF) for i in range(ID_FIELD_LENGTH)]
    return (byte_to_letter(rand_id[0]) +
            ''.join(map(byte_to_base32_chr, rand_id[1:])))
| Make random IDs start with a letter | Make random IDs start with a letter
| Python | agpl-3.0 | Cadasta/django-buckets,Cadasta/django-buckets,Cadasta/django-buckets |
c298abfddbdf83a12ab6c826dbb3562fb358e963 | foyer/tests/test_performance.py | foyer/tests/test_performance.py | import mbuild as mb
import parmed as pmd
import pytest
from foyer import Forcefield
from foyer.tests.utils import get_fn
from foyer.utils.io import has_mbuild
@pytest.mark.timeout(1)
def test_fullerene():
fullerene = pmd.load_file(get_fn('fullerene.pdb'), structure=True)
forcefield = Forcefield(get_fn('fullerene.xml'))
forcefield.apply(fullerene, assert_dihedral_params=False)
@pytest.mark.skipif(not has_mbuild, reason="mbuild is not installed")
@pytest.mark.timeout(15)
def test_surface():
surface = mb.load(get_fn('silica.mol2'))
forcefield = Forcefield(get_fn('opls-silica.xml'))
forcefield.apply(surface, assert_bond_params=False)
@pytest.mark.skipif(not has_mbuild, reason="mbuild is not installed")
@pytest.mark.timeout(60)
def test_polymer():
peg100 = mb.load(get_fn('peg100.mol2'))
forcefield = Forcefield(name='oplsaa')
forcefield.apply(peg100)
| import mbuild as mb
import parmed as pmd
import pytest
from foyer import Forcefield
from foyer.tests.utils import get_fn
from foyer.utils.io import has_mbuild
@pytest.mark.timeout(1)
def test_fullerene():
    """Atom-typing a fullerene must finish within one second."""
    fullerene = pmd.load_file(get_fn('fullerene.pdb'), structure=True)
    forcefield = Forcefield(get_fn('fullerene.xml'))
    forcefield.apply(fullerene, assert_dihedral_params=False)
@pytest.mark.skipif(not has_mbuild, reason="mbuild is not installed")
@pytest.mark.timeout(15)
def test_surface():
    """Atom-typing a silica surface must finish within 15 seconds."""
    surface = mb.load(get_fn('silica.mol2'))
    forcefield = Forcefield(get_fn('opls-silica.xml'))
    forcefield.apply(surface, assert_bond_params=False)
@pytest.mark.skipif(not has_mbuild, reason="mbuild is not installed")
@pytest.mark.timeout(45)
def test_polymer():
    """Atom-typing a PEG-100 polymer must finish within 45 seconds."""
    peg100 = mb.load(get_fn('peg100.mol2'))
    forcefield = Forcefield(name='oplsaa')
    forcefield.apply(peg100)
| Drop a timeout time to its previous value | Drop a timeout time to its previous value
| Python | mit | mosdef-hub/foyer,iModels/foyer,mosdef-hub/foyer,iModels/foyer |
c3e2fccbc2a7afa0d146041c0b3392dd89aa5deb | analysis/plot-marker-trajectories.py | analysis/plot-marker-trajectories.py | import climate
import lmj.plot
import numpy as np
import source
import plots
@climate.annotate(
root='load experiment data from this directory',
pattern=('plot data from files matching this pattern', 'option'),
markers=('plot traces of these markers', 'option'),
spline=('interpolate data with a spline of this order', 'option', None, int),
accuracy=('fit spline with this accuracy', 'option', None, float),
)
def main(root,
pattern='*/*block00/*circuit00.csv.gz',
markers='r-fing-index l-fing-index r-heel r-knee',
spline=1,
accuracy=1):
with plots.space() as ax:
for t in source.Experiment(root).trials_matching(pattern):
t.normalize(order=spline, accuracy=accuracy)
for i, marker in enumerate(markers.split()):
df = t.trajectory(marker)
ax.plot(np.asarray(df.x),
np.asarray(df.z),
zs=np.asarray(df.y),
color=lmj.plot.COLOR11[i],
alpha=0.7)
if __name__ == '__main__':
climate.call(main)
| import climate
import lmj.plot
import numpy as np
import source
import plots
@climate.annotate(
    root='load experiment data from this directory',
    pattern=('plot data from files matching this pattern', 'option'),
    markers=('plot traces of these markers', 'option'),
    spline=('interpolate data with a spline of this order', 'option', None, int),
    accuracy=('fit spline with this accuracy', 'option', None, float),
    svt_threshold=('trajectory-SVT threshold', 'option', None, float),
    svt_frames=('number of trajectory-SVT frames', 'option', None, int),
)
def main(root,
         pattern='*/*block00/*circuit00.csv.gz',
         markers='r-fing-index l-fing-index r-heel r-knee',
         spline=None,
         accuracy=0.01,
         svt_threshold=1000,
         svt_frames=5):
    """Plot 3D trajectories of the selected markers for matching trials."""
    with plots.space() as ax:
        for t in source.Experiment(root).trials_matching(pattern):
            # Either smooth with a spline of the requested order, or
            # reindex and apply trajectory SVT.
            if spline:
                t.normalize(order=spline, accuracy=accuracy)
            else:
                t.reindex()
                t.svt(svt_threshold, accuracy, svt_frames)
            for i, marker in enumerate(markers.split()):
                df = t.trajectory(marker)
                # Plot x/z in the plane; the z-axis values come from df.y.
                ax.plot(np.asarray(df.x),
                        np.asarray(df.z),
                        zs=np.asarray(df.y),
                        color=lmj.plot.COLOR11[i],
                        alpha=0.7)
if __name__ == '__main__':
climate.call(main)
| Add SVT options to plotting script. | Add SVT options to plotting script.
| Python | mit | lmjohns3/cube-experiment,lmjohns3/cube-experiment,lmjohns3/cube-experiment |
d06adea5117eb3ebfddd8592889346089c7391f7 | dictlearn/wordnik_api_demo.py | dictlearn/wordnik_api_demo.py | from wordnik import swagger, WordApi, AccountApi
client = swagger.ApiClient(
'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31',
'https://api.wordnik.com/v4')
word_api = WordApi.WordApi(client)
words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse']
for word in words:
print('=== {} ==='.format(word))
defs = word_api.getDefinitions(word)
if not defs:
print("no definitions")
continue
for def_ in defs:
fmt_str = "{} --- {}"
print(fmt_str.format(def_.sourceDictionary, def_.text.encode('utf-8')))
account_api = AccountApi.AccountApi(client)
for i in range(5):
print("Attempt {}".format(i))
status = account_api.getApiTokenStatus()
print("Remaining_calls: {}".format(status.remainingCalls))
| import nltk
from wordnik import swagger, WordApi, AccountApi
# Build a Wordnik API client for the v4 endpoint.
client = swagger.ApiClient(
    'dd3d32ae6b4709e1150040139c308fb77446e0a8ecc93db31',
    'https://api.wordnik.com/v4')
word_api = WordApi.WordApi(client)
toktok = nltk.ToktokTokenizer()
words = ['paint', 'mimic', 'mimics', 'francie', 'frolic', 'funhouse']
for word in words:
    print('=== {} ==='.format(word))
    defs = word_api.getDefinitions(word)
    if not defs:
        print("no definitions")
        continue
    for def_ in defs:
        fmt_str = "{} --- {}"
        # Lowercase and tokenize the definition text, then encode each
        # token to UTF-8 bytes before printing.
        tokenized_def = toktok.tokenize(def_.text.lower())
        tokenized_def = [s.encode('utf-8') for s in tokenized_def]
        print(fmt_str.format(def_.sourceDictionary,
                             tokenized_def))
# Report how many API calls remain on this key after the lookups above.
account_api = AccountApi.AccountApi(client)
for i in range(5):
    print("Attempt {}".format(i))
    status = account_api.getApiTokenStatus()
    print("Remaining_calls: {}".format(status.remainingCalls))
| Add tokenization to the WordNik demo | Add tokenization to the WordNik demo
| Python | mit | tombosc/dict_based_learning,tombosc/dict_based_learning |
a186a29ab028e601bd1f3dff0d37e09e412a7379 | bringing_buzzwords_home/urls.py | bringing_buzzwords_home/urls.py | from django.conf.urls import include, url
from django.contrib import admin
from visualize import views
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from rest_framework import routers
router = routers.DefaultRouter()
router.register(r'state', views.StateViewSet)
urlpatterns = [
url(r'^$', views.index, name='homepage'),
url(r'^state/(?P<state>\D+)$', views.state, name='state'),
url(r'^json/(?P<state>\D+)$', views.state_json, name='state_json'),
url(r'^admin/', include(admin.site.urls)),
url(r'^county/(?P<county>\d+)$', views.county, name='county'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api/', include(router.urls))]
urlpatterns += staticfiles_urlpatterns()
| from django.conf.urls import include, url
from django.contrib import admin
from visualize import views
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from rest_framework import routers
router = routers.DefaultRouter()
router.register(r'state', views.StateViewSet)
urlpatterns = [
url(r'^$', views.index, name='homepage'),
url(r'^state/(?P<state>\D+)/$', views.state, name='state'),
url(r'^json/(?P<state>\D+)/$', views.state_json, name='state_json'),
url(r'^admin/', include(admin.site.urls)),
url(r'^county/(?P<county>\d+)$', views.county, name='county'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api/', include(router.urls))]
urlpatterns += staticfiles_urlpatterns()
| Fix the trailing slash bug. | Fix the trailing slash bug.
| Python | mit | Bringing-Buzzwords-Home/bringing_buzzwords_home,Bringing-Buzzwords-Home/bringing_buzzwords_home,Bringing-Buzzwords-Home/bringing_buzzwords_home |
c2210463fc12121cd4de2a485d7d814d0ebe86b5 | robber/matchers/contain.py | robber/matchers/contain.py | from robber import expect
from base import Base
class Contain(Base):
    """
    expect({'key': value}).to.contain('key')
    expect([1, 2, 3]).to.contain(2)
    """
    def matches(self):
        # Membership test covers both dict keys and sequence elements.
        return self.expected in self.actual
    def failure_message(self):
        # Numbered placeholders keep this working on Python 2.6, where
        # auto-numbered '{}' fields raise ValueError.
        return 'Expected {0} to contain {1}'.format(self.actual, self.expected)
expect.register('contain', Contain)
| from robber import expect
from base import Base
class Contain(Base):
    """
    Matcher asserting that `expected` is a member of `actual`:

    expect({'key': value}).to.contain('key')
    expect([1, 2, 3]).to.contain(2)
    """
    def matches(self):
        # Membership test covers both dict keys and sequence elements.
        return self.expected in self.actual
    def failure_message(self):
        # Numbered fields ({0}, {1}) remain compatible with Python 2.6.
        return 'Expected {0} to contain {1}'.format(self.actual, self.expected)
expect.register('contain', Contain)
| Add numbers in string format For compatibility with python 2.6 | Add numbers in string format
For compatibility with python 2.6 | Python | mit | vesln/robber.py,taoenator/robber.py |
062e65a161f9c84e5cd18b85790b195eec947b99 | social_website_django_angular/social_website_django_angular/urls.py | social_website_django_angular/social_website_django_angular/urls.py | """social_website_django_angular URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
| """social_website_django_angular URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from social_website_django_angular.views import IndexView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url('^.*$', IndexView.as_view(), name='index')
]
| Set up url for index page | Set up url for index page
| Python | mit | tomaszzacharczuk/social-website-django-angular,tomaszzacharczuk/social-website-django-angular,tomaszzacharczuk/social-website-django-angular |
6fd1305f2a4a2e08b51c421b1c2cfdd33b407119 | src/puzzle/problems/problem.py | src/puzzle/problems/problem.py | from data import meta
class Problem(object):
  """Base class for a puzzle problem with lazily computed solutions."""

  def __init__(self, name, lines):
    self.name = name
    self.lines = lines
    self._solutions = None  # Cached Meta of constrained solutions.
    self._constraints = []

  @property
  def kind(self):
    """Name of the concrete problem class."""
    type_name = str(type(self)).strip("'<>")
    return type_name.split('.')[-1]

  @property
  def solution(self):
    """The top-ranked solution."""
    return self.solutions().peek()

  def constrain(self, fn):
    """Add predicate fn(solution, score) and drop any cached solutions."""
    self._constraints.append(fn)
    self._solutions = None

  def solutions(self):
    """Meta of (solution, score) pairs passing every constraint."""
    if self._solutions is None:
      candidates = self._solve().items()
      self._solutions = meta.Meta(
          (k, v) for k, v in candidates
          if all([fn(k, v) for fn in self._constraints]))
    return self._solutions

  def _solve(self):
    """Solves Problem.

    Returns:
      dict Dict mapping solution to score.
    """
    raise NotImplementedError()

  def __repr__(self):
    return '%s()' % self.__class__.__name__
| from data import meta
_THRESHOLD = 0.01


class Problem(object):
  """Base class for a puzzle problem with lazily computed solutions.

  A built-in constraint filters out solutions whose score is at or
  below `threshold`.
  """

  def __init__(self, name, lines, threshold=_THRESHOLD):
    self.name = name
    self.lines = lines
    self._threshold = threshold
    self._solutions = None  # Cached Meta of constrained solutions.
    # Iterator over raw solutions; created on demand in solutions().
    # Initialized here so the attribute always exists.
    self._solutions_iter = None
    self._constraints = [
        lambda k, v: v > self._threshold
    ]

  @property
  def kind(self):
    """Name of the concrete problem class."""
    return str(type(self)).strip("'<>").split('.').pop()

  @property
  def solution(self):
    """The top-ranked solution."""
    return self.solutions().peek()

  def constrain(self, fn):
    """Add predicate fn(solution, score) and drop any cached solutions."""
    self._constraints.append(fn)
    # Invalidate solutions.
    self._solutions = None
    self._solutions_iter = None

  def solutions(self):
    """Meta of (solution, score) pairs passing every constraint."""
    if self._solutions is None:
      self._solutions_iter = self._solve_iter()
      results = []
      for k, v in self._solutions_iter:
        if all(fn(k, v) for fn in self._constraints):
          results.append((k, v))
      self._solutions = meta.Meta(results)
    return self._solutions

  def _solve_iter(self):
    """Iterator over raw (solution, score) pairs from _solve."""
    return iter(self._solve().items())

  def _solve(self):
    """Solves Problem.

    Returns:
      dict Dict mapping solution to score.
    """
    raise NotImplementedError()

  def __repr__(self):
    return '%s()' % self.__class__.__name__
| Set a threshold on Problem and enforce it. | Set a threshold on Problem and enforce it.
| Python | mit | PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge |
84c225a9ca7529bca74f853e7b23a1a750647d6f | campus02/urls.py | campus02/urls.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns(
'',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
| #!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^$', include('campus02.base.urls', namespace='base')),
)
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns(
'',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
| Fix URL routing for index page. | Fix URL routing for index page.
| Python | mit | fladi/django-campus02,fladi/django-campus02 |
d44fee53020470e2d9a8cd2393f5f0125dbd1fab | python/client.py | python/client.py | import grpc
import hello_pb2
import hello_pb2_grpc
def run():
channel = grpc.insecure_channel('localhost:50051')
stub = hello_pb2_grpc.HelloServiceStub(channel)
# ideally, you should have try catch block here too
response = stub.SayHello(hello_pb2.HelloReq(Name='Euler'))
print(response.Result)
try:
response = stub.SayHelloStrict(hello_pb2.HelloReq(
Name='Leonhard Euler'))
except grpc.RpcError as e:
# ouch!
# lets print the gRPC error message
# which is "Length of `Name` cannot be more than 10 characters"
print(e.details())
# lets access the error code, which is `INVALID_ARGUMENT`
# `type` of `status_code` is `grpc.StatusCode`
status_code = e.code()
# should print `INVALID_ARGUMENT`
print(status_code.name)
# should print `(3, 'invalid argument')`
print(status_code.value)
else:
print(response.Result)
if __name__ == '__main__':
run()
| import grpc
import hello_pb2
import hello_pb2_grpc
def run():
    """Call the hello service and demonstrate gRPC error handling."""
    channel = grpc.insecure_channel('localhost:50051')
    stub = hello_pb2_grpc.HelloServiceStub(channel)
    # ideally, you should have try catch block here too
    response = stub.SayHello(hello_pb2.HelloReq(Name='Euler'))
    print(response.Result)
    try:
        response = stub.SayHelloStrict(hello_pb2.HelloReq(
            Name='Leonhard Euler'))
    except grpc.RpcError as e:
        # ouch!
        # lets print the gRPC error message
        # which is "Length of `Name` cannot be more than 10 characters"
        print(e.details())
        # lets access the error code, which is `INVALID_ARGUMENT`
        # `type` of `status_code` is `grpc.StatusCode`
        status_code = e.code()
        # should print `INVALID_ARGUMENT`
        print(status_code.name)
        # should print `(3, 'invalid argument')`
        print(status_code.value)
        # want to do some specific action based on the error?
        if grpc.StatusCode.INVALID_ARGUMENT == status_code:
            # do your stuff here
            pass
    else:
        print(response.Result)
if __name__ == '__main__':
run()
| Update python version for better error handling | Update python version for better error handling
| Python | mit | avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors,avinassh/grpc-errors |
97229a7e51279906254a7befa0456a4c89a9b0ea | planner/models.py | planner/models.py | from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
origin = models.CharField(max_length=63)
destination = models.CharField(max_length=63)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateTimeField()
end_date = models.DateTimeField()
route = models.OneToOneField(Route)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
trip = models.ForeignKey(RoadTrip)
| from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
    """Start and end locations of a trip."""
    # Free-form place names for the trip's endpoints.
    origin = models.CharField(max_length=63)
    destination = models.CharField(max_length=63)
    def __unicode__(self):
        # Human-readable label, e.g. "A to B".
        return "{} to {}".format(
            self.origin,
            self.destination
        )
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route)
def __unicode__(self):
return str(self.waypoint)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateTimeField()
end_date = models.DateTimeField()
route = models.OneToOneField(Route)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
trip = models.ForeignKey(RoadTrip)
def __unicode__(self):
return str(self.description)
| Add unicode methods to model classes | Add unicode methods to model classes
| Python | apache-2.0 | jwarren116/RoadTrip,jwarren116/RoadTrip,jwarren116/RoadTrip |
1aef29a64886522d81d2f6a15bd4e48419a66545 | ziggy/__init__.py | ziggy/__init__.py | # -*- coding: utf-8 -*-
"""
Ziggy
~~~~~~~~
:copyright: (c) 2012 by Rhett Garber
:license: ISC, see LICENSE for more details.
"""
__title__ = 'ziggy'
__version__ = '0.0.1'
__build__ = 0
__author__ = 'Rhett Garber'
__license__ = 'ISC'
__copyright__ = 'Copyright 2012 Rhett Garber'
import logging
from . import utils
from . import network
from .context import Context, set, append, add
from . import context as _context_mod
from .errors import Error
from .timer import timeit
log = logging.getLogger(__name__)
def configure(host, port, recorder=None):
    """Initialize ziggy.

    Tells ziggy where to send its logging data; when unconfigured, log
    data is silently dropped.  Data is either forwarded over the network
    to a ziggyd instance at (host, port) or handed to the supplied
    recorder callable.
    """
    # NOTE(review): this global looks stale -- nothing named
    # _record_function is assigned below; the assignments target the
    # context module's _recorder_function instead.
    global _record_function
    if recorder:
        # NOTE(review): the bare name `context` is not bound by this
        # module's imports (it is imported as _context_mod) -- confirm
        # this does not raise NameError at runtime.
        context._recorder_function = recorder
    elif host and port:
        network.init(host, port)
        context._recorder_function = network.send
    else:
        log.warning("Empty ziggy configuration")
| # -*- coding: utf-8 -*-
"""
Ziggy
~~~~~~~~
:copyright: (c) 2012 by Rhett Garber
:license: ISC, see LICENSE for more details.
"""
__title__ = 'ziggy'
__version__ = '0.0.1'
__build__ = 0
__author__ = 'Rhett Garber'
__license__ = 'ISC'
__copyright__ = 'Copyright 2012 Rhett Garber'
import logging
from . import utils
from . import network
from .context import Context, set, append, add
from . import context as _context_mod
from .errors import Error
from .timer import timeit
log = logging.getLogger(__name__)
def configure(host, port, recorder=None):
    """Initialize ziggy.

    Tells ziggy where to send its logging data; when unconfigured, log
    data is silently dropped.  Data is either forwarded over the network
    to a ziggyd instance at (host, port) or handed to the supplied
    recorder callable.
    """
    # The context module is imported as _context_mod; the bare name
    # `context` is unbound here, so refer to the aliased module.
    if recorder:
        _context_mod._recorder_function = recorder
    elif host and port:
        network.init(host, port)
        _context_mod._recorder_function = network.send
    else:
        # Explicitly disable recording so a previous configuration can be
        # unset (useful in tests).
        log.warning("Empty ziggy configuration")
        _context_mod._recorder_function = None
| Allow unsetting of configuration (for testing) | Allow unsetting of configuration (for testing)
| Python | isc | rhettg/Ziggy,rhettg/BlueOx |
168a7c9b9f5c0699009d8ef6eea0078c2a6a19cc | oonib/handlers.py | oonib/handlers.py | import types
from cyclone import escape
from cyclone import web
class OONIBHandler(web.RequestHandler):
    """Base request handler with JSON error bodies and JSON list support."""

    def write_error(self, status_code, exception=None, **kw):
        """Report an error as a JSON body.

        Not every exception passed here carries a log_message attribute,
        so guard the access to avoid raising AttributeError while
        reporting an error.
        """
        self.set_status(status_code)
        if exception:
            if hasattr(exception, 'log_message'):
                self.write({'error': exception.log_message})
            else:
                # Fall back to a generic marker rather than crashing.
                self.write({'error': 'error'})

    def write(self, chunk):
        """
        This is a monkey patch to RequestHandler to allow us to serialize also
        json list objects.
        """
        if isinstance(chunk, types.ListType):
            chunk = escape.json_encode(chunk)
            web.RequestHandler.write(self, chunk)
            self.set_header("Content-Type", "application/json")
        else:
            web.RequestHandler.write(self, chunk)
| import types
from cyclone import escape
from cyclone import web
class OONIBHandler(web.RequestHandler):
    """Base request handler with JSON error bodies and JSON list support."""
    def write_error(self, status_code, exception=None, **kw):
        self.set_status(status_code)
        # The exception may lack a log_message attribute (or be None), so
        # fall back to a generic marker instead of raising AttributeError.
        if hasattr(exception, 'log_message'):
            self.write({'error': exception.log_message})
        else:
            self.write({'error': 'error'})
    def write(self, chunk):
        """
        This is a monkey patch to RequestHandler to allow us to serialize also
        json list objects.
        """
        if isinstance(chunk, types.ListType):
            chunk = escape.json_encode(chunk)
            web.RequestHandler.write(self, chunk)
            self.set_header("Content-Type", "application/json")
        else:
            web.RequestHandler.write(self, chunk)
| Handle writing exceptions that don't have log_exception attribute | Handle writing exceptions that don't have log_exception attribute
| Python | bsd-2-clause | dstufft/ooni-backend,dstufft/ooni-backend |
8c90485e5cab6294a38cfc9332eda6fe8ca15483 | project/config.py | project/config.py | import os
config = {}
system_mongo_host = os.environ.get('MONGODB_PORT_27017_TCP_ADDR')
system_elastic_host = os.environ.get('ELASTIC_PORT_9300_TCP_ADDR')
config['HOST'] = ''
config['PORT'] = 5000
config['MONGODB_HOST'] = system_mongo_host if system_mongo_host else 'localhost'
config['MONGODB_PORT'] = 27017
config['ELASTIC_HOST'] = system_elastic_host if system_elastic_host else 'localhost'
config['ELASTIC_PORT'] = 9200
config['ACCEPTED_ORIGINS'] = ['http://104.236.77.225', 'http://localhost:3000']
| import os
config = {}
system_mongo_host = os.environ.get('MONGODB_PORT_27017_TCP_ADDR')
system_elastic_host = os.environ.get('ELASTIC_PORT_9300_TCP_ADDR')
config['HOST'] = ''
config['PORT'] = 5000
config['MONGODB_HOST'] = system_mongo_host if system_mongo_host else 'localhost'
config['MONGODB_PORT'] = 27017
config['ELASTIC_HOST'] = system_elastic_host if system_elastic_host else 'localhost'
config['ELASTIC_PORT'] = 9200
config['ACCEPTED_ORIGINS'] = ['http://beta.founderati.io', 'http://beta.thehookemup.com', 'http://104.236.77.225', 'http://localhost:3000']
| Add two new domains to whitelist for CORS. | Add two new domains to whitelist for CORS.
| Python | apache-2.0 | AustinStoneProjects/Founderati-Server,AustinStoneProjects/Founderati-Server |
616bd7c5ff8ba5fe5dd190a459b93980613a3ad4 | myuw_mobile/restclients/dao_implementation/hfs.py | myuw_mobile/restclients/dao_implementation/hfs.py | from os.path import dirname
from restclients.dao_implementation.mock import get_mockdata_url
from restclients.dao_implementation.live import get_con_pool, get_live_url
class File(object):
"""
This implementation returns mock/static content.
Use this DAO with this configuration:
RESTCLIENTS_HFS_DAO_CLASS = 'myuw_mobile.restclients.dao_implementation.hfs.File'
"""
def getURL(self, url, headers):
"""
Return the url for accessing the mock data in local file
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
return get_mockdata_url("hfs", "file", url, headers,
dir_base=dirname(__file__))
class Live(object):
"""
This DAO provides real data.
Access is restricted to localhost.
"""
pool = None
def getURL(self, url, headers):
"""
Return the absolute url for accessing live data
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
host = 'http://localhost/'
if Live.pool == None:
Live.pool = get_con_pool(host, None, None)
return get_live_url (Live.pool, 'GET',
host, url, headers=headers)
| from os.path import dirname
from restclients.dao_implementation.mock import get_mockdata_url
from restclients.dao_implementation.live import get_con_pool, get_live_url
import logging
from myuw_mobile.logger.logback import log_info
class File(object):
"""
This implementation returns mock/static content.
Use this DAO with this configuration:
RESTCLIENTS_HFS_DAO_CLASS = 'myuw_mobile.restclients.dao_implementation.hfs.File'
"""
def getURL(self, url, headers):
"""
Return the url for accessing the mock data in local file
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
return get_mockdata_url("hfs", "file", url, headers,
dir_base=dirname(__file__))
class Live(object):
"""
This DAO provides real data.
Access is restricted to localhost.
"""
logger = logging.getLogger('myuw_mobile.restclients.dao_implementation.hfs.Live')
pool = None
def getURL(self, url, headers):
"""
Return the absolute url for accessing live data
:param url:
in the format of "hfs/servlet/hfservices?sn=<student number>"
"""
host = 'http://localhost:80/'
if Live.pool is None:
Live.pool = get_con_pool(host, None, None,
socket_timeout=5.0,
max_pool_size=5)
log_info(Live.logger, Live.pool)
return get_live_url (Live.pool, 'GET',
host, url, headers=headers)
| Fix bug: must specify the port number. | Fix bug: must specify the port number.
| Python | apache-2.0 | uw-it-aca/myuw,uw-it-aca/myuw,fanglinfang/myuw,uw-it-aca/myuw,fanglinfang/myuw,fanglinfang/myuw,uw-it-aca/myuw |
882175afea0e2c35e2b223e15feb195a005f7d42 | common/config.py | common/config.py | #
# Poet Configurations
#
# client authentication token
AUTH = 'b9c39a336bb97a9c9bda2b82bdaacff3'
# directory to save output files to
ARCHIVE_DIR = 'archive'
#
# The below configs let you bake in the server IP and beacon interval
# into the final executable so it can simply be executed without supplying
# command line arguments.
#
# server IP
#
# if this is None, it *must* be specified as a command line argument
# when client is executed
#
# SERVER_IP = '1.2.3.4' # example
SERVER_IP = None
# client beacon interval
#
# if this is None, it *may* be specified as a command line argument,
# otherwise, it will take the default value
#
# BEACON_INTERVAL = 300 # example
BEACON_INTERVAL = None
| #
# Poet Configurations
#
# default client authentication token. change this to whatever you want!
AUTH = 'b9c39a336bb97a9c9bda2b82bdaacff3'
# directory to save output files to
ARCHIVE_DIR = 'archive'
#
# The below configs let you bake in the server IP and beacon interval
# into the final executable so it can simply be executed without supplying
# command line arguments.
#
# server IP
#
# if this is None, it *must* be specified as a command line argument
# when client is executed
#
# SERVER_IP = '1.2.3.4' # example
SERVER_IP = None
# client beacon interval
#
# if this is None, it *may* be specified as a command line argument,
# otherwise, it will take the default value
#
# BEACON_INTERVAL = 300 # example
BEACON_INTERVAL = None
| Add comment about changing auth token | Add comment about changing auth token
| Python | mit | mossberg/poet |
1262a213f5330ab3ac62581cd93a484eb72ebd60 | picaxe/urls.py | picaxe/urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.sites.models import Site
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'picaxe.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'photologue/', include('photologue.urls', namespace='photologue')),
)
admin.site.unregister(Site)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.sites.models import Site
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'picaxe.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^photologue/', include('photologue.urls', namespace='photologue')),
)
admin.site.unregister(Site)
| Add missing ^ to regex | Add missing ^ to regex
| Python | mit | TuinfeesT/PicAxe |
5d0fa6d6f66cce56b9704601c4399ca0adcc419a | programmingtheorems/python/theorem_of_selection.py | programmingtheorems/python/theorem_of_selection.py | #! /usr/bin/env python
# Copyright Lajos Katona
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def selection_brute(mylist, t):
for i, l in enumerate(mylist):
if t == l:
return i
return 0
def selection_pythonic(mylist, t):
return mylist.index(t)
if __name__ == '__main__':
mylist = [1, 2, 3, 4, 5]
print(selection_brute(mylist, 4))
print(selection_pythonic(mylist, 4)) | #! /usr/bin/env python
# Copyright Lajos Katona
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def selection_brute(mylist, t):
for i, l in enumerate(mylist):
if t == l:
return i
return 0
def selection_pythonic(mylist, t):
return mylist.index(t)
if __name__ == '__main__':
mylist = [1, 2, 3, 4, 5]
print(selection_brute(mylist, 4))
print(selection_pythonic(mylist, 4))
| Fix pep8 error with new newline | Fix pep8 error with new newline
Change-Id: I47b12c62eb1653bcbbe552464aab72c486bbd1cc
| Python | apache-2.0 | elajkat/hugradexam,elajkat/hugradexam |
296f7aa83244af1cc63c4f7cdeae2d2adf6ac51d | moodlefuse/exception.py | moodlefuse/exception.py | #!/usr/bin/env python
# encoding: utf-8
"""Class to handle Exceptions relating to MoodleFUSE actions
"""
from functools import wraps
from moodlefuse.core import config
class MoodleFuseException(Exception):
def __init__(self, debug_info):
exception_reason = "ERROR ENCOUNTERED: MoodleFUSE has encountered an error."
debug_info = debug_info
self.message = exception_reason + debug_info
def __str__(self):
return self.message
def throws_moodlefuse_error(moodlefuse_error):
def inner(f):
def wrapped(*args):
try:
return f(*args)
except Exception, e:
if config['DEBUG'] is False:
raise moodlefuse_error()
else:
raise e
return wraps(f)(wrapped)
return inner
| #!/usr/bin/env python
# encoding: utf-8
"""Class to handle Exceptions relating to MoodleFUSE actions
"""
from functools import wraps
from moodlefuse.core import config
class MoodleFuseException(Exception):
def __init__(self, debug_info):
_EXCEPTION_REASON = "ERROR ENCOUNTERED: MoodleFUSE has encountered an error."
self.message = _EXCEPTION_REASON + debug_info
def __str__(self):
return self.message
def throws_moodlefuse_error(moodlefuse_error):
def inner(f):
def wrapped(*args):
try:
return f(*args)
except Exception, e:
if config['DEBUG'] is False:
raise moodlefuse_error()
else:
raise e
return wraps(f)(wrapped)
return inner
| Change error message to local constant | Change error message to local constant
| Python | mit | BroganD1993/MoodleFUSE,BroganD1993/MoodleFUSE |
0e99654d606038098d45fb83cc40405742e43ae8 | readthedocs/builds/filters.py | readthedocs/builds/filters.py | from django.utils.translation import ugettext_lazy as _
import django_filters
from builds import constants
from builds.models import Build, Version
ANY_REPO = (
('', _('Any')),
)
BUILD_TYPES = ANY_REPO + constants.BUILD_TYPES
class VersionFilter(django_filters.FilterSet):
project = django_filters.CharFilter(name='project__name', lookup_type="icontains")
slug= django_filters.CharFilter(label=_("Slug"), name='slug', lookup_type='icontains')
class Meta:
model = Version
fields = ['project', 'slug']
class BuildFilter(django_filters.FilterSet):
date = django_filters.DateRangeFilter(label=_("Build Date"), name="date")
type = django_filters.ChoiceFilter(label=_("Build Type"), choices=BUILD_TYPES)
class Meta:
model = Build
fields = ['type', 'date', 'version', 'success']
| from django.utils.translation import ugettext_lazy as _
import django_filters
from builds import constants
from builds.models import Build, Version
ANY_REPO = (
('', _('Any')),
)
BUILD_TYPES = ANY_REPO + constants.BUILD_TYPES
class VersionFilter(django_filters.FilterSet):
project = django_filters.CharFilter(name='project__name', lookup_type="icontains")
slug= django_filters.CharFilter(label=_("Slug"), name='slug', lookup_type='icontains')
class Meta:
model = Version
fields = ['project', 'slug']
class BuildFilter(django_filters.FilterSet):
date = django_filters.DateRangeFilter(label=_("Build Date"), name="date")
type = django_filters.ChoiceFilter(label=_("Build Type"), choices=BUILD_TYPES)
class Meta:
model = Build
fields = ['type', 'date', 'success']
| Remove version from Build filter. | Remove version from Build filter.
| Python | mit | agjohnson/readthedocs.org,fujita-shintaro/readthedocs.org,GovReady/readthedocs.org,nyergler/pythonslides,Tazer/readthedocs.org,techtonik/readthedocs.org,takluyver/readthedocs.org,nyergler/pythonslides,GovReady/readthedocs.org,nikolas/readthedocs.org,gjtorikian/readthedocs.org,cgourlay/readthedocs.org,d0ugal/readthedocs.org,sid-kap/readthedocs.org,CedarLogic/readthedocs.org,gjtorikian/readthedocs.org,sils1297/readthedocs.org,singingwolfboy/readthedocs.org,kdkeyser/readthedocs.org,kenshinthebattosai/readthedocs.org,VishvajitP/readthedocs.org,hach-que/readthedocs.org,laplaceliu/readthedocs.org,KamranMackey/readthedocs.org,techtonik/readthedocs.org,kdkeyser/readthedocs.org,ojii/readthedocs.org,asampat3090/readthedocs.org,michaelmcandrew/readthedocs.org,dirn/readthedocs.org,attakei/readthedocs-oauth,LukasBoersma/readthedocs.org,rtfd/readthedocs.org,techtonik/readthedocs.org,tddv/readthedocs.org,sunnyzwh/readthedocs.org,emawind84/readthedocs.org,davidfischer/readthedocs.org,jerel/readthedocs.org,clarkperkins/readthedocs.org,mhils/readthedocs.org,takluyver/readthedocs.org,sils1297/readthedocs.org,Carreau/readthedocs.org,wijerasa/readthedocs.org,SteveViss/readthedocs.org,singingwolfboy/readthedocs.org,Tazer/readthedocs.org,pombredanne/readthedocs.org,rtfd/readthedocs.org,nyergler/pythonslides,nikolas/readthedocs.org,michaelmcandrew/readthedocs.org,CedarLogic/readthedocs.org,safwanrahman/readthedocs.org,takluyver/readthedocs.org,royalwang/readthedocs.org,espdev/readthedocs.org,hach-que/readthedocs.org,Carreau/readthedocs.org,takluyver/readthedocs.org,dirn/readthedocs.org,d0ugal/readthedocs.org,sid-kap/readthedocs.org,fujita-shintaro/readthedocs.org,kdkeyser/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,asampat3090/readthedocs.org,LukasBoersma/readthedocs.org,kenshinthebattosai/readthedocs.org,nyergler/pythonslides,titiushko/readthedocs.org,techtonik/readthedocs.org,wanghaven/readthedocs.org,stevepiercy/readthedocs.org,rtfd/readthedocs.org,sunny
zwh/readthedocs.org,mhils/readthedocs.org,d0ugal/readthedocs.org,titiushko/readthedocs.org,istresearch/readthedocs.org,SteveViss/readthedocs.org,mrshoki/readthedocs.org,raven47git/readthedocs.org,atsuyim/readthedocs.org,wanghaven/readthedocs.org,soulshake/readthedocs.org,agjohnson/readthedocs.org,Carreau/readthedocs.org,tddv/readthedocs.org,KamranMackey/readthedocs.org,laplaceliu/readthedocs.org,nikolas/readthedocs.org,kenwang76/readthedocs.org,clarkperkins/readthedocs.org,VishvajitP/readthedocs.org,jerel/readthedocs.org,kenshinthebattosai/readthedocs.org,hach-que/readthedocs.org,asampat3090/readthedocs.org,stevepiercy/readthedocs.org,VishvajitP/readthedocs.org,ojii/readthedocs.org,agjohnson/readthedocs.org,agjohnson/readthedocs.org,jerel/readthedocs.org,attakei/readthedocs-oauth,michaelmcandrew/readthedocs.org,istresearch/readthedocs.org,safwanrahman/readthedocs.org,asampat3090/readthedocs.org,wanghaven/readthedocs.org,nikolas/readthedocs.org,emawind84/readthedocs.org,rtfd/readthedocs.org,wijerasa/readthedocs.org,jerel/readthedocs.org,cgourlay/readthedocs.org,sils1297/readthedocs.org,ojii/readthedocs.org,stevepiercy/readthedocs.org,LukasBoersma/readthedocs.org,raven47git/readthedocs.org,clarkperkins/readthedocs.org,wijerasa/readthedocs.org,CedarLogic/readthedocs.org,GovReady/readthedocs.org,singingwolfboy/readthedocs.org,kenwang76/readthedocs.org,mrshoki/readthedocs.org,istresearch/readthedocs.org,dirn/readthedocs.org,mrshoki/readthedocs.org,kenwang76/readthedocs.org,mhils/readthedocs.org,Tazer/readthedocs.org,sils1297/readthedocs.org,cgourlay/readthedocs.org,royalwang/readthedocs.org,sid-kap/readthedocs.org,emawind84/readthedocs.org,mrshoki/readthedocs.org,pombredanne/readthedocs.org,SteveViss/readthedocs.org,attakei/readthedocs-oauth,ojii/readthedocs.org,davidfischer/readthedocs.org,istresearch/readthedocs.org,hach-que/readthedocs.org,raven47git/readthedocs.org,fujita-shintaro/readthedocs.org,titiushko/readthedocs.org,LukasBoersma/readthedocs.org,singingwolfboy/r
eadthedocs.org,royalwang/readthedocs.org,atsuyim/readthedocs.org,safwanrahman/readthedocs.org,VishvajitP/readthedocs.org,wanghaven/readthedocs.org,davidfischer/readthedocs.org,GovReady/readthedocs.org,clarkperkins/readthedocs.org,royalwang/readthedocs.org,atsuyim/readthedocs.org,dirn/readthedocs.org,sunnyzwh/readthedocs.org,stevepiercy/readthedocs.org,kdkeyser/readthedocs.org,gjtorikian/readthedocs.org,fujita-shintaro/readthedocs.org,sunnyzwh/readthedocs.org,wijerasa/readthedocs.org,attakei/readthedocs-oauth,michaelmcandrew/readthedocs.org,davidfischer/readthedocs.org,kenshinthebattosai/readthedocs.org,Tazer/readthedocs.org,atsuyim/readthedocs.org,KamranMackey/readthedocs.org,laplaceliu/readthedocs.org,pombredanne/readthedocs.org,CedarLogic/readthedocs.org,soulshake/readthedocs.org,cgourlay/readthedocs.org,d0ugal/readthedocs.org,emawind84/readthedocs.org,soulshake/readthedocs.org,espdev/readthedocs.org,kenwang76/readthedocs.org,espdev/readthedocs.org,raven47git/readthedocs.org,SteveViss/readthedocs.org,mhils/readthedocs.org,Carreau/readthedocs.org,laplaceliu/readthedocs.org,KamranMackey/readthedocs.org,sid-kap/readthedocs.org,soulshake/readthedocs.org,safwanrahman/readthedocs.org,espdev/readthedocs.org,gjtorikian/readthedocs.org,titiushko/readthedocs.org |
1778b3943ed55819ef38a46e8fdc417ed835176f | run.py | run.py | #!/usr/bin/env python
import os
import signal
import sys
from app.main import app, queues, sched
def _teardown(signal, frame):
sched.shutdown(wait=False)
for queue in queues.values():
queue.put(None)
queues.clear()
# Let the interrupt bubble up so that Flask/Werkzeug see it
raise KeyboardInterrupt
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == 'debug':
app.debug = True
signal.signal(signal.SIGINT, _teardown)
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port, use_reloader=False, threaded=True)
| #!/usr/bin/env python
import logging
import os
import signal
import sys
from app.main import app, queues, sched
def _teardown(signal, frame):
sched.shutdown(wait=False)
for queue in queues.values():
queue.put(None)
queues.clear()
# Let the interrupt bubble up so that Flask/Werkzeug see it
raise KeyboardInterrupt
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == 'debug':
logging.basicConfig()
app.debug = True
signal.signal(signal.SIGINT, _teardown)
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port, use_reloader=False, threaded=True)
| Configure logger in debug mode | Configure logger in debug mode
| Python | mit | martinp/jarvis2,mpolden/jarvis2,Foxboron/Frank,Foxboron/Frank,martinp/jarvis2,mpolden/jarvis2,martinp/jarvis2,Foxboron/Frank,mpolden/jarvis2 |
0b7636422c632172dfc68ea2a5f21ec649248c8c | nimp/commands/vs_build.py | nimp/commands/vs_build.py | # -*- coding: utf-8 -*-
from nimp.commands._command import *
from nimp.utilities.build import *
#-------------------------------------------------------------------------------
class VsBuildCommand(Command):
def __init__(self):
Command.__init__(self, 'vs-build', 'Builds a Visual Studio project')
#---------------------------------------------------------------------------
def configure_arguments(self, env, parser):
parser.add_argument('solution',
help = 'Solution file',
metavar = '<FILE>')
parser.add_argument('project',
help = 'Project',
metavar = '<FILE>',
default = 'None')
parser.add_argument('--target',
help = 'Target',
metavar = '<TARGET>',
default = 'Build')
parser.add_argument('-c',
'--configuration',
help = 'configuration to build',
metavar = '<configuration>',
default = 'release')
parser.add_argument('-p',
'--platform',
help = 'platform to build',
metavar = '<platform>',
default = 'Win64')
parser.add_argument('--vs-version',
help = 'VS version to use',
metavar = '<VERSION>',
default = '12')
return True
#---------------------------------------------------------------------------
def run(self, env):
return vsbuild(env.solution, env.platform, env.configuration, env.project, env.vs_version, env.target)
| # -*- coding: utf-8 -*-
from nimp.commands._command import *
from nimp.utilities.build import *
#-------------------------------------------------------------------------------
class VsBuildCommand(Command):
def __init__(self):
Command.__init__(self, 'vs-build', 'Builds a Visual Studio project')
#---------------------------------------------------------------------------
def configure_arguments(self, env, parser):
parser.add_argument('solution',
help = 'Solution file',
metavar = '<FILE>')
parser.add_argument('project',
help = 'Project',
metavar = '<FILE>',
default = 'None')
parser.add_argument('--target',
help = 'Target',
metavar = '<TARGET>',
default = 'Build')
parser.add_argument('-c',
'--vs-configuration',
help = 'configuration to build',
metavar = '<vs-configuration>',
default = 'release')
parser.add_argument('-p',
'--vs-platform',
help = 'platform to build',
metavar = '<vs-platform>',
default = 'Win64')
parser.add_argument('--vs-version',
help = 'VS version to use',
metavar = '<VERSION>',
default = '12')
return True
#---------------------------------------------------------------------------
def run(self, env):
return vsbuild(env.solution, env.vs_platform, env.vs_configuration, env.project, env.vs_version, env.target)
| Use separate variable names for Visual Studio config/platform. | Use separate variable names for Visual Studio config/platform.
| Python | mit | dontnod/nimp |
84b01f0015163dc016293162f1525be76329e602 | pythonforandroid/recipes/cryptography/__init__.py | pythonforandroid/recipes/cryptography/__init__.py | from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe
class CryptographyRecipe(CompiledComponentsPythonRecipe):
name = 'cryptography'
version = '2.4.2'
url = 'https://github.com/pyca/cryptography/archive/{version}.tar.gz'
depends = ['openssl', 'idna', 'asn1crypto', 'six', 'setuptools',
'enum34', 'ipaddress', 'cffi']
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch):
env = super(CryptographyRecipe, self).get_recipe_env(arch)
openssl_recipe = Recipe.get_recipe('openssl', self.ctx)
env['CFLAGS'] += openssl_recipe.include_flags(arch)
env['LDFLAGS'] += openssl_recipe.link_flags(arch)
return env
recipe = CryptographyRecipe()
| from pythonforandroid.recipe import CompiledComponentsPythonRecipe, Recipe
class CryptographyRecipe(CompiledComponentsPythonRecipe):
name = 'cryptography'
version = '2.4.2'
url = 'https://github.com/pyca/cryptography/archive/{version}.tar.gz'
depends = ['openssl', 'idna', 'asn1crypto', 'six', 'setuptools',
'enum34', 'ipaddress', 'cffi']
call_hostpython_via_targetpython = False
def get_recipe_env(self, arch):
env = super(CryptographyRecipe, self).get_recipe_env(arch)
openssl_recipe = Recipe.get_recipe('openssl', self.ctx)
env['CFLAGS'] += openssl_recipe.include_flags(arch)
env['LDFLAGS'] += openssl_recipe.link_dirs_flags(arch)
env['LIBS'] = openssl_recipe.link_libs_flags()
return env
recipe = CryptographyRecipe()
| Move libraries from LDFLAGS to LIBS for cryptography recipe | Move libraries from LDFLAGS to LIBS for cryptography recipe
Because this is how you are supposed to do it, you must use LDFLAGS for linker flags and LDLIBS (or the equivalent LOADLIBES) for the libraries
| Python | mit | kronenpj/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,germn/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,germn/python-for-android,rnixx/python-for-android,kivy/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,germn/python-for-android,kronenpj/python-for-android,kivy/python-for-android,germn/python-for-android,kivy/python-for-android,kronenpj/python-for-android,PKRoma/python-for-android,kivy/python-for-android,germn/python-for-android,rnixx/python-for-android,kivy/python-for-android,germn/python-for-android,kronenpj/python-for-android,PKRoma/python-for-android,rnixx/python-for-android |
72301067306d6baf4aab0315a769c75dd585b46d | pi_setup/boot_config.py | pi_setup/boot_config.py | #!/usr/bin/env python
from utils import file_templates
from utils.validation import is_valid_gpu_mem
def main():
gpu_mem = 0
while gpu_mem == 0:
user_input = raw_input("Enter GPU memory in MB (16/32/64/128/256): ")
if is_valid_gpu_mem(user_input):
gpu_mem = user_input
else:
print("Acceptable memory values are: 16/32/64/128/256")
update_file('/boot/config.txt', gpu_mem)
def update_file(path, gpu_mem):
data = {
'gpu_mem': gpu_mem
}
template_name = path.split('/')[-1]
new_file_data = file_templates.build(template_name, data)
with open(path, 'w') as f:
f.write(new_file_data)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
from utils import file_templates
from utils.validation import is_valid_gpu_mem
def main():
user_input = raw_input("Want to change the GPU memory split? (Y/N): ")
if user_input == 'Y':
gpu_mem = 0
while gpu_mem == 0:
mem_split = raw_input("Enter GPU memory in MB (16/32/64/128/256): ")
if is_valid_gpu_mem(mem_split):
gpu_mem = mem_split
else:
print("Acceptable memory values are: 16/32/64/128/256")
update_file('/boot/config.txt', gpu_mem)
else:
print("Skipping GPU memory split...")
def update_file(path, gpu_mem):
data = {
'gpu_mem': gpu_mem
}
template_name = path.split('/')[-1]
new_file_data = file_templates.build(template_name, data)
with open(path, 'w') as f:
f.write(new_file_data)
if __name__ == '__main__':
main()
| Make GPU mem split optional | Make GPU mem split optional
| Python | mit | projectweekend/Pi-Setup,projectweekend/Pi-Setup |
2fe5f960f4998a0337bceabd7db930ac5d5a4fd1 | qipipe/qiprofile/helpers.py | qipipe/qiprofile/helpers.py | import re
from datetime import datetime
TRAILING_NUM_REGEX = re.compile("(\d+)$")
"""A regular expression to extract the trailing number from a string."""
DATE_REGEX = re.compile("(0?\d|1[12])/(0?\d|[12]\d|3[12])/((19|20)?\d\d)$")
class DateError(Exception):
pass
def trailing_number(s):
"""
:param s: the input string
:return: the trailing number in the string, or None if there
is none
"""
match = TRAILING_NUM_REGEX.search(s)
if match:
return int(match.group(1))
def default_parser(attribute):
"""
Retuns the default parser, determined as follows:
* If the attribute ends in ``date``, then a MM/DD/YYYY datetime parser
:param attribute: the row attribute
:return: the function or lambda value parser, or None if none
"""
if attribute.endswith('date'):
return _parse_date
def _parse_date(s):
"""
:param s: the input date string
:return: the parsed datetime
:rtype: datetime
"""
match = DATE_REGEX.match(s)
if not match:
raise DateError("Date is not in a supported format: %s" % s)
m, d, y = map(int, match.groups()[:3])
if y < 20:
y += 2000
elif y < 100:
y += 1900
return datetime(y, m, d)
| import re
from datetime import datetime
TRAILING_NUM_REGEX = re.compile("(\d+)$")
"""A regular expression to extract the trailing number from a string."""
DATE_REGEX = re.compile("(0?\d|1[12])/(0?\d|[12]\d|3[12])/((19|20)?\d\d)$")
class DateError(Exception):
pass
def trailing_number(s):
"""
:param s: the input string
:return: the trailing number in the string, or None if there
is none
"""
match = TRAILING_NUM_REGEX.search(s)
if match:
return int(match.group(1))
def default_parser(attribute):
"""
Retuns the default parser, determined as follows:
* If the attribute ends in ``date``, then a MM/DD/YYYY datetime parser
:param attribute: the row attribute
:return: the value parser function, or None if none
"""
if attribute.endswith('date'):
return _parse_date
def _parse_date(s):
"""
:param s: the input date string
:return: the parsed datetime
:rtype: datetime
"""
match = DATE_REGEX.match(s)
if not match:
raise DateError("Date is not in a supported format: %s" % s)
m, d, y = map(int, match.groups()[:3])
if y < 20:
y += 2000
elif y < 100:
y += 1900
return datetime(y, m, d)
| Change lambda to function in doc. | Change lambda to function in doc.
| Python | bsd-2-clause | ohsu-qin/qipipe |
2f360d9986c13adaaf670b80b27dad995823b849 | bandstructure/system/tightbindingsystem.py | bandstructure/system/tightbindingsystem.py | import numpy as np
from .system import System
class TightBindingSystem(System):
def setDefaultParams(self):
self.params.setdefault('t', 1) # nearest neighbor tunneling strength
self.params.setdefault('t2', 0) # next-nearest neighbor ..
def tunnelingRate(self, dr):
t = self.get("t")
t2 = self.get("t2")
# Nearest neighbors:
# Only with newest numpy version:
# nn = np.linalg.norm(dr, axis=3) == 1 # TODO! get the real nearest neighbor distance
# nnn = np.linalg.norm(dr, axis=3) == 2 # TODO!
nn = np.sqrt(np.sum(dr ** 2, axis=3)) == 1 # TODO! get the real nearest neighbor distance
nnn = np.sqrt(np.sum(dr ** 2, axis=3)) == 2 # TODO
# Orbital matrix
m = np.array([[1, 0], [0, -1]])
# m = np.array([-t])
return t * m * nn[:, :, :, None, None] + t2 * m * nnn[:, :, :, None, None]
| import numpy as np
from .system import System
class TightBindingSystem(System):
def setDefaultParams(self):
self.params.setdefault('t', 1) # nearest neighbor tunneling strength
self.params.setdefault('t2', 0) # next-nearest neighbor ..
def tunnelingRate(self, dr):
t = self.get("t")
t2 = self.get("t2")
# Orbital matrix
m = np.array([[1, 0], [0, -1]])
# m = np.array([-t])
nn = dr.getNeighborsMask(1)
nnn = dr.getNeighborsMask(2)
return t * m * nn[:, :, :, None, None] + t2 * m * nnn[:, :, :, None, None]
| Use new functions for getting (next) nearest neighbors | Use new functions for getting (next) nearest neighbors
| Python | mit | sharkdp/bandstructure,sharkdp/bandstructure |
086281e93c1868418f8845aa9366c75319b0e8c7 | newsman/test/test_rss_parser.py | newsman/test/test_rss_parser.py | #!/usr/bin/env python
#-*- coding: utf-8 -*-
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
sys.path.append('..')
from analyzer import rss_parser
#entries = rss_parser.parse(feed_link='http://news.yahoo.com/rss/us', language='en')
#entries = rss_parser.parse(feed_link='http://www.engadget.com/rss.xml', language='en')
#entries = rss_parser.parse(feed_link='http://rss.cnn.com/rss/edition_sport.rss', language='en')
entries = rss_parser.parse(feed_link='http://news.yahoo.com/rss/sports', language='en')
entries = rss_parser.parse(feed_link='http://rss.detik.com/index.php/sport', feed_id="5264f7cb0ff6cb1898609028", language='in', categories={"ID::Olahraga" : "1"})
print len(entries)
print entries[0]
| #!/usr/bin/env python
#-*- coding: utf-8 -*-
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
sys.path.append('..')
from analyzer import rss_parser
#entries = rss_parser.parse(feed_link='http://news.yahoo.com/rss/us', language='en')
#entries = rss_parser.parse(feed_link='http://www.engadget.com/rss.xml', language='en')
#entries = rss_parser.parse(feed_link='http://rss.cnn.com/rss/edition_sport.rss', language='en')
#entries = rss_parser.parse(feed_link='http://news.yahoo.com/rss/sports', language='en')
entries = rss_parser.parse(feed_link='http://rss.detik.com/index.php/sport', feed_id="5264f7cb0ff6cb1898609028", language='in', categories={"ID::Olahraga" : "1"})
print len(entries)
print entries[0]
| Comment out a test case | Comment out a test case
| Python | agpl-3.0 | chengdujin/newsman,chengdujin/newsman,chengdujin/newsman |
611f95b0c72e436ebf056329349216625c61e133 | wagtail/tests/testapp/migrations/0009_defaultstreampage.py | wagtail/tests/testapp/migrations/0009_defaultstreampage.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-21 11:37
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
import wagtail.wagtailimages.blocks
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0030_index_on_pagerevision_created_at'),
('tests', '0008_inlinestreampage_inlinestreampagesection'),
]
operations = [
migrations.CreateModel(
name='DefaultStreamPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('body', wagtail.wagtailcore.fields.StreamField((('text', wagtail.wagtailcore.blocks.CharBlock()), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock())), default='')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-21 11:37
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
import wagtail.wagtailimages.blocks
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0029_unicode_slugfield_dj19'),
('tests', '0008_inlinestreampage_inlinestreampagesection'),
]
operations = [
migrations.CreateModel(
name='DefaultStreamPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('body', wagtail.wagtailcore.fields.StreamField((('text', wagtail.wagtailcore.blocks.CharBlock()), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('image', wagtail.wagtailimages.blocks.ImageChooserBlock())), default='')),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
]
| Adjust testapp migration dependency to be valid on 1.6.x | Adjust testapp migration dependency to be valid on 1.6.x
| Python | bsd-3-clause | mixxorz/wagtail,nutztherookie/wagtail,rsalmaso/wagtail,torchbox/wagtail,chrxr/wagtail,gasman/wagtail,zerolab/wagtail,Toshakins/wagtail,takeflight/wagtail,FlipperPA/wagtail,iansprice/wagtail,takeflight/wagtail,wagtail/wagtail,nilnvoid/wagtail,nealtodd/wagtail,FlipperPA/wagtail,nilnvoid/wagtail,chrxr/wagtail,takeflight/wagtail,wagtail/wagtail,iansprice/wagtail,iansprice/wagtail,thenewguy/wagtail,nimasmi/wagtail,gasman/wagtail,Toshakins/wagtail,torchbox/wagtail,nealtodd/wagtail,mixxorz/wagtail,wagtail/wagtail,mixxorz/wagtail,nimasmi/wagtail,jnns/wagtail,nutztherookie/wagtail,timorieber/wagtail,kaedroho/wagtail,rsalmaso/wagtail,kaedroho/wagtail,mixxorz/wagtail,chrxr/wagtail,FlipperPA/wagtail,nealtodd/wagtail,rsalmaso/wagtail,torchbox/wagtail,nilnvoid/wagtail,jnns/wagtail,nimasmi/wagtail,zerolab/wagtail,gasman/wagtail,mikedingjan/wagtail,Toshakins/wagtail,rsalmaso/wagtail,timorieber/wagtail,jnns/wagtail,nealtodd/wagtail,iansprice/wagtail,torchbox/wagtail,nimasmi/wagtail,mixxorz/wagtail,mikedingjan/wagtail,nutztherookie/wagtail,kaedroho/wagtail,Toshakins/wagtail,zerolab/wagtail,kaedroho/wagtail,thenewguy/wagtail,thenewguy/wagtail,timorieber/wagtail,chrxr/wagtail,mikedingjan/wagtail,gasman/wagtail,rsalmaso/wagtail,zerolab/wagtail,gasman/wagtail,nutztherookie/wagtail,wagtail/wagtail,zerolab/wagtail,kaedroho/wagtail,wagtail/wagtail,FlipperPA/wagtail,jnns/wagtail,nilnvoid/wagtail,mikedingjan/wagtail,timorieber/wagtail,thenewguy/wagtail,takeflight/wagtail,thenewguy/wagtail |
c5946e378147f6d4d42c7a3e531388e6203f29e4 | fantasyStocks/static/stockCleaner.py | fantasyStocks/static/stockCleaner.py | import json
with open("stocks.json") as f:
| from pprint import pprint
import json
import re
REGEXP = re.compile("(?P<symbol>[A-Z]{1,4}).*")
with open("stocks.json") as f:
l = json.loads(f.read())
out = []
for i in l:
if not "^" in i["symbol"]:
out.append(i)
with open("newStocks.json", "w") as w:
w.write(json.dumps(out))
| Write script to remove duplicates from stocks.json | Write script to remove duplicates from stocks.json
| Python | apache-2.0 | ddsnowboard/FantasyStocks,ddsnowboard/FantasyStocks,ddsnowboard/FantasyStocks |
d1c3f195e455b926429aadf84cfd9fc51db2802f | fluent_contents/tests/test_models.py | fluent_contents/tests/test_models.py | from django.contrib.contenttypes.models import ContentType
from fluent_contents.models import ContentItem
from fluent_contents.tests.utils import AppTestCase
class ModelTests(AppTestCase):
"""
Testing the data model.
"""
def test_stale_model_str(self):
"""
No matter what, the ContentItem.__str__() should work.
This would break the admin delete screen otherwise.
"""
c = ContentType()
c.save()
a = ContentItem(polymorphic_ctype=c)
self.assertEqual(str(a), "'(type deleted) 0' in 'None None'")
| import django
from django.contrib.contenttypes.models import ContentType
from fluent_contents.models import ContentItem
from fluent_contents.tests.utils import AppTestCase
class ModelTests(AppTestCase):
"""
Testing the data model.
"""
def test_stale_model_str(self):
"""
No matter what, the ContentItem.__str__() should work.
This would break the admin delete screen otherwise.
"""
c = ContentType()
if django.VERSION >= (1, 8):
c.save()
a = ContentItem(polymorphic_ctype=c)
self.assertEqual(str(a), "'(type deleted) 0' in 'None None'")
| Improve tests for older Django versions | Improve tests for older Django versions
| Python | apache-2.0 | edoburu/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents |
d677027fb2e99305ec618b42c0bd96fa7f41125d | armaadmin/interface/common.py | armaadmin/interface/common.py | import base64
import os
from armaadmin import users, web
class AuthorizedHandler(web.HTTPHandler):
auth = [ 'Basic', 'Key' ]
realm = 'unknown'
def respond(self):
auth_header = self.request.headers.get('Authorization')
if not auth_header:
return unauthorized()
try:
self.auth_type, self.auth_string = auth_header.split(' ', 1)
#Ignore bad Authorization headers
except:
return unauthorized()
if not self.auth_type in self.auth:
return unauthorized()
web.HTTPHandler.respond(self)
def unauthorized(self):
self.response.headers.set('WWW-Authenticate', ','.join(self.auth) + ' realm="' + self.realm + '"')
raise web.HTTPError(401)
class PageHandler(web.HTTPHandler):
page = 'index.html'
def do_get(self):
with open(os.path.dirname(__file__) + '/html/' + self.page, 'r') as file:
self.response.headers.set('Content-Type', 'text/html')
return 200, file.read()
| import base64
import os
from armaadmin import users, web
class AuthorizedHandler(web.HTTPHandler):
auth = [ 'Basic', 'Key' ]
realm = 'unknown'
def respond(self):
auth_header = self.request.headers.get('Authorization')
if not auth_header:
return unauthorized()
try:
self.auth_type, self.auth_string = auth_header.split(' ', 1)
#Ignore bad Authorization headers
except:
return unauthorized()
if not self.auth_type in self.auth:
return unauthorized()
web.HTTPHandler.respond(self)
def unauthorized(self):
self.response.headers.set('WWW-Authenticate', ','.join(self.auth) + ' realm="' + self.realm + '"')
raise web.HTTPError(401)
class PageHandler(web.HTTPHandler):
page = 'index.html'
def do_get(self):
with open(os.path.dirname(__file__) + '/html/' + self.page, 'r') as file:
self.response.headers.set('Content-Type', 'text/html')
return 200, file
| Use stream for lower memory usage | Use stream for lower memory usage
| Python | mit | fkmclane/MCP,fkmclane/MCP,fkmclane/MCP,fkmclane/MCP |
2c8b60569d20a350b33f3c5e8ba00bdc3d9bbee4 | ask_sweden/lambda_function.py | ask_sweden/lambda_function.py | import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
from ask import alexa
def lambda_handler(request_obj, context=None):
return alexa.route_request(request_obj)
@alexa.default
def default_handler(request):
logger.info('default_handler')
return alexa.respond('There were 42 accidents in 2016.')
@alexa.request("LaunchRequest")
def launch_request_handler(request):
logger.info('launch_request_handler')
return alexa.create_response(message='You can ask me about car accidents.')
@alexa.request("SessionEndedRequest")
def session_ended_request_handler(request):
logger.info('session_ended_request_handler')
return alexa.create_response(message="Goodbye!")
@alexa.intent('AMAZON.CancelIntent')
def cancel_intent_handler(request):
logger.info('cancel_intent_handler')
return alexa.create_response(message='ok', end_session=True)
@alexa.intent('AMAZON.HelpIntent')
def help_intent_handler(request):
logger.info('help_intent_handler')
return alexa.create_response(message='You can ask me about car accidents.')
@alexa.intent('AMAZON.StopIntent')
def stop_intent_handler(request):
logger.info('stop_intent_handler')
return alexa.create_response(message='ok', end_session=True)
| import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
from ask import alexa
def lambda_handler(request_obj, context=None):
return alexa.route_request(request_obj)
@alexa.default
def default_handler(request):
logger.info('default_handler')
return alexa.respond('There were 42 accidents in 2016.')
@alexa.request("LaunchRequest")
def launch_request_handler(request):
logger.info('launch_request_handler')
return alexa.respond('You can ask me about car accidents.')
@alexa.request("SessionEndedRequest")
def session_ended_request_handler(request):
logger.info('session_ended_request_handler')
return alexa.respond('Goodbye.')
@alexa.intent('AMAZON.CancelIntent')
def cancel_intent_handler(request):
logger.info('cancel_intent_handler')
return alexa.respond('Okay.', end_session=True)
@alexa.intent('AMAZON.HelpIntent')
def help_intent_handler(request):
logger.info('help_intent_handler')
return alexa.respond('You can ask me about car accidents.')
@alexa.intent('AMAZON.StopIntent')
def stop_intent_handler(request):
logger.info('stop_intent_handler')
return alexa.respond('Okay.', end_session=True)
| Use respond instead of create_response | Use respond instead of create_response
| Python | mit | geoaxis/ask-sweden,geoaxis/ask-sweden |
aa82f91d220e8985c7f6dc68433ad65e70a71d15 | froide/foirequest/tests/test_mail.py | froide/foirequest/tests/test_mail.py | # -*- coding: utf-8 -*-
from __future__ import with_statement
from django.test import TestCase
from foirequest.tasks import _process_mail
from foirequest.models import FoiRequest
class MailTest(TestCase):
fixtures = ['publicbodies.json', "foirequest.json"]
def test_working(self):
with file("foirequest/tests/test_mail_01.txt") as f:
_process_mail(f.read())
request = FoiRequest.objects.get_by_secret_mail("[email protected]")
messages = request.foimessage_set.all()
self.assertEqual(len(messages), 2)
def test_working_with_attachment(self):
with file("foirequest/tests/test_mail_02.txt") as f:
_process_mail(f.read())
request = FoiRequest.objects.get_by_secret_mail("[email protected]")
messages = request.foimessage_set.all()
self.assertEqual(len(messages), 2)
self.assertEqual(messages[1].subject, u"Fwd: Informationsfreiheitsgesetz des Bundes, Antragsvordruck für Open Data")
self.assertEqual(len(message[1].attachments), 1)
| # -*- coding: utf-8 -*-
from __future__ import with_statement
from django.test import TestCase
from foirequest.tasks import _process_mail
from foirequest.models import FoiRequest
class MailTest(TestCase):
fixtures = ['publicbodies.json', "foirequest.json"]
def test_working(self):
with file("foirequest/tests/test_mail_01.txt") as f:
_process_mail(f.read())
request = FoiRequest.objects.get_by_secret_mail("[email protected]")
messages = request.foimessage_set.all()
self.assertEqual(len(messages), 2)
def test_working_with_attachment(self):
with file("foirequest/tests/test_mail_02.txt") as f:
_process_mail(f.read())
request = FoiRequest.objects.get_by_secret_mail("[email protected]")
messages = request.foimessage_set.all()
self.assertEqual(len(messages), 2)
self.assertEqual(messages[1].subject, u"Fwd: Informationsfreiheitsgesetz des Bundes, Antragsvordruck für Open Data")
self.assertEqual(len(messages[1].attachments), 1)
self.assertEqual(messages[1].attachments[0].name, u"TI - IFG-Antrag, Vordruck.docx")
| Test for attachment in mail test | Test for attachment in mail test | Python | mit | catcosmo/froide,okfse/froide,fin/froide,stefanw/froide,catcosmo/froide,fin/froide,LilithWittmann/froide,LilithWittmann/froide,okfse/froide,LilithWittmann/froide,ryankanno/froide,stefanw/froide,LilithWittmann/froide,catcosmo/froide,catcosmo/froide,ryankanno/froide,CodeforHawaii/froide,okfse/froide,ryankanno/froide,ryankanno/froide,CodeforHawaii/froide,okfse/froide,ryankanno/froide,stefanw/froide,okfse/froide,CodeforHawaii/froide,stefanw/froide,CodeforHawaii/froide,CodeforHawaii/froide,LilithWittmann/froide,fin/froide,stefanw/froide,catcosmo/froide,fin/froide |
20f0d90f5c64322864ad5fda4b4c9314e6c1cb11 | run.py | run.py | #!/usr/bin/env python
# coding=utf-8
import sys
from kitchen.text.converters import getwriter
from utils.log import getLogger, open_log, close_log
from utils.misc import output_exception
from system.factory_manager import Manager
sys.stdout = getwriter('utf-8')(sys.stdout)
sys.stderr = getwriter('utf-8')(sys.stderr)
open_log("output.log")
logger = getLogger("System")
logger.info("Starting up..")
try:
manager = Manager()
except Exception:
logger.critical("Runtime error - process cannot continue!")
output_exception(logger)
finally:
close_log("output.log")
try:
raw_input("Press enter to exit.")
except:
pass
| #!/usr/bin/env python
# coding=utf-8
import os
import sys
from kitchen.text.converters import getwriter
from utils.log import getLogger, open_log, close_log
from utils.misc import output_exception
from system.factory_manager import Manager
sys.stdout = getwriter('utf-8')(sys.stdout)
sys.stderr = getwriter('utf-8')(sys.stderr)
if not os.path.exists("logs"):
os.mkdir("logs")
open_log("output.log")
logger = getLogger("System")
logger.info("Starting up..")
try:
manager = Manager()
except Exception:
logger.critical("Runtime error - process cannot continue!")
output_exception(logger)
finally:
close_log("output.log")
try:
raw_input("Press enter to exit.")
except:
pass
| Create logs folder if it doesn't exist (to prevent errors) | Create logs folder if it doesn't exist (to prevent errors)
| Python | artistic-2.0 | UltrosBot/Ultros,UltrosBot/Ultros |
80215a593c2fdcf0a0ae8b1c61c4342faffd6dac | run.py | run.py | import bb2gh
import time
config_yaml = 'config.yaml'
for issue_id in range(1, 500):
while True:
try:
bb2gh.migrate(config_yaml, verbose=True, issue_ids=[issue_id])
except Exception as inst:
print 'issue_id',issue_id
print type(inst)
print inst.data['message']
print 'waiting for 60 seconds'
print
time.sleep(60)
else:
break
| import bb2gh
import time
config_yaml = 'config.yaml'
for issue_id in range(190, 500):
while True:
try:
bb2gh.migrate(config_yaml, verbose=True, issue_ids=[issue_id])
except Exception as inst:
print 'issue_id',issue_id
print type(inst)
print inst
print 'waiting for 60 seconds'
print
time.sleep(60)
else:
break
| Fix bug, 'message' key throwing error. | Fix bug, 'message' key throwing error.
| Python | mit | wd15/bb2gh |
ba983dea1e20409d403a86d62c300ea3d257b58a | parserscripts/phage.py | parserscripts/phage.py | import re
class Phage:
supported_databases = {
# European Nucleotide Archive phage database
"ENA": r"^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$",
# National Center for Biotechnology Information phage database
"NCBI": r"^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$",
# Actinobacteriophage Database
"AD": r"^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$"
}
def __init__(self, raw_text, phage_finder):
self.raw = raw_text.strip()
self.refseq = None
self.name = None
self.db = None
self._parse_phage(raw_text, phage_finder)
def _parse_phage(self, raw_text, phage_finder):
for db, regex in Phage.supported_databases.items():
match = re.search(regex, raw_text)
if match is not None:
if db is not "AD":
self.name = match.group(2)
self.refseq = match.group(1)
else:
short_name = match.group(1)
cluster = match.group(2)
self.name = "Mycobacteriophage " + short_name
self.refseq = phage_finder.find_by_phage(short_name,
cluster)
self.db = db
| import re
class Phage:
SUPPORTED_DATABASES = {
# European Nucleotide Archive phage database
"ENA": r"^gi\|[0-9]+\|ref\|([^\|]+)\|\ ([^,]+)[^$]*$",
# National Center for Biotechnology Information phage database
"NCBI": r"^ENA\|([^\|]+)\|[^\ ]+\ ([^,]+)[^$]*$",
# Actinobacteriophage Database
"AD": r"^([^\ ]+)\ [^,]*,[^,]*,\ Cluster\ ([^$]+)$"
}
def __init__(self, raw_text, phage_finder):
self.raw = raw_text.strip()
self.refseq = None
self.name = None
self.db = None
self._parse_phage(raw_text, phage_finder)
def _parse_phage(self, raw_text, phage_finder):
for db, regex in Phage.SUPPORTED_DATABASES.items():
match = re.search(regex, raw_text)
if match is not None:
if db is not "AD":
self.name = match.group(2)
self.refseq = match.group(1)
else:
short_name = match.group(1)
cluster = match.group(2)
self.name = "Mycobacteriophage " + short_name
self.refseq = phage_finder.find_by_phage(short_name,
cluster)
self.db = db
| Rename to follow constant naming conventions | Rename to follow constant naming conventions
| Python | mit | mbonsma/phageParser,mbonsma/phageParser,phageParser/phageParser,mbonsma/phageParser,phageParser/phageParser,goyalsid/phageParser,goyalsid/phageParser,phageParser/phageParser,phageParser/phageParser,mbonsma/phageParser,goyalsid/phageParser |
8c05cb85c47db892dd13abbd91b3948c09b9a954 | statsmodels/tools/__init__.py | statsmodels/tools/__init__.py | from tools import add_constant, categorical
from datautils import Dataset
from statsmodels import NoseWrapper as Tester
test = Tester().test
| from tools import add_constant, categorical
from statsmodels import NoseWrapper as Tester
test = Tester().test
| Remove import of moved file | REF: Remove import of moved file
| Python | bsd-3-clause | josef-pkt/statsmodels,adammenges/statsmodels,saketkc/statsmodels,DonBeo/statsmodels,edhuckle/statsmodels,saketkc/statsmodels,wkfwkf/statsmodels,wzbozon/statsmodels,huongttlan/statsmodels,kiyoto/statsmodels,astocko/statsmodels,musically-ut/statsmodels,bsipocz/statsmodels,wwf5067/statsmodels,jstoxrocky/statsmodels,cbmoore/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,bzero/statsmodels,yl565/statsmodels,ChadFulton/statsmodels,nguyentu1602/statsmodels,saketkc/statsmodels,astocko/statsmodels,bert9bert/statsmodels,DonBeo/statsmodels,Averroes/statsmodels,gef756/statsmodels,edhuckle/statsmodels,jseabold/statsmodels,waynenilsen/statsmodels,hainm/statsmodels,bashtage/statsmodels,nvoron23/statsmodels,huongttlan/statsmodels,detrout/debian-statsmodels,yarikoptic/pystatsmodels,bavardage/statsmodels,wzbozon/statsmodels,YihaoLu/statsmodels,phobson/statsmodels,rgommers/statsmodels,YihaoLu/statsmodels,bavardage/statsmodels,wwf5067/statsmodels,bsipocz/statsmodels,edhuckle/statsmodels,statsmodels/statsmodels,nvoron23/statsmodels,adammenges/statsmodels,wwf5067/statsmodels,yl565/statsmodels,alekz112/statsmodels,waynenilsen/statsmodels,bert9bert/statsmodels,detrout/debian-statsmodels,alekz112/statsmodels,wzbozon/statsmodels,jseabold/statsmodels,rgommers/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,YihaoLu/statsmodels,ChadFulton/statsmodels,bavardage/statsmodels,musically-ut/statsmodels,nguyentu1602/statsmodels,bsipocz/statsmodels,waynenilsen/statsmodels,nvoron23/statsmodels,huongttlan/statsmodels,bashtage/statsmodels,Averroes/statsmodels,adammenges/statsmodels,hlin117/statsmodels,wwf5067/statsmodels,gef756/statsmodels,cbmoore/statsmodels,hainm/statsmodels,wdurhamh/statsmodels,wdurhamh/statsmodels,josef-pkt/statsmodels,phobson/statsmodels,alekz112/statsmodels,bsipocz/statsmodels,saketkc/statsmodels,wdurhamh/statsmodels,musically-ut/statsmodels,kiyoto/statsmodels,phobson/statsmodels,wdurhamh/statsmodels,gef756/statsmodels,kiyoto/statsmodels,st
atsmodels/statsmodels,rgommers/statsmodels,josef-pkt/statsmodels,hainm/statsmodels,edhuckle/statsmodels,hlin117/statsmodels,gef756/statsmodels,josef-pkt/statsmodels,cbmoore/statsmodels,gef756/statsmodels,YihaoLu/statsmodels,statsmodels/statsmodels,hlin117/statsmodels,bert9bert/statsmodels,edhuckle/statsmodels,hlin117/statsmodels,jstoxrocky/statsmodels,bzero/statsmodels,yarikoptic/pystatsmodels,yl565/statsmodels,saketkc/statsmodels,kiyoto/statsmodels,huongttlan/statsmodels,Averroes/statsmodels,nvoron23/statsmodels,astocko/statsmodels,wzbozon/statsmodels,bzero/statsmodels,detrout/debian-statsmodels,wkfwkf/statsmodels,bzero/statsmodels,ChadFulton/statsmodels,bert9bert/statsmodels,yl565/statsmodels,bashtage/statsmodels,jseabold/statsmodels,kiyoto/statsmodels,phobson/statsmodels,bzero/statsmodels,cbmoore/statsmodels,musically-ut/statsmodels,ChadFulton/statsmodels,DonBeo/statsmodels,yarikoptic/pystatsmodels,nguyentu1602/statsmodels,jstoxrocky/statsmodels,alekz112/statsmodels,adammenges/statsmodels,bavardage/statsmodels,wkfwkf/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,jstoxrocky/statsmodels,yl565/statsmodels,bashtage/statsmodels,astocko/statsmodels,nguyentu1602/statsmodels,wkfwkf/statsmodels,Averroes/statsmodels,nvoron23/statsmodels,DonBeo/statsmodels,wzbozon/statsmodels,rgommers/statsmodels,YihaoLu/statsmodels,rgommers/statsmodels,ChadFulton/statsmodels,josef-pkt/statsmodels,bavardage/statsmodels,DonBeo/statsmodels,bert9bert/statsmodels,statsmodels/statsmodels,hainm/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,wkfwkf/statsmodels,cbmoore/statsmodels,waynenilsen/statsmodels,detrout/debian-statsmodels,phobson/statsmodels,wdurhamh/statsmodels |
b090c7ae0f5407562e3adc818d2f65ccd4ea7e02 | src/arc_utilities/listener.py | src/arc_utilities/listener.py | from copy import deepcopy
from threading import Lock
import rospy
from arc_utilities.ros_helpers import wait_for
class Listener:
def __init__(self, topic_name, topic_type, wait_for_data=False):
"""
Listener is a wrapper around a subscriber where the callback simply records the latest msg.
Listener does not consume the message
(for consuming behavior, use the standard ros callback pattern)
Listener does not check timestamps of message headers
Parameters:
topic_name (str): name of topic to subscribe to
topic_type (msg_type): type of message received on topic
wait_for_data (bool): block constructor until a message has been received
"""
self.data = None
self.lock = Lock()
self.topic_name = topic_name
self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback)
self.get(wait_for_data)
def callback(self, msg):
with self.lock:
self.data = msg
def get(self, block_until_data=True):
"""
Returns the latest msg from the subscribed topic
Parameters:
block_until_data (bool): block if no message has been received yet.
Guarantees a msg is returned (not None)
"""
wait_for(lambda: not (block_until_data and self.data is None), 10, f"Listener({self.topic_name})")
with self.lock:
return deepcopy(self.data)
| from copy import deepcopy
from threading import Lock
import rospy
from arc_utilities.ros_helpers import wait_for
class Listener:
def __init__(self, topic_name, topic_type, wait_for_data=False, callback=None):
"""
Listener is a wrapper around a subscriber where the callback simply records the latest msg.
Listener does not consume the message
(for consuming behavior, use the standard ros callback pattern)
Listener does not check timestamps of message headers
Parameters:
topic_name (str): name of topic to subscribe to
topic_type (msg_type): type of message received on topic
wait_for_data (bool): block constructor until a message has been received
callback (function taking msg_type): optional callback to be called on the data as we receive it
"""
self.data = None
self.lock = Lock()
self.topic_name = topic_name
self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback)
self.custom_callback = callback
self.get(wait_for_data)
def callback(self, msg):
with self.lock:
self.data = msg
if self.custom_callback is not None:
self.custom_callback(self.data)
def get(self, block_until_data=True):
"""
Returns the latest msg from the subscribed topic
Parameters:
block_until_data (bool): block if no message has been received yet.
Guarantees a msg is returned (not None)
"""
wait_for(lambda: not (block_until_data and self.data is None), 10, f"Listener({self.topic_name})")
with self.lock:
return deepcopy(self.data)
| Allow optional callbacks for Listeners | Allow optional callbacks for Listeners
| Python | bsd-2-clause | WPI-ARC/arc_utilities,UM-ARM-Lab/arc_utilities,UM-ARM-Lab/arc_utilities,UM-ARM-Lab/arc_utilities,WPI-ARC/arc_utilities,WPI-ARC/arc_utilities |
06599b85719aa0c82da9635939bfcaf20b473fd3 | run_tests.py | run_tests.py | import os
import sys
from django.conf import settings
DIR_NAME = os.path.dirname(__file__)
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'cuser',
),
ROOT_URLCONF='testss.CuserTestCase.urls',
MIDDLEWARE_CLASSES = [
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'cuser.middleware.CuserMiddleware',
]
)
from django.test.simple import DjangoTestSuiteRunner
test_runner = DjangoTestSuiteRunner(verbosity=2)
failures = test_runner.run_tests(['cuser', ])
if failures:
sys.exit(failures)
| import os
import sys
import django
from django.conf import settings
DJANGO_VERSION = float('.'.join([str(i) for i in django.VERSION[0:2]]))
DIR_NAME = os.path.dirname(__file__)
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'cuser',
),
ROOT_URLCONF='testss.CuserTestCase.urls',
MIDDLEWARE_CLASSES = [
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'cuser.middleware.CuserMiddleware',
]
)
from django.test.simple import DjangoTestSuiteRunner
if DJANGO_VERSION >= 1.7:
django.setup()
test_runner = DjangoTestSuiteRunner(verbosity=2)
failures = test_runner.run_tests(['cuser', ])
if failures:
sys.exit(failures)
| Fix running tests for both django 1.6 and 1.7 | Fix running tests for both django 1.6 and 1.7
| Python | bsd-3-clause | Alir3z4/django-cuser |
c774093bd448b6f8bf9ceef2d68ce1033ba06640 | run_tests.py | run_tests.py | import os
import sys
import django
from django.conf import settings
DJANGO_VERSION = float('.'.join([str(i) for i in django.VERSION[0:2]]))
DIR_NAME = os.path.dirname(__file__)
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'cuser',
),
ROOT_URLCONF='testss.CuserTestCase.urls',
MIDDLEWARE_CLASSES = [
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'cuser.middleware.CuserMiddleware',
]
)
from django.test.simple import DjangoTestSuiteRunner
if DJANGO_VERSION >= 1.7:
django.setup()
test_runner = DjangoTestSuiteRunner(verbosity=2)
failures = test_runner.run_tests(['cuser', ])
if failures:
sys.exit(failures)
| import os
import sys
import django
from django.conf import settings
DIR_NAME = os.path.dirname(__file__)
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS=(
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'cuser',
),
ROOT_URLCONF='tests.CuserTestCase.urls',
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'cuser.middleware.CuserMiddleware',
]
)
from django.test.runner import DiscoverRunner
django.setup()
test_runner = DiscoverRunner(verbosity=2)
failures = test_runner.run_tests(['cuser', ])
if failures:
sys.exit(failures)
| Remove backward compt and fix test runner | Remove backward compt and fix test runner
| Python | bsd-3-clause | Alir3z4/django-cuser |
7fc62edee40ecedc49b0529e17ac04e4d7bf6865 | door/models.py | door/models.py | from django.db import models
from django.utils import timezone
class DoorStatus(models.Model):
datetime = models.DateTimeField()
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
@staticmethod
def get_door_by_name(name):
# Creates the object if it does not exist
try:
door = DoorStatus.objects.get(name=name)
return door
except DoorStatus.DoesNotExist:
door = DoorStatus.objects.create(name=name, datetime=timezone.now())
return door
class OpenData(models.Model):
opened = models.DateTimeField()
closed = models.DateTimeField()
def __str__(self):
return str(self.opened)
| from django.db import models
from django.utils import timezone
class DoorStatus(models.Model):
datetime = models.DateTimeField()
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
@staticmethod
def get_door_by_name(name):
# Creates the object if it does not exist
try:
door = DoorStatus.objects.get(name=name)
return door
except DoorStatus.DoesNotExist:
door = DoorStatus.objects.create(name=name, datetime=timezone.now())
return door
class Meta:
verbose_name_plural = "Door Statuses"
class OpenData(models.Model):
opened = models.DateTimeField()
closed = models.DateTimeField()
def __str__(self):
return str(self.opened)
| Change plural name of DoorStatus model | Change plural name of DoorStatus model
| Python | mit | hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website |
4e727b52828122c37b8c398f16ad914898968e83 | examples/rmg/minimal/input.py | examples/rmg/minimal/input.py | # Data sources
database(
thermoLibraries = ['primaryThermoLibrary'],
reactionLibraries = [],
seedMechanisms = [],
kineticsDepositories = ['training'],
kineticsFamilies = ['!Intra_Disproportionation','!Substitution_O'],
kineticsEstimator = 'rate rules',
)
# List of species
species(
label='ethane',
reactive=True,
structure=SMILES("CC"),
)
# Reaction systems
simpleReactor(
temperature=(1350,'K'),
pressure=(1.0,'bar'),
initialMoleFractions={
"ethane": 1.0,
},
terminationConversion={
'ethane': 0.9,
},
terminationTime=(1e6,'s'),
)
solvation(
solvent='water'
)
simulator(
atol=1e-16,
rtol=1e-8,
)
model(
toleranceKeepInEdge=0.0,
toleranceMoveToCore=0.1,
toleranceInterruptSimulation=0.1,
maximumEdgeSpecies=100000
)
options(
units='si',
saveRestartPeriod=None,
drawMolecules=False,
generatePlots=False,
)
| # Data sources
database(
thermoLibraries = ['primaryThermoLibrary'],
reactionLibraries = [],
seedMechanisms = [],
kineticsDepositories = ['training'],
kineticsFamilies = ['!Intra_Disproportionation','!Substitution_O'],
kineticsEstimator = 'rate rules',
)
# List of species
species(
label='ethane',
reactive=True,
structure=SMILES("CC"),
)
# Reaction systems
simpleReactor(
temperature=(1350,'K'),
pressure=(1.0,'bar'),
initialMoleFractions={
"ethane": 1.0,
},
terminationConversion={
'ethane': 0.9,
},
terminationTime=(1e6,'s'),
)
simulator(
atol=1e-16,
rtol=1e-8,
)
model(
toleranceKeepInEdge=0.0,
toleranceMoveToCore=0.1,
toleranceInterruptSimulation=0.1,
maximumEdgeSpecies=100000
)
options(
units='si',
saveRestartPeriod=None,
drawMolecules=False,
generatePlots=False,
)
| Remove solvent(water) from minimal example. | Remove solvent(water) from minimal example.
Minimal should be just that - minimal.
This hides issue #165 | Python | mit | enochd/RMG-Py,nickvandewiele/RMG-Py,faribas/RMG-Py,comocheng/RMG-Py,nyee/RMG-Py,chatelak/RMG-Py,pierrelb/RMG-Py,faribas/RMG-Py,comocheng/RMG-Py,pierrelb/RMG-Py,nyee/RMG-Py,enochd/RMG-Py,nickvandewiele/RMG-Py,KEHANG/RMG-Py,chatelak/RMG-Py,KEHANG/RMG-Py |
f8b8f3a223f195704f8cc9753963fbe82f1e4674 | feincms/content/rss/models.py | feincms/content/rss/models.py | from datetime import datetime
from django.db import models
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
import feedparser
class RSSContent(models.Model):
link = models.URLField(_('link'))
rendered_content = models.TextField(_('Pre-rendered content'), blank=True, editable=False)
last_updated = models.DateTimeField(_('Last updated'), blank=True, null=True)
class Meta:
abstract = True
def render(self, **kwargs):
return mark_safe(self.rendered_content)
#u'<div class="rsscontent"> RSS: <a href="'+self.link+'">'+self.link+'</a></div')
def cache_content(self):
print u"Getting RSS feed at %s" % (self.link,)
feed = feedparser.parse(self.link)
print u"Pre-rendering content"
self.rendered_content = render_to_string('rsscontent.html', {
'feed': feed})
self.last_updated = datetime.now()
self.save()
| from datetime import datetime
from django.db import models
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.template.loader import render_to_string
import feedparser
class RSSContent(models.Model):
title = models.CharField(help_text=_('The rss field is updated several times a day. A change in the title will only be visible on the home page after the next feed update.'), max_length=50)
link = models.URLField(_('link'))
rendered_content = models.TextField(_('Pre-rendered content'), blank=True, editable=False)
last_updated = models.DateTimeField(_('Last updated'), blank=True, null=True)
class Meta:
abstract = True
def render(self, **kwargs):
return mark_safe(self.rendered_content)
#u'<div class="rsscontent"> RSS: <a href="'+self.link+'">'+self.link+'</a></div')
def cache_content(self):
print u"Getting RSS feed at %s" % (self.link,)
feed = feedparser.parse(self.link)
print u"Pre-rendering content"
self.rendered_content = render_to_string('rsscontent.html', {
'title':self.title,
'feed': feed})
self.last_updated = datetime.now()
self.save()
| Add a title field to the RSSContent | Add a title field to the RSSContent
| Python | bsd-3-clause | hgrimelid/feincms,joshuajonah/feincms,michaelkuty/feincms,matthiask/feincms2-content,nickburlett/feincms,michaelkuty/feincms,nickburlett/feincms,matthiask/django-content-editor,michaelkuty/feincms,nickburlett/feincms,hgrimelid/feincms,feincms/feincms,feincms/feincms,joshuajonah/feincms,matthiask/django-content-editor,michaelkuty/feincms,mjl/feincms,matthiask/feincms2-content,matthiask/feincms2-content,mjl/feincms,nickburlett/feincms,matthiask/django-content-editor,joshuajonah/feincms,hgrimelid/feincms,pjdelport/feincms,feincms/feincms,joshuajonah/feincms,pjdelport/feincms,pjdelport/feincms,mjl/feincms,matthiask/django-content-editor |
fae3e55b1c472cd314676431a34fe6e160418626 | tests/test_command_line.py | tests/test_command_line.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import subprocess
class TestCommandLine(object):
def setup(self):
"""Set up the environment by moving to the demos directory."""
os.chdir("demos")
def teardown(self):
os.chdir("..")
def add(self, *args):
self.db.add_all(args)
self.db.commit()
def test_dallinger_help(self):
output = subprocess.check_output("dallinger", shell=True)
assert("Usage: dallinger [OPTIONS] COMMAND [ARGS]" in output)
| #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import subprocess
from dallinger.command_line import heroku_id
class TestCommandLine(object):
def setup(self):
"""Set up the environment by moving to the demos directory."""
os.chdir("demos")
def teardown(self):
os.chdir("..")
def add(self, *args):
self.db.add_all(args)
self.db.commit()
def test_dallinger_help(self):
output = subprocess.check_output("dallinger", shell=True)
assert("Usage: dallinger [OPTIONS] COMMAND [ARGS]" in output)
def test_heroku_app_id(self):
id = "8fbe62f5-2e33-4274-8aeb-40fc3dd621a0"
assert(len(heroku_id(id)) < 30)
| Test for Heroku app name length | Test for Heroku app name length
| Python | mit | jcpeterson/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger |
ccc6c983411f951ef3906d55d6a0946c7ef93c75 | app/brief_utils.py | app/brief_utils.py | from flask import abort
from .models import Service
from .validation import get_validation_errors
from .service_utils import filter_services
def validate_brief_data(brief, enforce_required=True, required_fields=None):
errs = get_validation_errors(
'briefs-{}-{}'.format(brief.framework.slug, brief.lot.slug),
brief.data,
enforce_required=enforce_required,
required_fields=required_fields
)
if errs:
abort(400, errs)
def is_supplier_eligible_for_brief(supplier, brief):
services = filter_services(
framework_slugs=[brief.framework.slug],
statuses=["published"],
lot_slug=brief.lot.slug,
location=brief.data["location"],
role=brief.data["specialistRole"] if brief.lot.slug == "digital-specialists" else None
)
services = services.filter(Service.supplier_id == supplier.supplier_id)
return services.count() > 0
| from flask import abort
from .models import Service
from .validation import get_validation_errors
from .service_utils import filter_services
def validate_brief_data(brief, enforce_required=True, required_fields=None):
errs = get_validation_errors(
'briefs-{}-{}'.format(brief.framework.slug, brief.lot.slug),
brief.data,
enforce_required=enforce_required,
required_fields=required_fields
)
criteria_weighting_keys = ['technicalWeighting', 'culturalWeighting', 'priceWeighting']
# Only check total if all weightings are set
if all(key in brief.data for key in criteria_weighting_keys):
criteria_weightings = sum(brief.data[key] for key in criteria_weighting_keys)
if criteria_weightings != 100:
for key in set(criteria_weighting_keys) - set(errs):
errs[key] = 'total_should_be_100'
if errs:
abort(400, errs)
def is_supplier_eligible_for_brief(supplier, brief):
services = filter_services(
framework_slugs=[brief.framework.slug],
statuses=["published"],
lot_slug=brief.lot.slug,
location=brief.data["location"],
role=brief.data["specialistRole"] if brief.lot.slug == "digital-specialists" else None
)
services = services.filter(Service.supplier_id == supplier.supplier_id)
return services.count() > 0
| Add criteria weighting 100% total validation | Add criteria weighting 100% total validation
Checks the criteria weighting sum if all criteria fields are set.
This relies on all three fields being required.
If the fields don't add up to a 100 an error is added for each field
that doesn't have any other validation errors.
| Python | mit | alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api |
e378902b85bf865e0b020bd4afe0e12d593a95a8 | github-keys-check.py | github-keys-check.py | #!/usr/bin/python3
import urllib.request
import argparse
import pwd
import sys
def key_for_user(user):
url = 'https://github.com/%s.keys' % user
with urllib.request.urlopen(url) as f:
return f.read().decode('utf-8')
def validate_user(username, min_uid):
"""
Validates that a given username is:
1. A valid, existing user
2. Has uid > min_uid
"""
user = pwd.getpwnam(username)
return user.pw_uid > min_uid
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('username')
parser.add_argument(
'--min-uid', type=int, default=999,
help='uid must be > this to be allowed ssh access. \
Helps keep system users non-sshable'
)
args = parser.parse_args()
if validate_user(args.username, args.min_uid):
print(key_for_user(args.username))
else:
print("Not a valid user")
sys.exit(1)
| #!/usr/bin/python3
import urllib.request
import argparse
import pwd
import grp
import sys
def key_for_user(user):
url = 'https://github.com/%s.keys' % user
with urllib.request.urlopen(url) as f:
return f.read().decode('utf-8')
def validate_user(username, min_uid, in_group):
"""
Validates that a given username is:
1. A valid, existing user
2. Is a member of the group in_group
3. Has uid > min_uid
"""
user = pwd.getpwnam(username)
if in_group is None or username in grp.getgrnam(in_group).gr_mem:
return user.pw_uid > min_uid
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('username')
parser.add_argument(
'--min-uid', type=int, default=999,
help='uid must be > this to be allowed ssh access. \
Helps keep system users non-sshable'
)
parser.add_argument(
'--in-group', default=None,
help='Only users in this group can login via github keys'
)
args = parser.parse_args()
if validate_user(args.username, args.min_uid, args.in_group):
print(key_for_user(args.username))
else:
print("Not a valid user")
sys.exit(1)
| Add --in-group parameter to validate users | Add --in-group parameter to validate users
Allows github login only for users in a certain group. This
can be used to whitelist users who are allowed to ssh in
| Python | apache-2.0 | yuvipanda/github-ssh-auth |
d5eccc801634f1b841fbc31de545e530b6d4bd54 | startup.py | startup.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
from collections import Counter
import pandas as pd
XLS_NAME = 'startup.xls'
SHEET_NAME = 'STARTUP_15092014'
COL_NAME = 'nat.giuridica'
def main():
xls = pd.ExcelFile(XLS_NAME)
sheet = xls.parse(SHEET_NAME, index_col=None)
for k,v in Counter(sheet[COL_NAME]).most_common():
print "%4d\t%s" % (v, k)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# -*- encoding: utf-8 -*-
from collections import Counter
import pandas as pd
XLS_NAME = 'startup.xls'
SHEET_NAME = 'STARTUP_15092014'
def main():
xls = pd.ExcelFile(XLS_NAME)
sheet = xls.parse(SHEET_NAME, index_col=None, convert_float=False)
data = [el for el in sheet['nat.giuridica']]
for k,v in Counter(data).most_common():
print "%4d\t%s" % (v, k)
print
data = [el for el in sheet['classe di valore della produzione ultimo anno (1)'] if el in ['A', 'B', 'C', 'D', 'E']]
for k,v in Counter(data).most_common():
print "%4d\t%s" % (v, k)
print
data = [el for el in sheet['classe di addetti ultimo anno (2)'] if el in ['A', 'B', 'C', 'D', 'E']]
for k,v in Counter(data).most_common():
print "%4d\t%s" % (v, k)
if __name__ == '__main__':
main()
| Add pretty output for two more fields. | Add pretty output for two more fields.
| Python | mit | jacquerie/italian-startups-report |
34db881007bf0dad3b7e870d36ab4e4a68b0fd3d | emcee/run_emcee.py | emcee/run_emcee.py | #! /usr/bin/python
from os.path import abspath, join as pathjoin
from shutil import copy
from subprocess import call
from tempfile import mkdtemp
install_dir = mkdtemp(prefix='emcee.')
games_dir = abspath(pathjoin('..', 'games'))
libraries_dir = abspath('libraries')
infra_dir = abspath('infra')
print 'Installing emcee in %s' % (install_dir,)
print 'Games in %s' % (games_dir,)
print 'Libraries in %s' % (libraries_dir,)
print 'Infrastructure files in %s' % (infra_dir,)
copy('emcee.py', install_dir)
copy(pathjoin('libraries', 'pubsub.py'), install_dir)
call([pathjoin(install_dir, 'emcee.py'), games_dir, libraries_dir, infra_dir],
cwd=install_dir)
| #! /usr/bin/python
from os.path import abspath, join as pathjoin
from shutil import copy
from subprocess import Popen
from tempfile import mkdtemp
install_dir = mkdtemp(prefix='emcee.')
games_dir = abspath(pathjoin('..', 'games'))
libraries_dir = abspath('libraries')
infra_dir = abspath('infra')
print 'Installing emcee in %s' % (install_dir,)
print 'Games in %s' % (games_dir,)
print 'Libraries in %s' % (libraries_dir,)
print 'Infrastructure files in %s' % (infra_dir,)
copy('emcee.py', install_dir)
copy('pubsub_ws.py', install_dir)
copy('pubsub_ws_doc.html', install_dir)
copy(pathjoin('libraries', 'pubsub.py'), install_dir)
processes = []
processes.append(Popen([pathjoin(install_dir, 'emcee.py'),
games_dir, libraries_dir, infra_dir],
cwd=install_dir))
processes.append(Popen([pathjoin(install_dir, 'pubsub_ws.py'), infra_dir],
cwd=install_dir))
print 'Now running'
raw_input('Press Enter to stop')
map(lambda(p): p.kill(), processes)
| Deploy pubsub_ws along with emcee | Deploy pubsub_ws along with emcee
| Python | mit | douglassquirrel/alexandra,douglassquirrel/alexandra,douglassquirrel/alexandra |
d5a2a11d23b9f5393b0b39ca2f90978276311f52 | app/slot/routes.py | app/slot/routes.py | from app import app
from app.slot import controller as con
import config
from auth import requires_auth
from flask import render_template
from flask.ext.login import login_required
@app.route('/dashboard')
# @requires_auth
@login_required
def index():
return con.index()
@app.route('/new', methods=['GET', 'POST'])
@requires_auth
def render_new_procedure_form():
return con.render_new_procedure_form()
@app.route('/sms', methods=['POST'])
@requires_auth
def receive_sms():
return con.receive_sms() | from app import app
from app.slot import controller as con
import config
from auth import requires_auth
from flask import render_template
from flask.ext.login import login_required
@app.route('/')
@app.route('/dashboard')
@login_required
def index():
return con.index()
@app.route('/new', methods=['GET', 'POST'])
@requires_auth
def render_new_procedure_form():
return con.render_new_procedure_form()
@app.route('/sms', methods=['POST'])
@requires_auth
def receive_sms():
return con.receive_sms() | Add / route to index. Remove old requires_auth decorator. | Add / route to index.
Remove old requires_auth decorator.
| Python | mit | nhshd-slot/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT |
ecc816295154a3756e87349b4cff397ebd17b95f | sipa/base.py | sipa/base.py | # -*- coding: utf-8 -*-
"""
Basic utilities for the Flask app
These are basic utilities necessary for the Flask app which are
disjoint from any blueprint.
"""
from flask import request, session
from flask_login import AnonymousUserMixin, LoginManager
from werkzeug.routing import IntegerConverter as BaseIntegerConverter
from sipa.model import backends
login_manager = LoginManager()
class IntegerConverter(BaseIntegerConverter):
"""IntegerConverter supporting negative values
This is a Modification of the standard IntegerConverter which does
not support negative values. See the corresponding `werkzeug
documentation
<http://werkzeug.pocoo.org/docs/0.10/routing/#werkzeug.routing.IntegerConverter>`_.
"""
regex = r'-?\d+'
@login_manager.user_loader
def load_user(username):
"""Loads a User object from/into the session at every request
"""
if request.blueprint == "documents" or request.endpoint == "static":
return AnonymousUserMixin()
dormitory = backends.get_dormitory(session.get('dormitory', None))
if dormitory:
return dormitory.datasource.user_class.get(username)
else:
return AnonymousUserMixin()
| # -*- coding: utf-8 -*-
"""
Basic utilities for the Flask app
These are basic utilities necessary for the Flask app which are
disjoint from any blueprint.
"""
from flask import request, session
from flask_login import AnonymousUserMixin, LoginManager
from flask_babel import gettext
from werkzeug.routing import IntegerConverter as BaseIntegerConverter
from sipa.model import backends
login_manager = LoginManager()
login_manager.login_view = "generic.login"
login_manager.localize_callback = gettext
login_manager.login_message = "Bitte melde Dich an, um die Seite zu sehen."
class IntegerConverter(BaseIntegerConverter):
"""IntegerConverter supporting negative values
This is a Modification of the standard IntegerConverter which does
not support negative values. See the corresponding `werkzeug
documentation
<http://werkzeug.pocoo.org/docs/0.10/routing/#werkzeug.routing.IntegerConverter>`_.
"""
regex = r'-?\d+'
@login_manager.user_loader
def load_user(username):
"""Loads a User object from/into the session at every request
"""
if request.blueprint == "documents" or request.endpoint == "static":
return AnonymousUserMixin()
dormitory = backends.get_dormitory(session.get('dormitory', None))
if dormitory:
return dormitory.datasource.user_class.get(username)
else:
return AnonymousUserMixin()
| Set up flask to handle login redirects. | Set up flask to handle login redirects.
Fix #147.
| Python | mit | lukasjuhrich/sipa,agdsn/sipa,agdsn/sipa,lukasjuhrich/sipa,lukasjuhrich/sipa,agdsn/sipa,agdsn/sipa,lukasjuhrich/sipa,MarauderXtreme/sipa,MarauderXtreme/sipa,MarauderXtreme/sipa |
4bf614e072a603f4b46038e2f59459c305844553 | ReversiTest.py | ReversiTest.py | import unittest
import reversi
class TestUM(unittest.TestCase):
def setUp(self):
self.board = reversi.ReversiBoard().set_default_board()
def tearDown(self):
self.board = None
def test_up(self):
tuple = (4, 3)
result = self.board.up(tuple)
self.assertEqual(result, (4, 2))
def test_up_right(self):
self.assertEqual(self.board.up_right((2, 2)), (3, 1))
def test_right(self):
self.assertEqual(self.board.right((2, 2)), (3, 2))
def test_out_of_bounds(self):
self.assertIsNone(self.board.right((7, 0)))
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/python
import unittest
import reversi
class ReversiTest(unittest.TestCase):
def setUp(self):
self.board = reversi.ReversiBoard().set_default_board()
def tearDown(self):
self.board = None
def test_up(self):
tuple = (4, 3)
result = self.board.up(tuple)
self.assertEqual(result, (4, 2))
def test_up_right(self):
self.assertEqual(self.board.up_right((2, 2)), (3, 1))
def test_right(self):
self.assertEqual(self.board.right((2, 2)), (3, 2))
def test_out_of_bounds(self):
self.assertIsNone(self.board.right((7, 0)))
if __name__ == '__main__':
unittest.main()
| Update class name in the unit tester | Update class name in the unit tester
| Python | mit | dmitrym0/reversi-py |
38603c8b35c15c134a0499ac92a7c1f7dee4f526 | send_test_data.py | send_test_data.py | #!/usr/bin/env python
import requests
import datetime
import time
import json
import random
from random import choice
random.seed(datetime.datetime.now())
names = ("vehicle_speed", "fuel_consumed_since_restart", "latitude",
"longitude")
while True:
data = {"records": [
{"timestamp": time.time() * 1000,
"name": choice(names),
"value": random.randint(0, 100)}
]}
print "Sending %s" % data
headers = {'content-type': 'application/json'}
r = requests.post('http://localhost:5000/records', data=json.dumps(data),
headers=headers)
print r
time.sleep(.1)
| #!/usr/bin/env python
import requests
import datetime
import time
import json
import sys
from util import massage_record
names = ("vehicle_speed", "fuel_consumed_since_restart", "latitude",
"longitude")
def send_records(records):
data = {"records": records}
print "Sending %s" % data
headers = {'content-type': 'application/json'}
r = requests.post('http://localhost:5000/records', data=json.dumps(data),
headers=headers)
print r
time.sleep(1)
while True:
filename = sys.argv[1]
try:
records = []
with open(filename, 'r') as trace_file:
for line in trace_file:
timestamp, record = line.split(':', 1)
record = massage_record(json.loads(record), float(timestamp))
records.append(record)
if len(records) == 25:
send_records(records)
records = []
except IOError:
print("No active trace file found at %s" % filename)
| Send test data from actual trace files. | Send test data from actual trace files.
| Python | bsd-3-clause | openxc/web-logging-example,openxc/web-logging-example |
b9ac30b0e428038986de64e069954ee340b991a9 | integration/group.py | integration/group.py | from spec import Spec, eq_
from fabric import ThreadingGroup as Group
class Group_(Spec):
def simple_command_on_multiple_hosts(self):
"""
Run command on localhost...twice!
"""
group = Group('localhost', 'localhost')
result = group.run('echo foo', hide=True)
# NOTE: currently, the result will only be 1 object, because both of
# them will end up as the same key. Derp.
eq_(result[group[0]].stdout, "foo\n")
| from spec import Spec, eq_
from fabric import ThreadingGroup as Group
class Group_(Spec):
def simple_command(self):
group = Group('localhost', '127.0.0.1')
result = group.run('echo foo', hide=True)
eq_(
[x.stdout.strip() for x in result.values()],
['foo', 'foo'],
)
| Tidy up existing integration test | Tidy up existing integration test
| Python | bsd-2-clause | fabric/fabric |
58a96c65f1e9868fb607cd3ce56dbf60905f62a7 | autoencoder/api.py | autoencoder/api.py | from .io import preprocess
from .train import train
from .network import mlp
def autoencode(count_matrix, kfold=None, dimreduce=True, reconstruct=True,
mask=None, type='normal', activation='relu', testset=False,
learning_rate=1e-2, hidden_size=(256,64,256), l2_coef=0.,
epochs=200, batch_size=32, **kwargs):
x = preprocess(count_matrix, kfold=kfold, mask=mask, testset=testset)
net = mlp(x['shape'][1],
hidden_size=hidden_size,
l2_coef=l2_coef,
activation=activation,
masking=(mask is not None),
loss_type=type)
model, encoder, decoder, loss, extras = net['model'], net['encoder'], \
net['decoder'], net['loss'], \
net['extra_models']
losses = train(x, model, loss,
learning_rate=learning_rate,
epochs=epochs, batch_size=batch_size,
**kwargs)
ret = {'model': model,
'encoder': encoder,
'decoder': decoder,
'extra_models': extras,
'losses': losses}
if dimreduce:
ret['reduced'] = encoder.predict(count_matrix)
if reconstruct:
ret['reconstructed'] = model.predict(count_matrix)
return ret
| from .io import preprocess
from .train import train
from .network import mlp
def autoencode(count_matrix, kfold=None, dimreduce=True, reconstruct=True,
mask=None, type='normal', activation='relu', testset=False,
learning_rate=1e-2, hidden_size=(256,64,256), l2_coef=0.,
epochs=200, batch_size=32, optimizer=None, **kwargs):
x = preprocess(count_matrix, kfold=kfold, mask=mask, testset=testset)
net = mlp(x['shape'][1],
hidden_size=hidden_size,
l2_coef=l2_coef,
activation=activation,
masking=(mask is not None),
loss_type=type)
model, encoder, decoder, loss, extras = net['model'], net['encoder'], \
net['decoder'], net['loss'], \
net['extra_models']
losses = train(x, model, loss,
learning_rate=learning_rate,
epochs=epochs, batch_size=batch_size,
optimizer=optimizer, **kwargs)
ret = {'model': model,
'encoder': encoder,
'decoder': decoder,
'extra_models': extras,
'losses': losses}
if dimreduce:
ret['reduced'] = encoder.predict(count_matrix)
if reconstruct:
ret['reconstructed'] = model.predict(count_matrix)
return ret
| Add optimizer to the API | Add optimizer to the API
Former-commit-id: 3e06c976ad6a7d4409817fb0fa1472237bfa28b7 | Python | apache-2.0 | theislab/dca,theislab/dca,theislab/dca |
1ed040f9d64e12adf964e9f86cc1e18bd8d21593 | scripts/rename.py | scripts/rename.py | import logging
from scripts.util import documents
from scrapi import settings
from scrapi.linter import RawDocument
from scrapi.processing.elasticsearch import es
from scrapi.tasks import normalize, process_normalized, process_raw
logger = logging.getLogger(__name__)
def rename(source, target, dry=True):
assert source != target, "Can't rename {} to {}, names are the same".format(source, target)
count = 0
exceptions = []
for doc in documents(source):
count += 1
try:
raw = RawDocument({
'doc': doc.doc,
'docID': doc.docID,
'source': target,
'filetype': doc.filetype,
'timestamps': doc.timestamps,
'versions': doc.versions
})
if not dry:
process_raw(raw)
process_normalized(normalize(raw, raw['source']), raw)
logger.info('Processed document from {} with id {}'.format(source, raw['docID']))
except Exception as e:
logger.exception(e)
exceptions.append(e)
else:
if not dry:
doc.delete()
es.delete(index=settings.ELASTIC_INDEX, doc_type=source, id=raw['docID'], ignore=[404])
logger.info('Deleted document from {} with id {}'.format(source, raw['docID']))
if dry:
logger.info('Dry run complete')
for ex in exceptions:
logger.exception(e)
logger.info('{} documents processed, with {} exceptions'.format(count, len(exceptions)))
| import logging
from scripts.util import documents
from scrapi import settings
from scrapi.linter import RawDocument
from scrapi.processing.elasticsearch import es
from scrapi.tasks import normalize, process_normalized, process_raw
logger = logging.getLogger(__name__)
def rename(source, target, dry=True):
assert source != target, "Can't rename {} to {}, names are the same".format(source, target)
count = 0
exceptions = []
for doc in documents(source):
count += 1
try:
raw = RawDocument({
'doc': doc.doc,
'docID': doc.docID,
'source': target,
'filetype': doc.filetype,
'timestamps': doc.timestamps,
'versions': doc.versions
})
if not dry:
process_raw(raw)
process_normalized(normalize(raw, raw['source']), raw)
logger.info('Processed document from {} with id {}'.format(source, raw['docID']))
except Exception as e:
logger.exception(e)
exceptions.append(e)
else:
if not dry:
# doc.delete()
es.delete(index=settings.ELASTIC_INDEX, doc_type=source, id=raw['docID'], ignore=[404])
es.delete(index='share_v1', doc_type=source, id=raw['docID'], ignore=[404])
logger.info('Deleted document from {} with id {}'.format(source, raw['docID']))
if dry:
logger.info('Dry run complete')
for ex in exceptions:
logger.exception(e)
logger.info('{} documents processed, with {} exceptions'.format(count, len(exceptions)))
| Stop cassandra from deleting documents, delete documents from old index as well | Stop cassandra from deleting documents, delete documents from old index as well
| Python | apache-2.0 | erinspace/scrapi,mehanig/scrapi,alexgarciac/scrapi,felliott/scrapi,fabianvf/scrapi,icereval/scrapi,jeffreyliu3230/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,mehanig/scrapi,CenterForOpenScience/scrapi,ostwald/scrapi,fabianvf/scrapi,felliott/scrapi |
473121ce5a3caa20576d02c79669408fd4177a43 | features/steps/interactive.py | features/steps/interactive.py | import time, pexpect, re
import nose.tools as nt
import subprocess as spr
PROMPT = "root@\w+:[^\r]+"
UP_ARROW = "\x1b[A"
def type(process, input_):
process.send(input_.encode())
process.expect(PROMPT)
# Remove the typed input from the returned standard out
return re.sub(re.escape(input_.strip()), '', process.before).strip()
@when(u'I run the interactive command')
def step_impl(context):
process = pexpect.spawn(context.text)
time.sleep(0.5)
type(process, UP_ARROW)
class Output(object):
pass
context.output = Output()
context.output.stderr = ""
context.output.stdout = ""
context.process = process
@when(u'I type')
def step_impl(context):
cmd = context.text.strip() + "\n"
context.output.stdout = type(context.process, cmd)
@when(u'I exit the shell')
def step_impl(context):
context.process.send("exit\n")
| import time, pexpect, re
import nose.tools as nt
import subprocess as spr
PROMPT = "root@\w+:[^\r]+"
ENTER = "\n"
def type(process, input_):
process.send(input_.encode())
process.expect(PROMPT)
# Remove the typed input from the returned standard out
return re.sub(re.escape(input_.strip()), '', process.before).strip()
@when(u'I run the interactive command')
def step_impl(context):
process = pexpect.spawn(context.text)
time.sleep(0.5)
type(process, ENTER)
class Output(object):
pass
context.output = Output()
context.output.stderr = ""
context.output.stdout = ""
context.process = process
@when(u'I type')
def step_impl(context):
cmd = context.text.strip() + "\n"
context.output.stdout = type(context.process, cmd)
@when(u'I exit the shell')
def step_impl(context):
context.process.send("exit\n")
| Use "\n" to fix waiting for prompt in feature tests on CI | Use "\n" to fix waiting for prompt in feature tests on CI
| Python | mit | michaelbarton/command-line-interface,pbelmann/command-line-interface,bioboxes/command-line-interface,pbelmann/command-line-interface,michaelbarton/command-line-interface,bioboxes/command-line-interface |
624276b80b6d69b788b2f48691941cd89847237b | software/Pi/ui.py | software/Pi/ui.py | """
Handles LED output for the Raspberry Pi 3
Image tracking software. Imported using 'import ui'
Version: 5/06/17
Dependencies: RPi.GPIO
Note: will only work on a Raspberry Pi!
"""
import RPi.GPIO as gpio
import time
ledPin = 16 #GPIO23
#Set up RPi GPIO
def setup():
gpio.setmode(gpio.BOARD)
gpio.setup(ledPin, gpio.OUT)
def blink(n):
for i in range(0, n):
gpio.output(ledPin, True)
time.sleep(0.5)
gpio.output(ledPin, False)
time.sleep(0.5)
| """
Handles LED output for the Raspberry Pi 3
Image tracking software. Imported using 'import ui'
Version: 5/06/17
Dependencies: RPi.GPIO
Note: will only work on a Raspberry Pi!
"""
import RPi.GPIO as gpio
import time
ledPin = 16 #GPIO23
#Set up RPi GPIO
def setup():
gpio.setmode(gpio.BOARD)
gpio.setwarnings(False)
gpio.setup(ledPin, gpio.OUT)
def blink(n):
for i in range(0, n):
gpio.output(ledPin, True)
time.sleep(0.5)
gpio.output(ledPin, False)
time.sleep(0.5)
| Disable warnings for GPIO channels... | Disable warnings for GPIO channels...
| Python | mit | AdlerFarHorizons/eclipse-tracking,AdlerFarHorizons/eclipse-tracking,AdlerFarHorizons/eclipse-tracking,AdlerFarHorizons/eclipse-tracking |
6d9b9d043e1d27e93f9cbf221ddc29db8760f789 | config.py | config.py | MINUTES = 60
HOURS = MINUTES * 60
DAYS = HOURS * 24
WEEKS = DAYS * 7
YEARS = DAYS * 365
# Allows you to do `3 * DAYS` to make configuration clearer.
# Technically the value for YEARS is wrong, but it's close enough.
POW_DIGITS = 2
PROTOCOL_VERSION = b"0xdeadbeef"
assert len(PROTOCOL_VERSION) <= 16
PROTOCOL_VERSION = (bytes(16) + PROTOCOL_VERSION)[-16:]
PORT = 3514
BROADPORT = 5252
VERBOSE = False
MSGDIR = "msgs/"
LISTEN_FOR_BROADCASTS = True
CREATE_BROADCASTS = True
PRUNE_TIME = 7
PRUNE_DELETE = True
| MINUTES = 60
HOURS = MINUTES * 60
DAYS = HOURS * 24
WEEKS = DAYS * 7
YEARS = DAYS * 365
# Allows you to do `3 * DAYS` to make configuration clearer.
# Technically the value for YEARS is wrong, but it's close enough.
POW_DIGITS = 2
PROTOCOL_VERSION = b"0xdeadbeef"
assert len(PROTOCOL_VERSION) <= 16
PROTOCOL_VERSION = (bytes(16) + PROTOCOL_VERSION)[-16:]
PORT = 3514
BROADPORT = 5252
VERBOSE = False
MSGDIR = "msgs/"
LISTEN_FOR_BROADCASTS = True
CREATE_BROADCASTS = True
PRUNE_TIME = 7 * DAYS
PRUNE_DELETE = True
| Increase prune time to 7 days, not 7 seconds. | Increase prune time to 7 days, not 7 seconds.
For testing, this should be lowered, but not pushed.
| Python | mit | 5225225/node,5225225/node |
c266fbd7a3478d582dc0d6c88fc5e3d8b7a8f62f | survey/views/survey_result.py | survey/views/survey_result.py | # -*- coding: utf-8 -*-
import datetime
import os
from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404
from survey.management.survey2csv import Survey2CSV
from survey.models import Survey
def serve_result_csv(request, pk):
survey = get_object_or_404(Survey, pk=pk)
try:
latest_answer = survey.latest_answer_date()
csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey))
csv_time = datetime.datetime.fromtimestamp(csv_modification_time)
csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo)
if latest_answer > csv_time:
# If the file was generated before the last answer, generate it.
Survey2CSV.generate_file(survey)
except OSError:
# If the file do not exist, generate it.
Survey2CSV.generate_file(survey)
with open(Survey2CSV.file_name(survey), 'r') as f:
response = HttpResponse(f.read(), content_type='text/csv')
response['mimetype='] = 'application/force-download'
cd = u'attachment; filename="{}.csv"'.format(survey.name)
response['Content-Disposition'] = cd
return response
| # -*- coding: utf-8 -*-
import datetime
import os
from django.http.response import HttpResponse
from django.shortcuts import get_object_or_404
from survey.management.survey2csv import Survey2CSV
from survey.models import Survey
def serve_result_csv(request, pk):
survey = get_object_or_404(Survey, pk=pk)
try:
latest_answer = survey.latest_answer_date()
csv_modification_time = os.path.getmtime(Survey2CSV.file_name(survey))
csv_time = datetime.datetime.fromtimestamp(csv_modification_time)
csv_time = csv_time.replace(tzinfo=latest_answer.tzinfo)
if latest_answer > csv_time:
# If the file was generated before the last answer, generate it.
Survey2CSV.generate_file(survey)
except OSError:
# If the file do not exist, generate it.
Survey2CSV.generate_file(survey)
with open(Survey2CSV.file_name(survey), 'r') as f:
response = HttpResponse(f.read(), content_type='text/csv')
cd = u'attachment; filename="{}.csv"'.format(survey.name)
response['Content-Disposition'] = cd
return response
| Fix - Apache error AH02429 | Fix - Apache error AH02429
Response header name 'mimetype=' contains invalid characters, aborting request
| Python | agpl-3.0 | Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey |
9a121f309ded039f770339d51b43d0933a98d982 | app/main/views.py | app/main/views.py | from flask import render_template, current_app, flash, redirect, url_for
from . import main
from forms import ContactForm
from ..email import send_email
@main.route('/')
def index():
return render_template('index.html')
@main.route('/about')
def about():
return render_template('about.html')
@main.route('/menu')
def menu():
return render_template('menu.html')
@main.route('/hours-and-directions')
def hours():
return render_template('hours-and-directions.html')
@main.route('/contact', methods=['GET', 'POST'])
def contact():
contact_form = ContactForm()
if contact_form.validate_on_submit():
name = contact_form.name.data
email = contact_form.email.data
phone = contact_form.phone.data
message = contact_form.message.data
send_email(current_app.config['MAIL_USERNAME'], 'Robata Grill Inquiry',
'mail/message', name=name, email=email, phone=phone, message=message)
flash('Your message has been sent. We will be in contact with you shortly.')
return redirect(url_for('main.contact'))
return render_template('contact.html', contact_form = contact_form)
@main.route('/imageScroll')
def imageScroll():
return render_template('imageScroll.html')
| from flask import render_template, current_app, flash, redirect, url_for, send_from_directory
from . import main
from forms import ContactForm
from ..email import send_email
@main.route('/<path:filename>')
def static_from_root(filename):
return send_from_directory(current_app.static_folder, filename)
@main.route('/')
def index():
return render_template('index.html')
@main.route('/about')
def about():
return render_template('about.html')
@main.route('/menu')
def menu():
return render_template('menu.html')
@main.route('/hours-and-directions')
def hours():
return render_template('hours-and-directions.html')
@main.route('/contact', methods=['GET', 'POST'])
def contact():
contact_form = ContactForm()
if contact_form.validate_on_submit():
name = contact_form.name.data
email = contact_form.email.data
phone = contact_form.phone.data
message = contact_form.message.data
send_email(current_app.config['MAIL_USERNAME'], 'Robata Grill Inquiry',
'mail/message', name=name, email=email, phone=phone, message=message)
flash('Your message has been sent. We will be in contact with you shortly.')
return redirect(url_for('main.contact'))
return render_template('contact.html', contact_form = contact_form)
@main.route('/imageScroll')
def imageScroll():
return render_template('imageScroll.html')
| Add additional view for sitemap.xml | Add additional view for sitemap.xml
| Python | mit | jordandietch/workforsushi,jordandietch/workforsushi,jordandietch/workforsushi,jordandietch/workforsushi |
49606a1a14696f41a03c7b69cbcf382cab50210c | t/integration/test_py_amqp.py | t/integration/test_py_amqp.py | from __future__ import absolute_import, unicode_literals
import os
import pytest
import kombu
from .common import BasicFunctionality
def get_connection(
hostname, port, vhost):
return kombu.Connection('amqp://{}:{}'.format(hostname, port))
@pytest.fixture()
def connection(request):
# this fixture yields plain connections to broker and TLS encrypted
return get_connection(
hostname=os.environ.get('RABBITMQ_HOST', 'localhost'),
port=os.environ.get('RABBITMQ_5672_TCP', '5672'),
vhost=getattr(
request.config, "slaveinput", {}
).get("slaveid", None),
)
@pytest.mark.env('py-amqp')
@pytest.mark.flaky(reruns=5, reruns_delay=2)
class test_PyAMQPBasicFunctionality(BasicFunctionality):
pass
| from __future__ import absolute_import, unicode_literals
import os
import pytest
import kombu
from .common import BasicFunctionality
def get_connection(
hostname, port, vhost):
return kombu.Connection('pyamqp://{}:{}'.format(hostname, port))
@pytest.fixture()
def connection(request):
# this fixture yields plain connections to broker and TLS encrypted
return get_connection(
hostname=os.environ.get('RABBITMQ_HOST', 'localhost'),
port=os.environ.get('RABBITMQ_5672_TCP', '5672'),
vhost=getattr(
request.config, "slaveinput", {}
).get("slaveid", None),
)
@pytest.mark.env('py-amqp')
@pytest.mark.flaky(reruns=5, reruns_delay=2)
class test_PyAMQPBasicFunctionality(BasicFunctionality):
pass
| Use explicit py-amqp transport instead of amqp in integration tests | Use explicit py-amqp transport instead of amqp in integration tests
| Python | bsd-3-clause | ZoranPavlovic/kombu,ZoranPavlovic/kombu,celery/kombu |
36bde060bbdb4cf9d0396719b8b82952a73bf2b5 | bucky/collector.py | bucky/collector.py |
import time
import multiprocessing
try:
from setproctitle import setproctitle
except ImportError:
def setproctitle(title):
pass
class StatsCollector(multiprocessing.Process):
def __init__(self, queue):
super(StatsCollector, self).__init__()
self.queue = queue
def close(self):
pass
def run(self):
setproctitle("bucky: %s" % self.__class__.__name__)
err = 0
while True:
start_timestamp = time.time()
if not self.collect():
err = min(err + 1, 2)
else:
err = 0
stop_timestamp = time.time()
sleep_time = (err + 1) * self.interval - (stop_timestamp - start_timestamp)
if sleep_time > 0.1:
time.sleep(sleep_time)
def collect(self):
raise NotImplementedError()
def add_stat(self, name, value, timestamp, **metadata):
if metadata:
if self.metadata:
metadata.update(self.metadata)
else:
metadata = self.metadata
if metadata:
self.queue.put((None, name, value, timestamp, metadata))
else:
self.queue.put((None, name, value, timestamp))
def merge_dicts(self, *dicts):
ret = {}
for d in dicts:
if d:
ret.update(d)
return ret
|
import time
import multiprocessing
try:
from setproctitle import setproctitle
except ImportError:
def setproctitle(title):
pass
class StatsCollector(multiprocessing.Process):
def __init__(self, queue):
super(StatsCollector, self).__init__()
self.queue = queue
def close(self):
pass
def run(self):
setproctitle("bucky: %s" % self.__class__.__name__)
interval = self.interval
while True:
start_timestamp = time.time()
interval = self.interval if self.collect() else interval+interval
stop_timestamp = time.time()
interval = min(interval, 300)
interval = interval - (stop_timestamp - start_timestamp)
if interval > 0.1:
time.sleep(interval)
def collect(self):
raise NotImplementedError()
def add_stat(self, name, value, timestamp, **metadata):
if metadata:
if self.metadata:
metadata.update(self.metadata)
else:
metadata = self.metadata
if metadata:
self.queue.put((None, name, value, timestamp, metadata))
else:
self.queue.put((None, name, value, timestamp))
def merge_dicts(self, *dicts):
ret = {}
for d in dicts:
if d:
ret.update(d)
return ret
| Change the back-off algo for failures | Change the back-off algo for failures
| Python | apache-2.0 | jsiembida/bucky3 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.