Dataset fields (each record below lists these values in order):
commit: string, length 40
old_file: string, length 4–264
new_file: string, length 4–264
old_contents: string, length 0–3.26k
new_contents: string, length 1–4.43k
subject: string, length 15–624
message: string, length 15–4.7k
lang: string, 3 classes
license: string, 13 classes
repos: string, length 5–91.5k
f56d8b35aa7d1d2c06d5c98ef49696e829459042
log_request_id/tests.py
log_request_id/tests.py
import logging from django.test import TestCase, RequestFactory from log_request_id.middleware import RequestIDMiddleware from testproject.views import test_view class RequestIDLoggingTestCase(TestCase): def setUp(self): self.factory = RequestFactory() self.handler = logging.getLogger('testproject').handlers[0] def test_id_generation(self): request = self.factory.get('/') middleware = RequestIDMiddleware() middleware.process_request(request) self.assertTrue(hasattr(request, 'id')) test_view(request) self.assertTrue(request.id in self.handler.messages[0])
import logging from django.test import TestCase, RequestFactory from log_request_id.middleware import RequestIDMiddleware from testproject.views import test_view class RequestIDLoggingTestCase(TestCase): def setUp(self): self.factory = RequestFactory() self.handler = logging.getLogger('testproject').handlers[0] self.handler.messages = [] def test_id_generation(self): request = self.factory.get('/') middleware = RequestIDMiddleware() middleware.process_request(request) self.assertTrue(hasattr(request, 'id')) test_view(request) self.assertTrue(request.id in self.handler.messages[0]) def test_external_id_in_http_header(self): with self.settings(LOG_REQUEST_ID_HEADER='REQUEST_ID_HEADER'): request = self.factory.get('/') request.META['REQUEST_ID_HEADER'] = 'some_request_id' middleware = RequestIDMiddleware() middleware.process_request(request) self.assertEqual(request.id, 'some_request_id') test_view(request) self.assertTrue('some_request_id' in self.handler.messages[0])
Add test for externally-generated request IDs
Add test for externally-generated request IDs
Python
bsd-2-clause
dabapps/django-log-request-id
1002f40dc0ca118308144d3a51b696815501519f
account_direct_debit/wizard/payment_order_create.py
account_direct_debit/wizard/payment_order_create.py
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2013 Therp BV (<http://therp.nl>). # # All other contributions are (C) by their respective contributors # # All Rights Reserved # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import models, api class PaymentOrderCreate(models.TransientModel): _inherit = 'payment.order.create' @api.multi def extend_payment_order_domain(self, payment_order, domain): super(PaymentOrderCreate, self).extend_payment_order_domain( payment_order, domain) if payment_order.payment_order_type == 'debit': # With the new system with bank.payment.line, we want # to be able to have payment lines linked to customer # invoices and payment lines linked to customer refunds # in order to debit the customer of the total of his # invoices minus his refunds domain += [('account_id.type', '=', 'receivable')] return True
# -*- coding: utf-8 -*- # © 2013 Therp BV (<http://therp.nl>) # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from openerp import models, api class PaymentOrderCreate(models.TransientModel): _inherit = 'payment.order.create' @api.multi def extend_payment_order_domain(self, payment_order, domain): super(PaymentOrderCreate, self).extend_payment_order_domain( payment_order, domain) if payment_order.payment_order_type == 'debit': # For receivables, propose all unreconciled debit lines, # including partially reconciled ones. # If they are partially reconciled with a customer refund, # the residual will be added to the payment order. # # For payables, normally suppliers will be the initiating party # for possible supplier refunds (via a transfer for example), # or they keep the amount for decreasing future supplier invoices, # so there's not too much sense for adding them to a direct debit # order domain += [ ('debit', '>', 0), ('account_id.type', '=', 'receivable'), ] return True
Fix move lines domain for direct debits
[FIX] account_direct_debit: Fix move lines domain for direct debits
Python
agpl-3.0
acsone/bank-payment,diagramsoftware/bank-payment,CompassionCH/bank-payment,CompassionCH/bank-payment,open-synergy/bank-payment,hbrunn/bank-payment
45f0cd033938a5c28907e84fcbc8f5f8e93d0c65
st2actions/st2actions/cmd/history.py
st2actions/st2actions/cmd/history.py
import eventlet import os import sys from oslo.config import cfg from st2common import log as logging from st2common.models.db import db_setup from st2common.models.db import db_teardown from st2actions import config from st2actions import history LOG = logging.getLogger(__name__) eventlet.monkey_patch( os=True, select=True, socket=True, thread=False if '--use-debugger' in sys.argv else True, time=True) def _setup(): # Parse args to setup config. config.parse_args() # Setup logging. logging.setup(cfg.CONF.history.logging) # All other setup which requires config to be parsed and logging to be correctly setup. db_setup(cfg.CONF.database.db_name, cfg.CONF.database.host, cfg.CONF.database.port) def _run_worker(): LOG.info('(PID=%s) History worker started.', os.getpid()) try: history.work() except (KeyboardInterrupt, SystemExit): LOG.info('(PID=%s) History worker stopped.', os.getpid()) except: return 1 return 0 def _teardown(): db_teardown() def main(): try: _setup() return _run_worker() except: LOG.exception('(PID=%s) History worker quit due to exception.', os.getpid()) return 1 finally: _teardown()
import eventlet import os import sys from oslo.config import cfg from st2common import log as logging from st2common.models.db import db_setup from st2common.models.db import db_teardown from st2actions import config from st2actions import history LOG = logging.getLogger(__name__) eventlet.monkey_patch( os=True, select=True, socket=True, thread=False if '--use-debugger' in sys.argv else True, time=True) def _setup(): # Parse args to setup config. config.parse_args() # Setup logging. logging.setup(cfg.CONF.history.logging) # All other setup which requires config to be parsed and logging to be correctly setup. db_setup(cfg.CONF.database.db_name, cfg.CONF.database.host, cfg.CONF.database.port) def _teardown(): db_teardown() def main(): try: _setup() LOG.info('(PID=%s) Historian started.', os.getpid()) history.work() except (KeyboardInterrupt, SystemExit): LOG.info('(PID=%s) Historian stopped.', os.getpid()) return 0 except: LOG.exception('(PID=%s) Historian quit due to exception.', os.getpid()) return 1 finally: _teardown() return 0
Move code from _run_worker into main
Move code from _run_worker into main
Python
apache-2.0
dennybaa/st2,grengojbo/st2,pinterb/st2,alfasin/st2,jtopjian/st2,peak6/st2,nzlosh/st2,punalpatel/st2,armab/st2,StackStorm/st2,nzlosh/st2,peak6/st2,alfasin/st2,Plexxi/st2,StackStorm/st2,emedvedev/st2,nzlosh/st2,pixelrebel/st2,Plexxi/st2,lakshmi-kannan/st2,lakshmi-kannan/st2,tonybaloney/st2,pixelrebel/st2,StackStorm/st2,dennybaa/st2,emedvedev/st2,StackStorm/st2,tonybaloney/st2,punalpatel/st2,lakshmi-kannan/st2,Plexxi/st2,Itxaka/st2,emedvedev/st2,peak6/st2,jtopjian/st2,dennybaa/st2,armab/st2,pinterb/st2,alfasin/st2,nzlosh/st2,pixelrebel/st2,tonybaloney/st2,jtopjian/st2,Plexxi/st2,armab/st2,punalpatel/st2,Itxaka/st2,grengojbo/st2,grengojbo/st2,Itxaka/st2,pinterb/st2
989ff44354d624906d72f20aac933a9243214cf8
corehq/dbaccessors/couchapps/cases_by_server_date/by_owner_server_modified_on.py
corehq/dbaccessors/couchapps/cases_by_server_date/by_owner_server_modified_on.py
from __future__ import absolute_import from __future__ import unicode_literals from casexml.apps.case.models import CommCareCase from dimagi.utils.parsing import json_format_datetime def get_case_ids_modified_with_owner_since(domain, owner_id, reference_date): """ Gets all cases with a specified owner ID that have been modified since a particular reference_date (using the server's timestamp) """ return [ row['id'] for row in CommCareCase.get_db().view( 'cases_by_server_date/by_owner_server_modified_on', startkey=[domain, owner_id, json_format_datetime(reference_date)], endkey=[domain, owner_id, {}], include_docs=False, reduce=False ) ]
from __future__ import absolute_import from __future__ import unicode_literals from casexml.apps.case.models import CommCareCase from dimagi.utils.parsing import json_format_datetime def get_case_ids_modified_with_owner_since(domain, owner_id, reference_date, until_date=None): """ Gets all cases with a specified owner ID that have been modified since a particular reference_date (using the server's timestamp) """ return [ row['id'] for row in CommCareCase.get_db().view( 'cases_by_server_date/by_owner_server_modified_on', startkey=[domain, owner_id, json_format_datetime(reference_date)], endkey=[domain, owner_id, {} if not until_date else json_format_datetime(until_date)], include_docs=False, reduce=False ) ]
Make get_case_ids_modified_with_owner_since accept an end date as well
Make get_case_ids_modified_with_owner_since accept an end date as well
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
16d65c211b00871ac7384baa3934d88410e2c977
tests/test_planetary_test_data_2.py
tests/test_planetary_test_data_2.py
# -*- coding: utf-8 -*- from planetary_test_data import PlanetaryTestDataProducts import os def test_planetary_test_data_object(): """Tests simple PlanetaryTestDataProducts attributes.""" data = PlanetaryTestDataProducts() assert data.tags == ['core'] assert data.all_products is None # handle running this test individually versus within a suite if os.path.exists('tests'): assert data.directory == os.path.join('tests', 'mission_data') else: assert data.directory == os.path.join('mission_data') assert os.path.exists(data.data_path) def test_planetary_test_core_products(): """Tests the list of core data products.""" data = PlanetaryTestDataProducts() assert data.tags == ['core'] assert u'2p129641989eth0361p2600r8m1.img' in data.products assert u'1p190678905erp64kcp2600l8c1.img' in data.products def test_planetary_test_all_products(): """Tests the list of all data products.""" data = PlanetaryTestDataProducts(all_products=True) assert len(data.products) == 151 assert data.all_products is True
# -*- coding: utf-8 -*- from planetary_test_data import PlanetaryTestDataProducts import os def test_planetary_test_data_object(): """Tests simple PlanetaryTestDataProducts attributes.""" data = PlanetaryTestDataProducts() assert data.tags == ['core'] assert data.all_products is None # handle running this test individually versus within a suite if os.path.exists('tests'): assert data.directory == os.path.join('tests', 'mission_data') else: assert data.directory == os.path.join('mission_data') assert os.path.exists(data.data_path) def test_planetary_test_core_products(): """Tests the list of core data products.""" data = PlanetaryTestDataProducts() assert data.tags == ['core'] assert u'2p129641989eth0361p2600r8m1.img' in data.products assert u'1p190678905erp64kcp2600l8c1.img' in data.products assert u'0047MH0000110010100214C00_DRCL.IMG' in data.products assert u'1p134482118erp0902p2600r8m1.img' in data.products assert u'h58n3118.img' in data.products assert u'r01090al.img' in data.products def test_planetary_test_all_products(): """Tests the list of all data products.""" data = PlanetaryTestDataProducts(all_products=True) assert len(data.products) == 151 assert data.all_products is True
Update test to account for more core products
Update test to account for more core products
Python
bsd-3-clause
pbvarga1/planetary_test_data,planetarypy/planetary_test_data
1ee414611fa6e01516d545bb284695a62bd69f0a
rtrss/daemon.py
rtrss/daemon.py
import sys import os import logging import atexit from rtrss.basedaemon import BaseDaemon _logger = logging.getLogger(__name__) class WorkerDaemon(BaseDaemon): def run(self): _logger.info('Daemon started ith pid %d', os.getpid()) from rtrss.worker import app_init, worker_action worker_action('import_categories') # TODO run() _logger.info('Daemon is done and exiting') def start(self): _logger.info('Starting daemon') super(WorkerDaemon, self).start() def stop(self): _logger.info('Stopping daemon') super(WorkerDaemon, self).stop() def restart(self): _logger.info('Restarting daemon') super(WorkerDaemon, self).restart() def make_daemon(config): '''Returns WorkerDaemon instance''' pidfile = os.path.join(config.DATA_DIR, 'daemon.pid') logdir = os.environ.get('OPENSHIFT_LOG_DIR') or config.DATA_DIR logfile = os.path.join(logdir, 'daemon.log') return WorkerDaemon(pidfile, stdout=logfile, stderr=logfile)
import os import logging from rtrss.basedaemon import BaseDaemon _logger = logging.getLogger(__name__) class WorkerDaemon(BaseDaemon): def run(self): _logger.info('Daemon started ith pid %d', os.getpid()) from rtrss.worker import worker_action worker_action('run') _logger.info('Daemon is done and exiting') def start(self): _logger.info('Starting daemon') super(WorkerDaemon, self).start() def stop(self): _logger.info('Stopping daemon') super(WorkerDaemon, self).stop() def restart(self): _logger.info('Restarting daemon') super(WorkerDaemon, self).restart() def make_daemon(config): '''Returns WorkerDaemon instance''' pidfile = os.path.join(config.DATA_DIR, 'daemon.pid') logdir = os.environ.get('OPENSHIFT_LOG_DIR') or config.DATA_DIR logfile = os.path.join(logdir, 'daemon.log') return WorkerDaemon(pidfile, stdout=logfile, stderr=logfile)
Change debug action to production
Change debug action to production
Python
apache-2.0
notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss
7ab9f281bf891e00d97278e3dba73eaeffe3799a
kaggle/titanic/categorical_and_scaler_prediction.py
kaggle/titanic/categorical_and_scaler_prediction.py
import pandas def main(): train_all = pandas.DataFrame.from_csv('train.csv') train = train_all[['Survived', 'Sex', 'Fare']] print(train) if __name__ == '__main__': main()
import pandas from sklearn.naive_bayes import MultinomialNB from sklearn.cross_validation import train_test_split from sklearn.preprocessing import LabelEncoder def main(): train_all = pandas.DataFrame.from_csv('train.csv') train = train_all[['Survived', 'Sex', 'Fare']][:20] gender_label = LabelEncoder() train.Sex = gender_label.fit_transform(train.Sex) X = train[['Sex', 'Fare']] y = train['Survived'] X_train, X_test, y_train, y_test = train_test_split( X, y, test_size=0.33, random_state=42) clf = MultinomialNB() clf.fit(X_train, y_train) print(clf.predict(X_test)) if __name__ == '__main__': main()
Make predictions with gender and ticket price
Make predictions with gender and ticket price
Python
mit
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
9a4d608471b31550038d8ce43f6515bbb330e68a
tests.py
tests.py
import tcpstat
#!/usr/bin/python # -*- coding: utf-8 -*- # The MIT License (MIT) # Copyright (c) 2014 Ivan Cai # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import tcpstat
Fix build failed on scrutinizer-ci
Fix build failed on scrutinizer-ci
Python
mit
caizixian/tcpstat,caizixian/tcpstat
4d9ae9a8041d56b0494fd94d7a14fead82a2e536
templated_email/utils.py
templated_email/utils.py
# From http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template from django.template import Context from django.template.loader_tags import BlockNode, ExtendsNode class BlockNotFound(Exception): pass def _iter_nodes(template, context, name, block_lookups): for node in template.template.nodelist: if isinstance(node, BlockNode) and node.name == name: # Rudimentary handling of extended templates, for issue #3 for i in xrange(len(node.nodelist)): n = node.nodelist[i] if isinstance(n, BlockNode) and n.name in block_lookups: node.nodelist[i] = block_lookups[n.name] context.template = template.template return node.render(context) elif isinstance(node, ExtendsNode): lookups = { n.name: n for n in node.nodelist if isinstance(n, BlockNode) } lookups.update(block_lookups) return _get_node(node.get_parent(context), context, name, lookups) raise BlockNotFound("Node '%s' could not be found in template." % name) def _get_node(template, context=Context(), name='subject', block_lookups={}): try: return _iter_nodes(template, context, name, block_lookups) except TypeError: context.template = template.template return _iter_nodes(template.template, context, name, block_lookups)
# From http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template from django.template import Context from django.template.loader_tags import BlockNode, ExtendsNode class BlockNotFound(Exception): pass def _iter_nodes(template, context, name, block_lookups): for node in template.template.nodelist: if isinstance(node, BlockNode) and node.name == name: # Rudimentary handling of extended templates, for issue #3 for i in range(len(node.nodelist)): n = node.nodelist[i] if isinstance(n, BlockNode) and n.name in block_lookups: node.nodelist[i] = block_lookups[n.name] context.template = template.template return node.render(context) elif isinstance(node, ExtendsNode): lookups = { n.name: n for n in node.nodelist if isinstance(n, BlockNode) } lookups.update(block_lookups) return _get_node(node.get_parent(context), context, name, lookups) raise BlockNotFound("Node '%s' could not be found in template." % name) def _get_node(template, context=Context(), name='subject', block_lookups={}): try: return _iter_nodes(template, context, name, block_lookups) except TypeError: context.template = template.template return _iter_nodes(template.template, context, name, block_lookups)
Change xrange for range for py3
Change xrange for range for py3
Python
mit
mypebble/django-templated-email,mypebble/django-templated-email
72125d84bf91201e15a93acb60fbc8f59af9aae8
plugins/PerObjectSettingsTool/PerObjectSettingsTool.py
plugins/PerObjectSettingsTool/PerObjectSettingsTool.py
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from UM.Tool import Tool from UM.Scene.Selection import Selection from UM.Application import Application from . import PerObjectSettingsModel class PerObjectSettingsTool(Tool): def __init__(self): super().__init__() self.setExposedProperties("Model", "SelectedIndex") def event(self, event): return False def getModel(self): return PerObjectSettingsModel.PerObjectSettingsModel() def getSelectedIndex(self): selected_object_id = id(Selection.getSelectedObject(0)) index = self.getModel().find("id", selected_object_id) return index
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from UM.Tool import Tool from UM.Scene.Selection import Selection from UM.Application import Application from UM.Qt.ListModel import ListModel from . import PerObjectSettingsModel class PerObjectSettingsTool(Tool): def __init__(self): super().__init__() self._model = None self.setExposedProperties("Model", "SelectedIndex") def event(self, event): return False def getModel(self): if not self._model: self._model = PerObjectSettingsModel.PerObjectSettingsModel() #For some reason, casting this model to itself causes the model to properly be cast to a QVariant, even though it ultimately inherits from QVariant. #Yeah, we have no idea either... return PerObjectSettingsModel.PerObjectSettingsModel(self._model) def getSelectedIndex(self): selected_object_id = id(Selection.getSelectedObject(0)) index = self.getModel().find("id", selected_object_id) return index
Fix problem with casting to QVariant
Fix problem with casting to QVariant This is a magical fix that Nallath and I found for a problem that shouldn't exist in the first place and sometimes doesn't exist at all and in the same time is a superposition of existing and not existing and it's all very complicated and an extremely weird hack. Casting this object to itself properly makes it castable to QVariant. Contributes to issue CURA-458.
Python
agpl-3.0
hmflash/Cura,senttech/Cura,senttech/Cura,ynotstartups/Wanhao,ynotstartups/Wanhao,fieldOfView/Cura,fieldOfView/Cura,totalretribution/Cura,totalretribution/Cura,Curahelper/Cura,Curahelper/Cura,hmflash/Cura
acdf380a5463ae8bd9c6dc76ce02069371b6f5fd
backend/restapp/restapp/urls.py
backend/restapp/restapp/urls.py
"""restapp URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url, include from django.contrib import admin urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')), ]
"""restapp URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url, include from django.contrib import admin from books.models import Author, Book from rest_framework import routers, serializers, viewsets class AuthorSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Author fields = ('first_name', 'last_name', 'description') class AuthorViewSet(viewsets.ModelViewSet): queryset = Author.objects.all() serializer_class = AuthorSerializer class BookSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Book fields = ('name', 'description', 'author') class BookViewSet(viewsets.ModelViewSet): queryset = Book.objects.all() serializer_class = BookSerializer router = routers.DefaultRouter() router.register(r'authors', AuthorViewSet) router.register(r'books', BookViewSet) urlpatterns = [ url(r'^', include(router.urls)), url(r'^admin/', admin.site.urls), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')), ]
Add simple serializers for books and authors
Add simple serializers for books and authors
Python
mit
TomaszGabrysiak/django-rest-angular-seed,TomaszGabrysiak/django-rest-angular-seed,TomaszGabrysiak/django-rest-angular-seed
a42bc9cfc862fd91a498b2738caabf7ca945168b
Pig-Latin/pig_latin.py
Pig-Latin/pig_latin.py
class Pig_latin(object): vowels = ["a", "e" , "i", "o", "u"] def __init__(self, sentence): self.sentence = sentence def convert_sentence(self): new_sentence = self.sentence.split(" ") for word in new_sentence: counter = 0 if word[0] in self.vowels: continue for letter in word: if counter == 1: continue if letter in self.vowels: counter += 1 else: pass convert = Pig_latin("Hello there") convert.convert_sentence()
Add initial setup for solution
Add initial setup for solution
Python
mit
Bigless27/Python-Projects
90265098d21fa900a4a2d86719efc95c352812f4
mopidy_somafm/__init__.py
mopidy_somafm/__init__.py
from __future__ import unicode_literals import os from mopidy import config, ext __version__ = '0.3.0' class Extension(ext.Extension): dist_name = 'Mopidy-SomaFM' ext_name = 'somafm' version = __version__ def get_default_config(self): conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf') return config.read(conf_file) def get_config_schema(self): schema = super(Extension, self).get_config_schema() return schema def get_backend_classes(self): from .actor import SomaFMBackend return [SomaFMBackend]
from __future__ import unicode_literals import os from mopidy import config, ext __version__ = '0.3.0' class Extension(ext.Extension): dist_name = 'Mopidy-SomaFM' ext_name = 'somafm' version = __version__ def get_default_config(self): conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf') return config.read(conf_file) def get_config_schema(self): schema = super(Extension, self).get_config_schema() return schema def setup(self, registry): from .actor import SomaFMBackend registry.add('backend', SomaFMBackend)
Use new extension setup() API
Use new extension setup() API
Python
mit
AlexandrePTJ/mopidy-somafm
a7908d39e24384881c30042e1b4c7e93e85eb38e
test/TestTaskIncludes.py
test/TestTaskIncludes.py
import os import unittest from ansiblelint import Runner, RulesCollection class TestTaskIncludes(unittest.TestCase): def setUp(self): rulesdir = os.path.join('lib', 'ansiblelint', 'rules') self.rules = RulesCollection.create_from_directory(rulesdir) def test_block_included_tasks(self): filename = 'test/blockincludes.yml' runner = Runner(self.rules, filename, [], [], []) runner.run() self.assertEqual(len(runner.playbooks), 4) def test_block_included_tasks_with_rescue_and_always(self): filename = 'test/blockincludes2.yml' runner = Runner(self.rules, filename, [], [], []) runner.run() self.assertEqual(len(runner.playbooks), 4) def test_included_tasks(self): filename = 'test/taskincludes.yml' runner = Runner(self.rules, filename, [], [], []) runner.run() self.assertEqual(len(runner.playbooks), 4)
import os import unittest from ansiblelint import Runner, RulesCollection class TestTaskIncludes(unittest.TestCase): def setUp(self): rulesdir = os.path.join('lib', 'ansiblelint', 'rules') self.rules = RulesCollection.create_from_directory(rulesdir) def test_block_included_tasks(self): filename = 'test/blockincludes.yml' runner = Runner(self.rules, filename, [], [], []) runner.run() self.assertEqual(len(runner.playbooks), 4) def test_block_included_tasks_with_rescue_and_always(self): filename = 'test/blockincludes2.yml' runner = Runner(self.rules, filename, [], [], []) runner.run() self.assertEqual(len(runner.playbooks), 4) def test_included_tasks(self): filename = 'test/taskincludes.yml' runner = Runner(self.rules, filename, [], [], []) runner.run() self.assertEqual(len(runner.playbooks), 4) def test_include_tasks_with_block_include(self): filename = 'test/include-in-block.yml' runner = Runner(self.rules, filename, [], [], []) runner.run() self.assertEqual(len(runner.playbooks), 3)
Add test that exercises block includes
Add test that exercises block includes
Python
mit
MatrixCrawler/ansible-lint,dataxu/ansible-lint,willthames/ansible-lint
770328ea42182edc216d5abe4430e3ffd51a7793
djmoney/__init__.py
djmoney/__init__.py
from django.db import models from django.utils.encoding import smart_unicode from django.utils import formats from django.utils import timezone from django.core.exceptions import ObjectDoesNotExist from django.contrib.admin.util import lookup_field from django.utils.safestring import mark_safe from django.utils.html import conditional_escape from django.db.models.fields.related import ManyToManyRel from django.contrib.admin import util as admin_util def djmoney_display_for_field(value, field): from django.contrib.admin.templatetags.admin_list import _boolean_icon from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE try: if field.flatchoices: return dict(field.flatchoices).get(value, EMPTY_CHANGELIST_VALUE) # NullBooleanField needs special-case null-handling, so it comes # before the general null test. elif isinstance(field, models.BooleanField) or isinstance(field, models.NullBooleanField): return _boolean_icon(value) elif value is None: return EMPTY_CHANGELIST_VALUE elif isinstance(field, models.DateTimeField): return formats.localize(timezone.localtime(value)) elif isinstance(field, models.DateField) or isinstance(field, models.TimeField): return formats.localize(value) elif isinstance(field, models.DecimalField): return formats.number_format(value, field.decimal_places) elif isinstance(field, models.FloatField): return formats.number_format(value) else: return smart_unicode(value) except: return smart_unicode(value) admin_util.display_for_field = djmoney_display_for_field def djmoney_contents(self): from django.contrib.admin.templatetags.admin_list import _boolean_icon from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE field, obj, model_admin = self.field['field'], self.form.instance, self.model_admin try: f, attr, value = lookup_field(field, obj, model_admin) except (AttributeError, ValueError, ObjectDoesNotExist): result_repr = EMPTY_CHANGELIST_VALUE else: if f is None: boolean = getattr(attr, "boolean", False) if boolean: result_repr = _boolean_icon(value) else: result_repr = smart_unicode(value) if getattr(attr, "allow_tags", False): result_repr = mark_safe(result_repr) else: if value is None: result_repr = EMPTY_CHANGELIST_VALUE elif isinstance(f.rel, ManyToManyRel): result_repr = ", ".join(map(unicode, value.all())) else: result_repr = djmoney_display_for_field(value, f) return conditional_escape(result_repr) from django.contrib.admin.helpers import AdminReadonlyField AdminReadonlyField.contents = djmoney_contents
Allow django-money to be specified as read-only in a model
Allow django-money to be specified as read-only in a model Monkey patch the Django admin so that we can display django-money fields read-only. In order to do this, we simply catch the exception that results from trying to convert a money object (e.g. '10 USD') into a floating field. And then we call just ask for the string representation of the field.
Python
bsd-3-clause
rescale/django-money,iXioN/django-money,tsouvarev/django-money,tsouvarev/django-money,recklessromeo/django-money,iXioN/django-money,AlexRiina/django-money,recklessromeo/django-money
0ba063edc4aec690efca5c5ba9faf64042bb7707
demo/demo/urls.py
demo/demo/urls.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf.urls import patterns, url from .views import HomePageView, FormHorizontalView, FormInlineView, PaginationView, FormWithFilesView, \ DefaultFormView, MiscView, DefaultFormsetView, DefaultFormByFieldView urlpatterns = [ url(r'^$', HomePageView.as_view(), name='home'), url(r'^formset$', DefaultFormsetView.as_view(), name='formset_default'), url(r'^form$', DefaultFormView.as_view(), name='form_default'), url(r'^form_by_field$', DefaultFormByFieldView.as_view(), name='form_by_field'), url(r'^form_horizontal$', FormHorizontalView.as_view(), name='form_horizontal'), url(r'^form_inline$', FormInlineView.as_view(), name='form_inline'), url(r'^form_with_files$', FormWithFilesView.as_view(), name='form_with_files'), url(r'^pagination$', PaginationView.as_view(), name='pagination'), url(r'^misc$', MiscView.as_view(), name='misc'), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf.urls import url from .views import HomePageView, FormHorizontalView, FormInlineView, PaginationView, FormWithFilesView, \ DefaultFormView, MiscView, DefaultFormsetView, DefaultFormByFieldView urlpatterns = [ url(r'^$', HomePageView.as_view(), name='home'), url(r'^formset$', DefaultFormsetView.as_view(), name='formset_default'), url(r'^form$', DefaultFormView.as_view(), name='form_default'), url(r'^form_by_field$', DefaultFormByFieldView.as_view(), name='form_by_field'), url(r'^form_horizontal$', FormHorizontalView.as_view(), name='form_horizontal'), url(r'^form_inline$', FormInlineView.as_view(), name='form_inline'), url(r'^form_with_files$', FormWithFilesView.as_view(), name='form_with_files'), url(r'^pagination$', PaginationView.as_view(), name='pagination'), url(r'^misc$', MiscView.as_view(), name='misc'), ]
Remove obsolete import (removed in Django 1.10)
Remove obsolete import (removed in Django 1.10)
Python
bsd-3-clause
dyve/django-bootstrap3,dyve/django-bootstrap3,zostera/django-bootstrap4,zostera/django-bootstrap4
5e9fda28089a11863dcc4610f5953dbe942165db
numpy/_array_api/_constants.py
numpy/_array_api/_constants.py
from ._array_object import ndarray from ._dtypes import float64 import numpy as np e = ndarray._new(np.array(np.e, dtype=float64)) inf = ndarray._new(np.array(np.inf, dtype=float64)) nan = ndarray._new(np.array(np.nan, dtype=float64)) pi = ndarray._new(np.array(np.pi, dtype=float64))
import numpy as np e = np.e inf = np.inf nan = np.nan pi = np.pi
Make the array API constants Python floats
Make the array API constants Python floats
Python
mit
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
faa67c81ad2ebb8ba8cb407982cbced72d1fa899
tests/test_config_tree.py
tests/test_config_tree.py
import pytest from pyhocon.config_tree import ConfigTree from pyhocon.exceptions import ConfigMissingException, ConfigWrongTypeException class TestConfigParser(object): def test_config_tree_quoted_string(self): config_tree = ConfigTree() config_tree.put("a.b.c", "value") assert config_tree.get("a.b.c") == "value" with pytest.raises(ConfigMissingException): assert config_tree.get("a.b.d") with pytest.raises(ConfigMissingException): config_tree.get("a.d.e") with pytest.raises(ConfigWrongTypeException): config_tree.get("a.b.c.e") def test_config_tree_number(self): config_tree = ConfigTree() config_tree.put("a.b.c", 5) assert config_tree.get("a.b.c") == 5
import pytest from pyhocon.config_tree import ConfigTree from pyhocon.exceptions import ConfigMissingException, ConfigWrongTypeException class TestConfigParser(object): def test_config_tree_quoted_string(self): config_tree = ConfigTree() config_tree.put("a.b.c", "value") assert config_tree.get("a.b.c") == "value" with pytest.raises(ConfigMissingException): assert config_tree.get("a.b.d") with pytest.raises(ConfigMissingException): config_tree.get("a.d.e") with pytest.raises(ConfigWrongTypeException): config_tree.get("a.b.c.e") def test_config_tree_number(self): config_tree = ConfigTree() config_tree.put("a.b.c", 5) assert config_tree.get("a.b.c") == 5 def test_config_tree_iterator(self): config_tree = ConfigTree() config_tree.put("a.b.c", 5) for k in config_tree: assert k == "a" assert config_tree[k]["b.c"] == 5 def test_config_logging(self): import logging, logging.config config_tree = ConfigTree() config_tree.put('version', 1) config_tree.put('root.level', logging.INFO) assert dict(config_tree)['version'] == 1 logging.config.dictConfig(config_tree)
Add failing tests for iteration and logging config
Add failing tests for iteration and logging config
Python
apache-2.0
acx2015/pyhocon,chimpler/pyhocon,vamega/pyhocon,peoplepattern/pyhocon
07dcdb9de47ac88a2e0f3ecec257397a0272f112
extended_choices/__init__.py
extended_choices/__init__.py
"""Little helper application to improve django choices (for fields)""" from __future__ import unicode_literals from .choices import Choices __author__ = 'Stephane "Twidi" Ange;' __contact__ = "[email protected]" __homepage__ = "https://pypi.python.org/pypi/django-extended-choices" __version__ = "1.1"
"""Little helper application to improve django choices (for fields)""" from __future__ import unicode_literals from .choices import Choices, OrderedChoices __author__ = 'Stephane "Twidi" Ange;' __contact__ = "[email protected]" __homepage__ = "https://pypi.python.org/pypi/django-extended-choices" __version__ = "1.1"
Make OrderedChoices available at the package root
Make OrderedChoices available at the package root
Python
bsd-3-clause
twidi/django-extended-choices
d07a7ad25f69a18c57c50d6c32df212e1f987bd4
www/tests/test_collections.py
www/tests/test_collections.py
import collections _d=collections.defaultdict(int) _d['a']+=1 _d['a']+=2 _d['b']+=4 assert _d['a'] == 3 assert _d['b'] == 4 s = 'mississippi' for k in s: _d[k] += 1 _values=list(_d.values()) _values.sort() assert _values == [1, 2, 3, 4, 4, 4] _keys=list(_d.keys()) _keys.sort() assert _keys == ['a', 'b', 'i', 'm', 'p', 's'] #now try with default being list (ie, empty list) _listdict=collections.defaultdict(list) for _i in range(10): _listdict['mylist'].append(_i) assert _listdict['not called'] == [] assert _listdict['mylist'] == [0,1,2,3,4,5,6,7,8,9]
import collections _d=collections.defaultdict(int) _d['a']+=1 _d['a']+=2 _d['b']+=4 assert _d['a'] == 3 assert _d['b'] == 4 s = 'mississippi' for k in s: _d[k] += 1 _values=list(_d.values()) _values.sort() assert _values == [1, 2, 3, 4, 4, 4] _keys=list(_d.keys()) _keys.sort() assert _keys == ['a', 'b', 'i', 'm', 'p', 's'] #now try with default being list (ie, empty list) _listdict=collections.defaultdict(list) for _i in range(10): _listdict['mylist'].append(_i) assert _listdict['not called'] == [] assert _listdict['mylist'] == [0,1,2,3,4,5,6,7,8,9] # namedtuple a = collections.namedtuple("foo", "bar bash bing")(1, 2, 3) assert a.bar == 1 assert a.bash == 2 assert repr(a) == 'foo(bar=1, bash=2, bing=3)'
Add a test on namedtuple
Add a test on namedtuple
Python
bsd-3-clause
kikocorreoso/brython,Mozhuowen/brython,Hasimir/brython,Isendir/brython,Isendir/brython,amrdraz/brython,jonathanverner/brython,kevinmel2000/brython,brython-dev/brython,Mozhuowen/brython,jonathanverner/brython,Hasimir/brython,rubyinhell/brython,Hasimir/brython,molebot/brython,Isendir/brython,JohnDenker/brython,olemis/brython,kevinmel2000/brython,molebot/brython,Mozhuowen/brython,kevinmel2000/brython,brython-dev/brython,rubyinhell/brython,kikocorreoso/brython,amrdraz/brython,Lh4cKg/brython,rubyinhell/brython,Lh4cKg/brython,Hasimir/brython,olemis/brython,olemis/brython,jonathanverner/brython,kikocorreoso/brython,amrdraz/brython,molebot/brython,Lh4cKg/brython,molebot/brython,JohnDenker/brython,jonathanverner/brython,Lh4cKg/brython,olemis/brython,kevinmel2000/brython,rubyinhell/brython,Mozhuowen/brython,amrdraz/brython,brython-dev/brython,Isendir/brython,JohnDenker/brython,JohnDenker/brython
888584a49e697551c4f680cc8651be2fe80fc65d
configgen/generators/ppsspp/ppssppGenerator.py
configgen/generators/ppsspp/ppssppGenerator.py
#!/usr/bin/env python import Command #~ import reicastControllers import recalboxFiles from generators.Generator import Generator import ppssppControllers import shutil import os.path import ConfigParser class PPSSPPGenerator(Generator): # Main entry of the module # Configure fba and return a command def generate(self, system, rom, playersControllers): if not system.config['configfile']: # Write emu.cfg to map joysticks, init with the default emu.cfg Config = ConfigParser.ConfigParser() Config.read(recalboxFiles.reicastConfigInit) section = "input" # For each pad detected for index in playersControllers : controller = playersControllers[index] # we only care about player 1 if controller.player != "1": continue ppssppControllers.generateControllerConfig(controller) # the command to run #~ commandArray = [recalboxFiles.ppssppBin, rom, "--escape-exit"] commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom] return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"XDG_CONFIG_HOME":recalboxFiles.CONF, "SDL_VIDEO_GL_DRIVER": "/usr/lib/libGLESv2.so"}, delay=1)
#!/usr/bin/env python import Command #~ import reicastControllers import recalboxFiles from generators.Generator import Generator import ppssppControllers import shutil import os.path import ConfigParser class PPSSPPGenerator(Generator): # Main entry of the module # Configure fba and return a command def generate(self, system, rom, playersControllers): if not system.config['configfile']: for index in playersControllers : controller = playersControllers[index] # we only care about player 1 if controller.player != "1": continue ppssppControllers.generateControllerConfig(controller) break # the command to run commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom] # The next line is a reminder on how to quit PPSSPP with just the HK #commandArray = [recalboxFiles.recalboxBins[system.config['emulator']], rom, "--escape-exit"] return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"XDG_CONFIG_HOME":recalboxFiles.CONF, "SDL_VIDEO_GL_DRIVER": "/usr/lib/libGLESv2.so"}, delay=1)
Remove a bad typo from reicast
Remove a bad typo from reicast
Python
mit
nadenislamarre/recalbox-configgen,recalbox/recalbox-configgen,digitalLumberjack/recalbox-configgen
d2d50e93911693c326b057a4c48f0a47d520f0a1
skimage/future/__init__.py
skimage/future/__init__.py
"""Functionality with an experimental API. Although you can count on the functions in this package being around in the future, the API may change with any version update **and will not follow the skimage two-version deprecation path**. Therefore, use the functions herein with care, and do not use them in production code that will depend on updated skimage versions. """
Add package docstring for skimage.future
Add package docstring for skimage.future
Python
bsd-3-clause
ClinicalGraphics/scikit-image,ClinicalGraphics/scikit-image,newville/scikit-image,michaelaye/scikit-image,WarrenWeckesser/scikits-image,Hiyorimi/scikit-image,rjeli/scikit-image,chriscrosscutler/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,dpshelio/scikit-image,juliusbierk/scikit-image,newville/scikit-image,pratapvardhan/scikit-image,robintw/scikit-image,vighneshbirodkar/scikit-image,GaZ3ll3/scikit-image,ofgulban/scikit-image,juliusbierk/scikit-image,ofgulban/scikit-image,keflavich/scikit-image,dpshelio/scikit-image,Hiyorimi/scikit-image,Britefury/scikit-image,bennlich/scikit-image,oew1v07/scikit-image,paalge/scikit-image,bennlich/scikit-image,bsipocz/scikit-image,bsipocz/scikit-image,keflavich/scikit-image,jwiggins/scikit-image,emon10005/scikit-image,vighneshbirodkar/scikit-image,oew1v07/scikit-image,chriscrosscutler/scikit-image,michaelpacer/scikit-image,Midafi/scikit-image,warmspringwinds/scikit-image,robintw/scikit-image,ajaybhat/scikit-image,michaelpacer/scikit-image,youprofit/scikit-image,vighneshbirodkar/scikit-image,michaelaye/scikit-image,Britefury/scikit-image,paalge/scikit-image,ajaybhat/scikit-image,rjeli/scikit-image,GaZ3ll3/scikit-image,emon10005/scikit-image,blink1073/scikit-image,jwiggins/scikit-image,blink1073/scikit-image,pratapvardhan/scikit-image,paalge/scikit-image,rjeli/scikit-image,Midafi/scikit-image,youprofit/scikit-image,warmspringwinds/scikit-image
cbdfc1b1cb4162256538576cabe2b6832aa83bca
django_mysqlpool/__init__.py
django_mysqlpool/__init__.py
from functools import wraps from django.db import connection def auto_close_db(f): "Ensures the database connection is closed when the function returns." @wraps(f) def wrapper(*args, **kwargs): try: return f(*args, **kwargs) finally: connection.close() return wrapper
from functools import wraps def auto_close_db(f): "Ensures the database connection is closed when the function returns." from django.db import connection @wraps(f) def wrapper(*args, **kwargs): try: return f(*args, **kwargs) finally: connection.close() return wrapper
Fix circular import when used with other add-ons that import django.db
Fix circular import when used with other add-ons that import django.db eg sorl_thumbnail: Traceback (most recent call last): File "/home/rpatterson/src/work/retrans/src/ReTransDjango/bin/manage", line 40, in <module> sys.exit(manage.main()) File "/home/rpatterson/src/work/retrans/src/ReTransDjango/retrans/manage.py", line 15, in main execute_manager(settings) File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/__init__.py", line 438, in execute_manager utility.execute() File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/__init__.py", line 379, in execute self.fetch_command(subcommand).run_from_argv(self.argv) File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/base.py", line 191, in run_from_argv self.execute(*args, **options.__dict__) File "/opt/src/eggs/Django-1.3-py2.7.egg/django/core/management/base.py", line 209, in execute translation.activate('en-us') File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/__init__.py", line 100, in activate return _trans.activate(language) File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 202, in activate _active.value = translation(language) File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 185, in translation default_translation = _fetch(settings.LANGUAGE_CODE) File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/translation/trans_real.py", line 162, in _fetch app = import_module(appname) File "/opt/src/eggs/Django-1.3-py2.7.egg/django/utils/importlib.py", line 35, in import_module __import__(name) File "/opt/src/eggs/sorl_thumbnail-11.12-py2.7.egg/sorl/thumbnail/__init__.py", line 1, in <module> from sorl.thumbnail.fields import ImageField File "/opt/src/eggs/sorl_thumbnail-11.12-py2.7.egg/sorl/thumbnail/fields.py", line 2, in <module> from django.db import models File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/__init__.py", line 78, in <module> connection = connections[DEFAULT_DB_ALIAS] File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/utils.py", line 94, in __getitem__ backend = load_backend(db['ENGINE']) File "/opt/src/eggs/Django-1.3-py2.7.egg/django/db/utils.py", line 47, in load_backend if backend_name not in available_backends: django.core.exceptions.ImproperlyConfigured: 'django_mysqlpool.backends.mysqlpool' isn't an available database backend. Try using django.db.backends.XXX, where XXX is one of: 'dummy', 'mysql', 'oracle', 'postgresql', 'postgresql_psycopg2', 'sqlite3' Error was: cannot import name connection
Python
mit
smartfile/django-mysqlpool
0f7816676eceb42f13786408f1d1a09527919a1e
Modules/Biophotonics/python/iMC/msi/io/spectrometerreader.py
Modules/Biophotonics/python/iMC/msi/io/spectrometerreader.py
# -*- coding: utf-8 -*- """ Created on Fri Aug 7 12:04:18 2015 @author: wirkert """ import numpy as np from msi.io.reader import Reader from msi.msi import Msi class SpectrometerReader(Reader): def __init__(self): pass def read(self, file_to_read): # our spectrometer like to follow german standards in files, we need # to switch to english ones transformed="" replacements = {',': '.', '\r\n': ''} with open(file_to_read) as infile: for line in infile: for src, target in replacements.iteritems(): line = line.replace(src, target) transformed = "\n".join([transformed, line]) for num, line in enumerate(transformed.splitlines(), 1): if ">>>>>Begin Spectral Data<<<<<" in line: break string_only_spectrum = "\n".join(transformed.splitlines()[num:]) data_vector = np.fromstring(string_only_spectrum, sep="\t").reshape(-1, 2) msi = Msi(data_vector[:, 1], {'wavelengths': data_vector[:, 0] * 10 ** -9}) return msi
# -*- coding: utf-8 -*- """ Created on Fri Aug 7 12:04:18 2015 @author: wirkert """ import numpy as np from msi.io.reader import Reader from msi.msi import Msi class SpectrometerReader(Reader): def __init__(self): pass def read(self, file_to_read): # our spectrometer like to follow german standards in files, we need # to switch to english ones transformed="" replacements = {',': '.', '\r\n': ''} with open(file_to_read) as infile: for line in infile: for src, target in replacements.iteritems(): line = line.replace(src, target) transformed = "\n".join([transformed, line]) for num, line in enumerate(transformed.splitlines(), 1): if ">>>>>Begin" in line: break for num_end, line in enumerate(transformed.splitlines(), 1): if ">>>>>End" in line: num_end -= 1 break string_only_spectrum = "\n".join(transformed.splitlines()[num:num_end]) data_vector = np.fromstring(string_only_spectrum, sep="\t").reshape(-1, 2) msi = Msi(data_vector[:, 1], {'wavelengths': data_vector[:, 0] * 10 ** -9}) return msi
Change SpectrometerReader a little so it can handle more data formats.
Change SpectrometerReader a little so it can handle more data formats.
Python
bsd-3-clause
MITK/MITK,iwegner/MITK,RabadanLab/MITKats,RabadanLab/MITKats,iwegner/MITK,fmilano/mitk,fmilano/mitk,RabadanLab/MITKats,RabadanLab/MITKats,fmilano/mitk,fmilano/mitk,MITK/MITK,RabadanLab/MITKats,RabadanLab/MITKats,fmilano/mitk,fmilano/mitk,iwegner/MITK,fmilano/mitk,MITK/MITK,iwegner/MITK,iwegner/MITK,MITK/MITK,MITK/MITK,iwegner/MITK,MITK/MITK
33a3ebacabda376826f0470129e8583e4974fd9d
examples/markdown/build.py
examples/markdown/build.py
# build.py #!/usr/bin/env python3 import os import jinja2 # Markdown to HTML library # https://pypi.org/project/Markdown/ import markdown from staticjinja import Site markdowner = markdown.Markdown(output_format="html5") def md_context(template): with open(template.filename) as f: markdown_content = f.read() return {'post_content_html': markdowner.convert(markdown_content)} def render_md(site, template, **kwargs): # Given a template such as posts/post1.md # Determine the post's title (post1) and it's directory (posts/) directory, fname = os.path.split(template.name) post_title, _ = fname.split(".") # Determine where the result will be streamed (build/posts/post1.html) out_dir = os.path.join(site.outpath, directory) post_fname = "{}.html".format(post_title) out = os.path.join(out_dir, post_fname) # Render and stream the result if not os.path.exists(out_dir): os.makedirs(out_dir) post_template = site.get_template("_post.html") post_template.stream(**kwargs).dump(out, encoding="utf-8") site = Site.make_site( searchpath='src', outpath='build', contexts=[('.*.md', md_context)], rules = [('.*.md', render_md)], ) site.render()
# build.py #!/usr/bin/env python3 import os # Markdown to HTML library # https://pypi.org/project/Markdown/ import markdown from staticjinja import Site markdowner = markdown.Markdown(output_format="html5") def md_context(template): with open(template.filename) as f: markdown_content = f.read() return {'post_content_html': markdowner.convert(markdown_content)} def render_md(site, template, **kwargs): # Given a template such as posts/post1.md # Determine the post's title (post1) and it's directory (posts/) directory, fname = os.path.split(template.name) post_title, _ = fname.split(".") # Determine where the result will be streamed (build/posts/post1.html) out_dir = os.path.join(site.outpath, directory) post_fname = "{}.html".format(post_title) out = os.path.join(out_dir, post_fname) # Render and stream the result if not os.path.exists(out_dir): os.makedirs(out_dir) post_template = site.get_template("_post.html") post_template.stream(**kwargs).dump(out, encoding="utf-8") site = Site.make_site( searchpath='src', outpath='build', contexts=[('.*.md', md_context)], rules = [('.*.md', render_md)], ) site.render()
Remove unneeded jinja2 import in markdown example
Remove unneeded jinja2 import in markdown example
Python
mit
Ceasar/staticjinja,Ceasar/staticjinja
c9735ce9ea330737cf47474ef420303c56a32873
apps/demos/admin.py
apps/demos/admin.py
from django.contrib import admin from .models import Submission class SubmissionAdmin(admin.ModelAdmin): list_display = ( 'title', 'creator', 'featured', 'hidden', 'tags', 'modified', ) admin.site.register(Submission, SubmissionAdmin)
from django.contrib import admin from .models import Submission class SubmissionAdmin(admin.ModelAdmin): list_display = ( 'title', 'creator', 'featured', 'hidden', 'tags', 'modified', ) list_editable = ( 'featured', 'hidden' ) admin.site.register(Submission, SubmissionAdmin)
Make featured and hidden flags editable from demo listing
Make featured and hidden flags editable from demo listing
Python
mpl-2.0
bluemini/kuma,openjck/kuma,hoosteeno/kuma,Elchi3/kuma,davehunt/kuma,anaran/kuma,robhudson/kuma,davidyezsetz/kuma,darkwing/kuma,anaran/kuma,chirilo/kuma,jezdez/kuma,yfdyh000/kuma,nhenezi/kuma,Elchi3/kuma,ronakkhunt/kuma,a2sheppy/kuma,groovecoder/kuma,SphinxKnight/kuma,escattone/kuma,jezdez/kuma,tximikel/kuma,a2sheppy/kuma,varunkamra/kuma,ollie314/kuma,ronakkhunt/kuma,biswajitsahu/kuma,hoosteeno/kuma,escattone/kuma,SphinxKnight/kuma,cindyyu/kuma,Elchi3/kuma,groovecoder/kuma,biswajitsahu/kuma,utkbansal/kuma,FrankBian/kuma,nhenezi/kuma,davehunt/kuma,openjck/kuma,jgmize/kuma,nhenezi/kuma,escattone/kuma,bluemini/kuma,jwhitlock/kuma,bluemini/kuma,varunkamra/kuma,RanadeepPolavarapu/kuma,biswajitsahu/kuma,SphinxKnight/kuma,carnell69/kuma,anaran/kuma,cindyyu/kuma,hoosteeno/kuma,ronakkhunt/kuma,cindyyu/kuma,mozilla/kuma,SphinxKnight/kuma,tximikel/kuma,scrollback/kuma,jezdez/kuma,mozilla/kuma,scrollback/kuma,SphinxKnight/kuma,surajssd/kuma,robhudson/kuma,YOTOV-LIMITED/kuma,FrankBian/kuma,tximikel/kuma,jgmize/kuma,robhudson/kuma,varunkamra/kuma,carnell69/kuma,yfdyh000/kuma,scrollback/kuma,jwhitlock/kuma,utkbansal/kuma,scrollback/kuma,varunkamra/kuma,chirilo/kuma,safwanrahman/kuma,Elchi3/kuma,cindyyu/kuma,bluemini/kuma,openjck/kuma,groovecoder/kuma,utkbansal/kuma,jezdez/kuma,surajssd/kuma,ronakkhunt/kuma,RanadeepPolavarapu/kuma,davidyezsetz/kuma,jgmize/kuma,utkbansal/kuma,surajssd/kuma,surajssd/kuma,whip112/Whip112,yfdyh000/kuma,biswajitsahu/kuma,davehunt/kuma,whip112/Whip112,FrankBian/kuma,jgmize/kuma,hoosteeno/kuma,chirilo/kuma,safwanrahman/kuma,mastizada/kuma,jezdez/kuma,robhudson/kuma,tximikel/kuma,mozilla/kuma,mastizada/kuma,surajssd/kuma,a2sheppy/kuma,tximikel/kuma,FrankBian/kuma,openjck/kuma,varunkamra/kuma,YOTOV-LIMITED/kuma,carnell69/kuma,RanadeepPolavarapu/kuma,MenZil/kuma,varunkamra/kuma,darkwing/kuma,tximikel/kuma,RanadeepPolavarapu/kuma,groovecoder/kuma,nhenezi/kuma,biswajitsahu/kuma,hoosteeno/kuma,FrankBian/kuma,carnell69/kuma,robhudson/kuma,MenZil/kuma,anaran/kuma,jgmize/kuma,cindyyu/kuma,mastizada/kuma,ollie314/kuma,safwanrahman/kuma,chirilo/kuma,whip112/Whip112,carnell69/kuma,anaran/kuma,utkbansal/kuma,ollie314/kuma,whip112/Whip112,safwanrahman/kuma,YOTOV-LIMITED/kuma,jwhitlock/kuma,jwhitlock/kuma,openjck/kuma,safwanrahman/kuma,surajssd/kuma,openjck/kuma,cindyyu/kuma,MenZil/kuma,groovecoder/kuma,nhenezi/kuma,a2sheppy/kuma,YOTOV-LIMITED/kuma,ollie314/kuma,ronakkhunt/kuma,darkwing/kuma,chirilo/kuma,ollie314/kuma,groovecoder/kuma,robhudson/kuma,YOTOV-LIMITED/kuma,darkwing/kuma,a2sheppy/kuma,RanadeepPolavarapu/kuma,mastizada/kuma,MenZil/kuma,biswajitsahu/kuma,ollie314/kuma,anaran/kuma,whip112/Whip112,RanadeepPolavarapu/kuma,bluemini/kuma,YOTOV-LIMITED/kuma,yfdyh000/kuma,mozilla/kuma,ronakkhunt/kuma,davidyezsetz/kuma,davidyezsetz/kuma,davehunt/kuma,hoosteeno/kuma,Elchi3/kuma,SphinxKnight/kuma,MenZil/kuma,davidyezsetz/kuma,mozilla/kuma,jwhitlock/kuma,jgmize/kuma,MenZil/kuma,darkwing/kuma,davehunt/kuma,yfdyh000/kuma,jezdez/kuma,utkbansal/kuma,davehunt/kuma,carnell69/kuma,scrollback/kuma,chirilo/kuma,yfdyh000/kuma,safwanrahman/kuma,bluemini/kuma,darkwing/kuma,whip112/Whip112
2fe02384ba4d5a8dee493d8fa76a3d0a6c440c01
lib/python/webtest/site.py
lib/python/webtest/site.py
#!/usr/bin/env python from twisted.web.server import Site from webtest.session import RedisSessionFactory from webtest.request import RedisRequest from webtest import log logger = log.get_logger() class RedisSite(Site): sessionFactory = RedisSessionFactory requestFactory = RedisRequest def makeSession(self): """ Generate a new Session instance """ uid = self._mkuid() return self.sessionFactory.retrieve(uid, reactor=self._reactor) def getSession(self, uid): """ Get a previously generated session, by its unique ID. This raises a KeyError if the session is not found. """ return self.sessionFactory.retrieve(uid, reactor=self._reactor)
#!/usr/bin/env python from twisted.web.server import Site from webtest.session_factory import RedisSessionFactory from webtest.request import RedisRequest from webtest import log logger = log.get_logger() class RedisSite(Site): sessionFactory = RedisSessionFactory requestFactory = RedisRequest def makeSession(self): """ Generate a new Session instance """ uid = self._mkuid() return self.sessionFactory.retrieve(uid, reactor=self._reactor) def getSession(self, uid): """ Get a previously generated session, by its unique ID. This raises a KeyError if the session is not found. """ return self.sessionFactory.retrieve(uid, reactor=self._reactor)
Move RedisSessionFactory into its own module
Move RedisSessionFactory into its own module
Python
mit
donalm/webtest,donalm/webtest
108b0374d07ca33e90d963692f51e8a66e6d805c
common/middleware/local_node.py
common/middleware/local_node.py
class LocalNodeMiddleware(object): """ Ensures a Node that represents the local server always exists. No other suitable hook for code that's run once and can access the server's host name was found. A migration was not suitable for the second reason. """ def __init__(self): self.local_node_created = False def process_request(self, request): if not self.local_node_created: from dashboard.models import Node nodes = Node.objects.filter(local=True) host = "http://" + request.get_host() if host[-1] != "/": host += "/" service = host + "service/" if len(nodes) == 0: node = Node(name="Local", website_url=host, service_url=service, local=True) node.save() elif len(nodes) == 1: node = nodes[0] node.host = host node.service = service node.save() else: raise RuntimeError("More than one local node found in Nodes table. Please fix before continuing.") self.local_node_created = True return None
class LocalNodeMiddleware(object): """ Ensures a Node that represents the local server always exists. No other suitable hook for code that's run once and can access the server's host name was found. A migration was not suitable for the second reason. """ def __init__(self): self.local_node_created = False def process_request(self, request): if not self.local_node_created: from dashboard.models import Node nodes = Node.objects.filter(local=True) host = "http://" + request.get_host() if host[-1] != "/": host += "/" service = host + "service/" if len(nodes) == 0: node = Node(name="Local", website_url=host, service_url=service, local=True) node.save() elif len(nodes) == 1: node = nodes[0] node.host = host node.service = service # TODO: Fix bug that prevents this from actually saving node.save() else: raise RuntimeError("More than one local node found in Nodes table. Please fix before continuing.") self.local_node_created = True return None
Add TODO to fix bug at later date
Add TODO to fix bug at later date
Python
apache-2.0
TeamAADGT/CMPUT404-project-socialdistribution,TeamAADGT/CMPUT404-project-socialdistribution,TeamAADGT/CMPUT404-project-socialdistribution
8d471b5b7a8f57214afe79783f09afa97c5d2bfc
entropy/__init__.py
entropy/__init__.py
import entropy._entropy as _entropy def entropy(data): """Compute the Shannon entropy of the given string. Returns a floating point value indicating how many bits of entropy there are per octet in the string.""" return _entropy.shannon_entropy(data) if __name__ == '__main__': print entropy('\n'.join(file(__file__)))
import entropy._entropy as _entropy def entropy(data): """Compute the Shannon entropy of the given string. Returns a floating point value indicating how many bits of entropy there are per octet in the string.""" return _entropy.shannon_entropy(data) def absolute_entropy(data): """Compute the "absolute" entropy of the given string. The absolute entropy of a string is how many bits of information, total, are in the entire string. This is the same as the Shannon entropy multiplied by the length of the string. A string can be losslessly compressed to a size no smaller than its absolute entropy.""" return entropy(data) * len(data) def relative_entropy(data): """Compute the relative entropy of the given string. The relative entropy is the ratio of the entropy of a string to its size, i.e., a measure of how well it uses space. It is, therefore, a floating point value on the interval (0, 1].""" return entropy(data) / 8 if __name__ == '__main__': print entropy('\n'.join(file(__file__)))
Add absolute and relative entropy functions.
Add absolute and relative entropy functions.
Python
bsd-3-clause
chachalaca/py-entropy,billthebrute/py-entropy,chachalaca/py-entropy,billthebrute/py-entropy
b43e06dd5a80814e15ce20f50d683f0daaa19a93
addons/hr/models/hr_employee_base.py
addons/hr/models/hr_employee_base.py
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import fields, models class HrEmployeeBase(models.AbstractModel): _name = "hr.employee.base" _description = "Basic Employee" _order = 'name' name = fields.Char() active = fields.Boolean("Active") department_id = fields.Many2one('hr.department', 'Department') job_id = fields.Many2one('hr.job', 'Job Position') job_title = fields.Char("Job Title") company_id = fields.Many2one('res.company', 'Company') address_id = fields.Many2one('res.partner', 'Work Address') work_phone = fields.Char('Work Phone') mobile_phone = fields.Char('Work Mobile') work_email = fields.Char('Work Email') work_location = fields.Char('Work Location') user_id = fields.Many2one('res.users') resource_id = fields.Many2one('resource.resource') resource_calendar_id = fields.Many2one('resource.calendar')
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import fields, models class HrEmployeeBase(models.AbstractModel): _name = "hr.employee.base" _description = "Basic Employee" _order = 'name' name = fields.Char() active = fields.Boolean("Active") color = fields.Integer('Color Index', default=0) department_id = fields.Many2one('hr.department', 'Department') job_id = fields.Many2one('hr.job', 'Job Position') job_title = fields.Char("Job Title") company_id = fields.Many2one('res.company', 'Company') address_id = fields.Many2one('res.partner', 'Work Address') work_phone = fields.Char('Work Phone') mobile_phone = fields.Char('Work Mobile') work_email = fields.Char('Work Email') work_location = fields.Char('Work Location') user_id = fields.Many2one('res.users') resource_id = fields.Many2one('resource.resource') resource_calendar_id = fields.Many2one('resource.calendar')
Add the color field to public employee
[FIX] hr: Add the color field to public employee. The color field is necessary to be able to display some fields (many2many_tags) and is used in the kanban views. closes odoo/odoo#35216 Signed-off-by: Yannick Tivisse (yti) <[email protected]> closes odoo/odoo#35462 Signed-off-by: Romain Libert (rli) <[email protected]>
Python
agpl-3.0
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
96e3d70a7bd824b8265e8e67adc3996c4522dd57
historia.py
historia.py
from eve import Eve app = Eve() if __name__ == '__main__': app.run()
from eve import Eve app = Eve() if __name__ == '__main__': app.run(host='0.0.0.0', port=80)
Use port 80 to serve the API
Use port 80 to serve the API
Python
mit
waoliveros/historia
be41ae0d987cd1408cb2db649a2eccd73bc272f3
apps/innovate/views.py
apps/innovate/views.py
import random import jingo from users.models import Profile from projects.models import Project from events.models import Event from feeds.models import Entry def splash(request): """Display splash page. With featured project, event, person, blog post.""" def get_random(cls, **kwargs): choices = cls.objects.filter(**kwargs) return choices and random.choice(choices) or None return jingo.render(request, 'innovate/splash.html', { 'featured_project': get_random(Project, featured=True), 'featured_event': get_random(Event, featured=True), 'featured_user': get_random(Profile, featured=True), 'entry': get_random(Entry, link__featured=True) }) def about(request): """Display the about page. Simple direct to template.""" # NOTE: can't use ``django.views.generic.simple.direct_to_template`` # because we use jinja2 templates instead of Django templates. return jingo.render(request, 'innovate/about.html') def handle404(request): """Handle 404 responses.""" return jingo.render(request, 'handlers/404.html') def handle500(request): """Handle server errors.""" return jingo.render(request, 'handlers/500.html')
import random import jingo from users.models import Profile from projects.models import Project from events.models import Event from feeds.models import Entry def splash(request): """Display splash page. With featured project, event, person, blog post.""" def get_random(cls, **kwargs): choices = cls.objects.filter(**kwargs) return choices and random.choice(choices) or None return jingo.render(request, 'innovate/splash.html', { 'featured_project': get_random(Project, featured=True), 'featured_event': get_random(Event, featured=True), 'featured_user': get_random(Profile, featured=True), 'entry': get_random(Entry, link__featured=True) }) def about(request): """Display the about page. Simple direct to template.""" # NOTE: can't use ``django.views.generic.simple.direct_to_template`` # because we use jinja2 templates instead of Django templates. return jingo.render(request, 'innovate/about.html') def handle404(request): """Handle 404 responses.""" return jingo.render(request, 'handlers/404.html', status=404) def handle500(request): """Handle server errors.""" return jingo.render(request, 'handlers/500.html', status=500)
Add status codes to the 404/500 error handlers.
Add status codes to the 404/500 error handlers.
Python
bsd-3-clause
mozilla/betafarm,mozilla/betafarm,mozilla/betafarm,mozilla/betafarm
60837893bf12bc5476b050cdc689260e9695a297
fileupload/models.py
fileupload/models.py
# encoding: utf-8 from django.db import models import uuid import os def unique_file_name(instance, filename): path = 'benchmarkLogs/' name = str(uuid.uuid4()) + '.log' return os.path.join(path, name) class Picture(models.Model): """This is a small demo using just two fields. The slug field is really not necessary, but makes the code simpler. ImageField depends on PIL or pillow (where Pillow is easily installable in a virtualenv. If you have problems installing pillow, use a more generic FileField instead. """ file = models.FileField(upload_to=unique_file_name) slug = models.SlugField(max_length=50, blank=True) def __unicode__(self): return self.file.name @models.permalink def get_absolute_url(self): return ('upload-new', ) def save(self, *args, **kwargs): self.slug = self.file.name super(Picture, self).save(*args, **kwargs) def delete(self, *args, **kwargs): """delete -- Remove to leave file.""" self.file.delete(False) super(Picture, self).delete(*args, **kwargs)
# encoding: utf-8 from django.db import models import uuid import os def unique_file_name(instance, filename): path = 'benchmarkLogs/' name = str(uuid.uuid4().hex) + '.log' return os.path.join(path, name) class Picture(models.Model): """This is a small demo using just two fields. The slug field is really not necessary, but makes the code simpler. ImageField depends on PIL or pillow (where Pillow is easily installable in a virtualenv. If you have problems installing pillow, use a more generic FileField instead. """ file = models.FileField(upload_to=unique_file_name) slug = models.SlugField(max_length=50, blank=True) def __unicode__(self): return self.file.name @models.permalink def get_absolute_url(self): return ('upload-new', ) def save(self, *args, **kwargs): self.slug = self.file.name super(Picture, self).save(*args, **kwargs) def delete(self, *args, **kwargs): """delete -- Remove to leave file.""" self.file.delete(False) super(Picture, self).delete(*args, **kwargs)
Make sure the filename is alphanumeric
Make sure the filename is alphanumeric
Python
bsd-2-clause
ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark
81a03d81ece17487f7b7296f524339a052d45a0d
src/gcf.py
src/gcf.py
def gcf(a, b): while b: a, b = b, a % b return a def xgcf(a, b): s1, s2 = 1, 0 t1, t2 = 0, 1 while b: q, r = divmod(a, b) a, b = b, r s2, s1 = s1 - q * s2, s2 t2, t1 = t1 - q * t2, t2 return a, s1, t1
def gcf(a, b): while b: a, b = b, a % b return a def xgcf(a, b): s1, s2 = 1, 0 t1, t2 = 0, 1 while b: q, r = divmod(a, b) a, b = b, r s2, s1 = s1 - q * s2, s2 t2, t1 = t1 - q * t2, t2 # Bézout's identity says: s1 * a + t1 * b == gcd(a, b) return a, s1, t1
Add Bézout's identity into comments
Add Bézout's identity into comments
Python
mit
all3fox/algos-py
85809e34f1d8ad3d8736e141f3cb2e6045260938
neuroimaging/externals/pynifti/nifti/__init__.py
neuroimaging/externals/pynifti/nifti/__init__.py
from niftiimage import NiftiImage
""" Nifti ===== Python bindings for the nifticlibs. Access through the NiftiImage class. See help for pyniftiio.nifti.NiftiImage """ from niftiimage import NiftiImage
Add doc for pynifti package.
DOC: Add doc for pynifti package.
Python
bsd-3-clause
alexis-roche/nipy,alexis-roche/register,nipy/nireg,alexis-roche/register,nipy/nipy-labs,nipy/nipy-labs,alexis-roche/nipy,alexis-roche/niseg,arokem/nipy,arokem/nipy,alexis-roche/nipy,nipy/nireg,bthirion/nipy,alexis-roche/nireg,alexis-roche/niseg,bthirion/nipy,bthirion/nipy,bthirion/nipy,alexis-roche/nireg,arokem/nipy,alexis-roche/nipy,arokem/nipy,alexis-roche/register
04df5c189d6d1760c692d1985faf558058e56eb2
flask_pagedown/__init__.py
flask_pagedown/__init__.py
from jinja2 import Markup from flask import current_app, request class _pagedown(object): def include_pagedown(self): if request.is_secure: protocol = 'https' else: protocol = 'http' return Markup(''' <script type="text/javascript" src="{0}://cdnjs.cloudflare.com/ajax/libs/pagedown/1.0/Markdown.Converter.min.js"></script> <script type="text/javascript" src="{0}://cdnjs.cloudflare.com/ajax/libs/pagedown/1.0/Markdown.Sanitizer.min.js"></script> '''.format(protocol)) def html_head(self): return self.include_pagedown() class PageDown(object): def __init__(self, app = None): if app is not None: self.init_app(app) def init_app(self, app): if not hasattr(app, 'extensions'): app.extensions = {} app.extensions['pagedown'] = _pagedown() app.context_processor(self.context_processor) @staticmethod def context_processor(): return { 'pagedown': current_app.extensions['pagedown'] }
from jinja2 import Markup from flask import current_app, request class _pagedown(object): def include_pagedown(self): return Markup(''' <script type="text/javascript" src="//cdnjs.cloudflare.com/ajax/libs/pagedown/1.0/Markdown.Converter.min.js"></script> <script type="text/javascript" src="//cdnjs.cloudflare.com/ajax/libs/pagedown/1.0/Markdown.Sanitizer.min.js"></script> ''') def html_head(self): return self.include_pagedown() class PageDown(object): def __init__(self, app = None): if app is not None: self.init_app(app) def init_app(self, app): if not hasattr(app, 'extensions'): app.extensions = {} app.extensions['pagedown'] = _pagedown() app.context_processor(self.context_processor) @staticmethod def context_processor(): return { 'pagedown': current_app.extensions['pagedown'] }
Fix support for SSL for proxied sites, or otherwise uncertain situations
Fix support for SSL for proxied sites, or otherwise uncertain situations. My particular situation is deployed through ElasticBeanstalk, proxying HTTPS to HTTP on the actual endpoints. This makes flask think that it is only running with http, not https.
Python
mit
miguelgrinberg/Flask-PageDown,miguelgrinberg/Flask-PageDown
365411abd73275a529dc5ca7ec403b994c513aae
registries/serializers.py
registries/serializers.py
from rest_framework import serializers from registries.models import Organization from gwells.models import ProvinceState class DrillerListSerializer(serializers.ModelSerializer): province_state = serializers.ReadOnlyField() class Meta: model = Organization # Using all fields for now fields = ( #'who_created', #'when_created', #'who_updated', #'when_updated', 'name', 'street_address', 'city', 'province_state', 'postal_code', 'main_tel', 'fax_tel', 'website_url', 'certificate_authority', )
from rest_framework import serializers from registries.models import Organization from gwells.models import ProvinceState class DrillerListSerializer(serializers.ModelSerializer): """ Serializer for Driller model "list" view. """ province_state = serializers.ReadOnlyField(source="province_state.code") class Meta: model = Organization # Using all fields for now fields = ( #'who_created', #'when_created', #'who_updated', #'when_updated', 'org_guid', 'name', 'street_address', 'city', 'province_state', 'postal_code', 'main_tel', #'fax_tel', #'website_url', #'certificate_authority', )
Add fields to driller list serializer
Add fields to driller list serializer
Python
apache-2.0
bcgov/gwells,rstens/gwells,bcgov/gwells,bcgov/gwells,rstens/gwells,rstens/gwells,rstens/gwells,bcgov/gwells
a2eb2ad96562f5b740338d9acb68dc72f72031a2
astroquery/conftest.py
astroquery/conftest.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import print_function import os # This is to figure out the astroquery version, rather than using Astropy's from . import version # this contains imports plugins that configure py.test for astropy tests. # by importing them here in conftest.py they are discoverable by py.test # no matter how it is invoked within the source tree. from astropy.tests.pytest_plugins import * try: packagename = os.path.basename(os.path.dirname(__file__)) TESTED_VERSIONS[packagename] = version.version except NameError: pass # Add astropy to test header information and remove unused packages. # Pytest header customisation was introduced in astropy 1.0. try: PYTEST_HEADER_MODULES['Astropy'] = 'astropy' PYTEST_HEADER_MODULES['APLpy'] = 'APLpy' PYTEST_HEADER_MODULES['pyregion'] = 'pyregion' del PYTEST_HEADER_MODULES['h5py'] del PYTEST_HEADER_MODULES['Scipy'] except NameError: pass
# Licensed under a 3-clause BSD style license - see LICENSE.rst from __future__ import print_function import os # This is to figure out the astroquery version, rather than using Astropy's from . import version # this contains imports plugins that configure py.test for astropy tests. # by importing them here in conftest.py they are discoverable by py.test # no matter how it is invoked within the source tree. from astropy.tests.pytest_plugins import * try: packagename = os.path.basename(os.path.dirname(__file__)) TESTED_VERSIONS[packagename] = version.version except NameError: pass # Add astropy to test header information and remove unused packages. # Pytest header customisation was introduced in astropy 1.0. try: PYTEST_HEADER_MODULES['Astropy'] = 'astropy' PYTEST_HEADER_MODULES['APLpy'] = 'aplpy' PYTEST_HEADER_MODULES['pyregion'] = 'pyregion' del PYTEST_HEADER_MODULES['h5py'] del PYTEST_HEADER_MODULES['Scipy'] except NameError: pass
Fix packagename spelling for testing header
Fix packagename spelling for testing header
Python
bsd-3-clause
ceb8/astroquery,ceb8/astroquery,imbasimba/astroquery,imbasimba/astroquery
bfdfb47049c3b560f49bb917ea56e85f85c80e91
project_template/project_name/context_processors.py
project_template/project_name/context_processors.py
from django.conf import settings def add_settings( request ): """Add some selected settings values to the context""" return { 'settings': { 'GOOGLE_ANALYTICS_ACCOUNT': settings.GOOGLE_ANALYTICS_ACCOUNT, } }
from django.conf import settings def add_settings( request ): """Add some selected settings values to the context""" return { 'settings': { 'GOOGLE_ANALYTICS_ACCOUNT': settings.GOOGLE_ANALYTICS_ACCOUNT, 'DEBUG': settings.DEBUG, } }
Make settings.DEBUG available to templates. It's used in the default base.html template, so it makes sense for it to actually appear in the context.
Make settings.DEBUG available to templates. It's used in the default base.html template, so it makes sense for it to actually appear in the context.
Python
agpl-3.0
mysociety/django-jumpstart,mysociety/django-jumpstart
fc9fdd2115b46c71c36ba7d86f14395ac4cf1e3e
genome_designer/scripts/generate_coverage_data.py
genome_designer/scripts/generate_coverage_data.py
"""Script to generate coverage data. """ import os import subprocess from django.conf import settings from main.models import get_dataset_with_type from main.models import AlignmentGroup from main.models import Dataset from utils import generate_safe_filename_prefix_from_label def analyze_coverage(sample_alignment, output_dir): ref_genome_fasta_location = get_dataset_with_type( sample_alignment.alignment_group.reference_genome, Dataset.TYPE.REFERENCE_GENOME_FASTA).get_absolute_location() input_bam_file = sample_alignment.dataset_set.get( type=Dataset.TYPE.BWA_ALIGN).get_absolute_location() output_filename = generate_safe_filename_prefix_from_label( sample_alignment.experiment_sample.label + '_' + sample_alignment.uid) + '.coverage' output_path = os.path.join(output_dir, output_filename) with open(output_path, 'w') as fh: subprocess.check_call([ '%s/samtools/samtools' % settings.TOOLS_DIR, 'mpileup', '-f', ref_genome_fasta_location, input_bam_file ], stdout=fh)
"""Script to generate coverage data. """ import os import subprocess from django.conf import settings from main.models import get_dataset_with_type from main.models import AlignmentGroup from main.models import Dataset from utils import generate_safe_filename_prefix_from_label def analyze_coverage(sample_alignment, output_dir): ref_genome_fasta_location = get_dataset_with_type( sample_alignment.alignment_group.reference_genome, Dataset.TYPE.REFERENCE_GENOME_FASTA).get_absolute_location() input_bam_file = sample_alignment.dataset_set.get( type=Dataset.TYPE.BWA_ALIGN).get_absolute_location() output_filename = generate_safe_filename_prefix_from_label( sample_alignment.experiment_sample.label + '_' + sample_alignment.uid) + '.coverage' output_path = os.path.join(output_dir, output_filename) with open(output_path, 'w') as fh: p_mpileup = subprocess.Popen([ '%s/samtools/samtools' % settings.TOOLS_DIR, 'mpileup', '-f', ref_genome_fasta_location, input_bam_file ], stdout=subprocess.PIPE) subprocess.check_call([ 'cut', '-f', '-4' ], stdin=p_mpileup.stdout, stdout=fh)
Update coverage script to only output the first 4 cols, which show coverage.
Update coverage script to only output the first 4 cols, which show coverage.
Python
mit
woodymit/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,churchlab/millstone,woodymit/millstone,churchlab/millstone,churchlab/millstone,churchlab/millstone
79d3f7476ef35ce120190badad63d460d7fa092b
rootio/telephony/forms.py
rootio/telephony/forms.py
# from wtforms import Form from flask.ext.babel import gettext as _ from flask.ext.wtf import Form from wtforms import SubmitField, RadioField, StringField from wtforms.ext.sqlalchemy.orm import model_form from wtforms.validators import AnyOf, Required from .constants import PHONE_NUMBER_TYPE from .models import PhoneNumber, Gateway from ..extensions import db PhoneNumberFormBase = model_form(PhoneNumber, db_session=db.session, base_class=Form, exclude=[ 'areacode', 'created_at', 'updated_at', 'person', 'station_cloud', 'station_transmitter', 'stations', ]) class PhoneNumberForm(PhoneNumberFormBase): number = StringField(_('Phone Number'), [Required], default=" ") number_type = RadioField(_("Type"), [AnyOf([str(val) for val in PHONE_NUMBER_TYPE.keys()])], choices=[(str(val), label) for val, label in PHONE_NUMBER_TYPE.items()]) submit = SubmitField(_('Save')) GatewayFormBase = model_form(Gateway, db_session=db.session, base_class=Form, exclude=[ 'created_at', 'updated_at', ]) class GatewayForm(GatewayFormBase): name = StringField() number_top = db.Column(db.Integer) number_bottom = db.Column(db.Integer) sofia_string = StringField() extra_string = StringField() submit = SubmitField(_('Save'))
# from wtforms import Form from flask.ext.babel import gettext as _ from flask.ext.wtf import Form from wtforms import SubmitField, RadioField, StringField from wtforms.ext.sqlalchemy.orm import model_form from wtforms.validators import AnyOf, Required from .constants import PHONE_NUMBER_TYPE from .models import PhoneNumber, Gateway from ..extensions import db PhoneNumberFormBase = model_form(PhoneNumber, db_session=db.session, base_class=Form, exclude=[ 'areacode', 'created_at', 'updated_at', 'person', 'station_cloud', 'station_transmitter', 'stations', ]) class PhoneNumberForm(PhoneNumberFormBase): number = StringField(_('Phone Number'), [Required()], default=" ") number_type = RadioField(_("Type"), [AnyOf([str(val) for val in PHONE_NUMBER_TYPE.keys()])], choices=[(str(val), label) for val, label in PHONE_NUMBER_TYPE.items()]) submit = SubmitField(_('Save')) GatewayFormBase = model_form(Gateway, db_session=db.session, base_class=Form, exclude=[ 'created_at', 'updated_at', ]) class GatewayForm(GatewayFormBase): name = StringField() number_top = db.Column(db.Integer) number_bottom = db.Column(db.Integer) sofia_string = StringField() extra_string = StringField() submit = SubmitField(_('Save'))
Fix inline phone number form
Fix inline phone number form
Python
agpl-3.0
rootio/rootio_web,rootio/rootio_web,rootio/rootio_web,rootio/rootio_web
e7bda027780da26183f84f7af5c50cd37649c76b
functional_tests/remote.py
functional_tests/remote.py
# -*- coding: utf-8 -*- from unipath import Path import subprocess THIS_FOLDER = Path(__file__).parent def reset_database(host): subprocess.check_call(['fab', 'reset_database', '--host={}'.format(host)], cwd=THIS_FOLDER) def create_user(host, user, email, password): subprocess.check_call(['fab', 'create_user:user={},password={},email={}' \ .format(user, password, email), '--host={}'.format(host)], cwd=THIS_FOLDER) def get_sitename(host): return subprocess.check_output(['fab', 'get_sitename', '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER).decode().strip() def create_project(host, user, name, description=''): return subprocess.check_output(['fab', 'create_project:user={},name={},description={}'.format(user, name, description), '--host={}'.format(host)], cwd=THIS_FOLDER) def create_action(host, user, text, project=''): return subprocess.check_output(['fab', 'create_action:user={},text={},project={}'.format(user, text, project), '--host={}'.format(host)], cwd=THIS_FOLDER)
# -*- coding: utf-8 -*- from unipath import Path import subprocess THIS_FOLDER = Path(__file__).parent def reset_database(host): subprocess.check_call(['fab', 'reset_database', '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER) def create_user(host, user, email, password): subprocess.check_call(['fab', 'create_user:user={},password={},email={}' \ .format(user, password, email), '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER) def get_sitename(host): return subprocess.check_output(['fab', 'get_sitename', '--host={}'.format(host), '--hide=everything,status'], cwd=THIS_FOLDER).decode().strip() def create_project(host, user, name, description=''): return subprocess.check_output(['fab', 'create_project:user={},name={},description={}'.format(user, name, description), '--host={}'.format(host)], cwd=THIS_FOLDER) def create_action(host, user, text, project=''): return subprocess.check_output(['fab', 'create_action:user={},text={},project={}'.format(user, text, project), '--host={}'.format(host)], cwd=THIS_FOLDER)
Make running FTs against staging a bit less verbose
Make running FTs against staging a bit less verbose
Python
mit
XeryusTC/projman,XeryusTC/projman,XeryusTC/projman
4daefdb0a4def961572fc22d0fe01a394b11fad9
tests/test_httpclient.py
tests/test_httpclient.py
try: import unittest2 as unittest except ImportError: import unittest import sys sys.path.append('..') from pyrabbit import http class TestHTTPClient(unittest.TestCase): """ Except for the init test, these are largely functional tests that require a RabbitMQ management API to be available on localhost:55672 """ def setUp(self): self.c = http.HTTPClient('localhost:55672', 'guest', 'guest') def test_client_init(self): c = http.HTTPClient('localhost:55672', 'guest', 'guest') self.assertIsInstance(c, http.HTTPClient) def test_client_init_sets_default_timeout(self): self.assertEqual(self.c.client.timeout, 1) def test_client_init_with_timeout(self): c = http.HTTPClient('localhost:55672', 'guest', 'guest', 5) self.assertEqual(c.client.timeout, 5)
try: import unittest2 as unittest except ImportError: import unittest import sys sys.path.append('..') from pyrabbit import http class TestHTTPClient(unittest.TestCase): """ Except for the init test, these are largely functional tests that require a RabbitMQ management API to be available on localhost:55672 """ def setUp(self): self.c = http.HTTPClient('localhost:55672', 'guest', 'guest') def test_client_init(self): c = http.HTTPClient('localhost:55672', 'guest', 'guest') self.assertIsInstance(c, http.HTTPClient) def test_client_init_sets_credentials(self): domain = '' expected_credentials = [(domain, 'guest', 'guest')] self.assertEqual( self.c.client.credentials.credentials, expected_credentials) def test_client_init_sets_default_timeout(self): self.assertEqual(self.c.client.timeout, 1) def test_client_init_with_timeout(self): c = http.HTTPClient('localhost:55672', 'guest', 'guest', 5) self.assertEqual(c.client.timeout, 5)
Test creation of HTTP credentials
tests.http: Test creation of HTTP credentials
Python
bsd-3-clause
ranjithlav/pyrabbit,bkjones/pyrabbit,NeCTAR-RC/pyrabbit,chaos95/pyrabbit,switchtower/pyrabbit
4853e42c080bcb065da16e1d613a73a835afbaf6
infcommon/postgres/factory.py
infcommon/postgres/factory.py
# -*- coding: utf-8 -*- import os from infcommon import Factory from infcommon.postgres.postgres import PostgresClient def postgres_client_from_connection_parameters(user, password, host, port, db_name): connection_uri = 'postgresql://{user}:{password}@{host}:{port}/{db_name}'.format(user=user, password=password, host=host, port=port, db_name=db_name) return Factory.instance('posgres_client_from_connection_parameters', lambda: PostgresClient(connection_uri) ) def postgres_client_from_connection_os_variable(db_uri_os_valiable_name='LOCAL_PG_DB_URI'): connection_uri = os.getenv(db_uri_os_valiable_name) return Factory.instance('posgres_client_from_connection_parameters', lambda: PostgresClient(connection_uri) )
# -*- coding: utf-8 -*- import os from infcommon.factory import Factory from infcommon.postgres.postgres import PostgresClient def postgres_client_from_connection_parameters(user, password, host, port, db_name): connection_uri = 'postgresql://{user}:{password}@{host}:{port}/{db_name}'.format(user=user, password=password, host=host, port=port, db_name=db_name) return Factory.instance('posgres_client_from_connection_parameters', lambda: PostgresClient(connection_uri) ) def postgres_client_from_connection_os_variable(db_uri_os_valiable_name='LOCAL_PG_DB_URI'): connection_uri = os.getenv(db_uri_os_valiable_name) return Factory.instance('posgres_client_from_connection_parameters', lambda: PostgresClient(connection_uri) )
Use Factory class from module instead of from __init__
[HOUSEKEEPING] Use Factory class from module instead of from __init__
Python
mit
aleasoluciones/infcommon,aleasoluciones/infcommon
dbbd29a1cdfcd3f11a968c0aeb38bd54ef7014e3
gfusion/tests/test_main.py
gfusion/tests/test_main.py
"""Tests for main.py""" from ..main import _solve_weight_vector import numpy as np from nose.tools import assert_raises, assert_equal, assert_true def test_solve_weight_vector(): # smoke test n_nodes = 4 n_communities = 2 n_similarities = 3 delta = 0.3 similarities = np.random.random((n_similarities, n_nodes * (n_nodes-1)/2)) * 10 grouping_matrix = np.random.random((n_nodes, n_communities)) weight = _solve_weight_vector(similarities, grouping_matrix, delta) assert_equal(weight.ndim, 2) assert_equal(weight.shape[1], n_similarities) assert_true(np.all(weight >= 0)) # check raises assert_raises(ValueError, _solve_weight_vector, similarities, grouping_matrix, -1) similarities_invalid = similarities.copy() similarities_invalid[0, 3] = -4. assert_raises(ValueError, _solve_weight_vector, similarities_invalid, grouping_matrix, delta)
"""Tests for main.py""" from ..main import _solve_weight_vector import numpy as np from numpy.testing import assert_array_almost_equal from nose.tools import assert_raises, assert_equal, assert_true def test_solve_weight_vector(): # smoke test n_nodes = 4 n_communities = 2 n_similarities = 3 delta = 0.3 similarities = np.random.random((n_similarities, n_nodes * (n_nodes-1)/2)) * 10 grouping_matrix = np.random.random((n_nodes, n_communities)) weight = _solve_weight_vector(similarities, grouping_matrix, delta) assert_equal(weight.ndim, 2) assert_equal(weight.shape[1], n_similarities) assert_true(np.all(weight >= 0)) # check raises assert_raises(ValueError, _solve_weight_vector, similarities, grouping_matrix, -1) similarities_invalid = similarities.copy() similarities_invalid[0, 3] = -4. assert_raises(ValueError, _solve_weight_vector, similarities_invalid, grouping_matrix, delta) # if I have two similarities, and one is null + the grouping matrix is all # to all, and delta is 0 (no regularization), then I expect that the weight # vector is [1, 0] similarities = np.vstack((1000*np.ones((1, 6)), np.zeros((1, 6)) )) grouping_matrix = np.ones((4, 4)) delta = 1 assert_array_almost_equal(np.atleast_2d([1., 0.]), _solve_weight_vector(similarities, grouping_matrix, delta))
Add a more semantic test
Add a more semantic test
Python
mit
mvdoc/gfusion
ca28ffbc6d4f981e952709af77139f7a8666319d
client/gettransactions.py
client/gettransactions.py
#!/usr/bin/python import os import sys import api import json import getpass # Banks banks = {} import bankofamerica banks["bankofamerica"] = bankofamerica print "Login" print "Username: ", username = sys.stdin.readline().strip() password = getpass.getpass() if not api.callapi("login",{"username": username, "password": password}): print "Login failed" sys.exit(1) todo = api.callapi("accountstodo") for account in todo: if account["bankname"] not in banks: print "No scraper for %s!" % (account["bankname"]) continue print "Scraping %s..." % (account["bankname"]) if os.getenv("DATAFILE"): data = open(os.getenv("DATAFILE")).read() else: data = json.dumps(banks[account["bankname"]].downloadaccount(account),default=str) api.callapi("newtransactions", {"data": data}) api.callapi("logout")
#!/usr/bin/python import os import sys import api import json import getpass sys.path.append("../") import config # Banks banks = {} for bank in config.banks: exec "import %s" % (bank) banks[bank] = eval(bank) print "Login" print "Username: ", username = sys.stdin.readline().strip() password = getpass.getpass() if not api.callapi("login",{"username": username, "password": password}): print "Login failed" sys.exit(1) todo = api.callapi("accountstodo") for account in todo: if account["bankname"] not in banks: print "No scraper for %s!" % (account["bankname"]) continue print "Scraping %s..." % (account["bankname"]) if os.getenv("DATAFILE"): data = open(os.getenv("DATAFILE")).read() else: data = json.dumps(banks[account["bankname"]].downloadaccount(account),default=str) api.callapi("newtransactions", {"data": data}) api.callapi("logout")
Read bank list from config file.
Read bank list from config file.
Python
agpl-3.0
vincebusam/pyWebCash,vincebusam/pyWebCash,vincebusam/pyWebCash
eebae9eb9b941a4e595775434a05df29d55a34f2
tools/conan/conanfile.py
tools/conan/conanfile.py
from conans import ConanFile, CMake, tools class VarconfConan(ConanFile): name = "varconf" version = "1.0.3" license = "GPL-2.0+" author = "Erik Ogenvik <[email protected]>" homepage = "https://www.worldforge.org" url = "https://github.com/worldforge/varconf" description = "Configuration library for the Worldforge system." topics = ("mmorpg", "worldforge") settings = "os", "compiler", "build_type", "arch" options = {"shared": [False, True]} default_options = {"shared": False} generators = "cmake" requires = ["sigc++/2.10.0@worldforge/stable"] scm = { "type": "git", "url": "https://github.com/worldforge/varconf.git", "revision": "auto" } def build(self): cmake = CMake(self) cmake.configure(source_folder=".") cmake.build() cmake.install() def package_info(self): self.cpp_info.libs = tools.collect_libs(self) self.cpp_info.includedirs = ["include/varconf-1.0"] def package(self): pass
from conans import ConanFile, CMake, tools class VarconfConan(ConanFile): name = "varconf" version = "1.0.3" license = "GPL-2.0+" author = "Erik Ogenvik <[email protected]>" homepage = "https://www.worldforge.org" url = "https://github.com/worldforge/varconf" description = "Configuration library for the Worldforge system." topics = ("mmorpg", "worldforge") settings = "os", "compiler", "build_type", "arch" options = {"shared": [False, True], "fPIC": [True, False]} default_options = {"shared": False, "fPIC": True} generators = "cmake" requires = ["sigc++/2.10.0@worldforge/stable"] scm = { "type": "git", "url": "https://github.com/worldforge/varconf.git", "revision": "auto" } def build(self): cmake = CMake(self) cmake.configure(source_folder=".") cmake.build() cmake.install() def package_info(self): self.cpp_info.libs = tools.collect_libs(self) self.cpp_info.includedirs = ["include/varconf-1.0"] def package(self): pass
Build with PIC by default.
Build with PIC by default.
Python
lgpl-2.1
worldforge/varconf,worldforge/varconf,worldforge/varconf,worldforge/varconf
5b2e74cdc50a6c3814879f470d8992267fa06a62
heufybot/utils/__init__.py
heufybot/utils/__init__.py
# Taken from txircd: # https://github.com/ElementalAlchemist/txircd/blob/8832098149b7c5f9b0708efe5c836c8160b0c7e6/txircd/utils.py#L9 def _enum(**enums): return type('Enum', (), enums) ModeType = _enum(LIST=0, PARAM_SET=1, PARAM_UNSET=2, NO_PARAM=3) ModuleLoadType = _enum(LOAD=0, UNLOAD=1, ENABLE=2, DISABLE=3) def isNumber(s): try: float(s) return True except ValueError: return False def parseUserPrefix(prefix): if "!" in prefix: nick = prefix[:prefix.find("!")] ident = prefix[prefix.find("!") + 1:prefix.find("@")] host = prefix[prefix.find("@") + 1:] return nick, ident, host # Not all "users" have idents and hostnames nick = prefix return nick, None, None def networkName(bot, server): return bot.servers[server].supportHelper.network
# Taken from txircd: # https://github.com/ElementalAlchemist/txircd/blob/8832098149b7c5f9b0708efe5c836c8160b0c7e6/txircd/utils.py#L9 def _enum(**enums): return type('Enum', (), enums) ModeType = _enum(LIST=0, PARAM_SET=1, PARAM_UNSET=2, NO_PARAM=3) ModuleLoadType = _enum(LOAD=0, UNLOAD=1, ENABLE=2, DISABLE=3) def isNumber(s): try: float(s) return True except ValueError: return False def parseUserPrefix(prefix): if prefix is None: prefix = "" if "!" in prefix: nick = prefix[:prefix.find("!")] ident = prefix[prefix.find("!") + 1:prefix.find("@")] host = prefix[prefix.find("@") + 1:] return nick, ident, host # Not all "users" have idents and hostnames nick = prefix return nick, None, None def networkName(bot, server): return bot.servers[server].supportHelper.network
Fix the handling of missing prefixes
Fix the handling of missing prefixes. Twisted defaults to an empty string, while IRCBase defaults to None.
Python
mit
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
55b7b07986590c4ab519fcda3c973c87ad23596b
flask_admin/model/typefmt.py
flask_admin/model/typefmt.py
from jinja2 import Markup def null_formatter(value): """ Return `NULL` as the string for `None` value :param value: Value to check """ return Markup('<i>NULL</i>') def empty_formatter(value): """ Return empty string for `None` value :param value: Value to check """ return '' def bool_formatter(value): """ Return check icon if value is `True` or empty string otherwise. :param value: Value to check """ return Markup('<i class="icon-ok"></i>' if value else '') DEFAULT_FORMATTERS = { type(None): empty_formatter, bool: bool_formatter }
from jinja2 import Markup def null_formatter(value): """ Return `NULL` as the string for `None` value :param value: Value to check """ return Markup('<i>NULL</i>') def empty_formatter(value): """ Return empty string for `None` value :param value: Value to check """ return '' def bool_formatter(value): """ Return check icon if value is `True` or empty string otherwise. :param value: Value to check """ return Markup('<i class="icon-ok"></i>' if value else '') def list_formatter(values): """ Return string with comma separated values :param values: Value to check """ return u', '.join(values) DEFAULT_FORMATTERS = { type(None): empty_formatter, bool: bool_formatter, list: list_formatter, }
Add extra type formatter for `list` type
Add extra type formatter for `list` type
Python
bsd-3-clause
mrjoes/flask-admin,janusnic/flask-admin,Kha/flask-admin,wuxiangfeng/flask-admin,litnimax/flask-admin,HermasT/flask-admin,quokkaproject/flask-admin,Kha/flask-admin,flabe81/flask-admin,porduna/flask-admin,Junnplus/flask-admin,ibushong/test-repo,janusnic/flask-admin,jschneier/flask-admin,closeio/flask-admin,chase-seibert/flask-admin,litnimax/flask-admin,ArtemSerga/flask-admin,flask-admin/flask-admin,NickWoodhams/flask-admin,LennartP/flask-admin,late-warrior/flask-admin,likaiguo/flask-admin,iurisilvio/flask-admin,mikelambert/flask-admin,jamesbeebop/flask-admin,quokkaproject/flask-admin,mrjoes/flask-admin,pawl/flask-admin,jschneier/flask-admin,toddetzel/flask-admin,rochacbruno/flask-admin,ArtemSerga/flask-admin,Junnplus/flask-admin,torotil/flask-admin,ondoheer/flask-admin,plaes/flask-admin,AlmogCohen/flask-admin,plaes/flask-admin,wangjun/flask-admin,dxmo/flask-admin,jmagnusson/flask-admin,marrybird/flask-admin,torotil/flask-admin,wuxiangfeng/flask-admin,CoolCloud/flask-admin,toddetzel/flask-admin,lifei/flask-admin,ondoheer/flask-admin,phantomxc/flask-admin,mikelambert/flask-admin,mrjoes/flask-admin,petrus-jvrensburg/flask-admin,CoolCloud/flask-admin,wangjun/flask-admin,iurisilvio/flask-admin,petrus-jvrensburg/flask-admin,lifei/flask-admin,mikelambert/flask-admin,sfermigier/flask-admin,radioprotector/flask-admin,wuxiangfeng/flask-admin,petrus-jvrensburg/flask-admin,iurisilvio/flask-admin,likaiguo/flask-admin,jschneier/flask-admin,litnimax/flask-admin,flask-admin/flask-admin,petrus-jvrensburg/flask-admin,plaes/flask-admin,ibushong/test-repo,flask-admin/flask-admin,torotil/flask-admin,radioprotector/flask-admin,rochacbruno/flask-admin,wuxiangfeng/flask-admin,HermasT/flask-admin,LennartP/flask-admin,marrybird/flask-admin,dxmo/flask-admin,flask-admin/flask-admin,phantomxc/flask-admin,LennartP/flask-admin,chase-seibert/flask-admin,plaes/flask-admin,marrybird/flask-admin,mikelambert/flask-admin,wangjun/flask-admin,ArtemSerga/flask-admin,AlmogCohen/flask-admin,AlmogCohen/flask-admin,ondoheer/flask-admin,closeio/flask-admin,rochacbruno/flask-admin,flabe81/flask-admin,AlmogCohen/flask-admin,lifei/flask-admin,jmagnusson/flask-admin,mrjoes/flask-admin,pawl/flask-admin,torotil/flask-admin,likaiguo/flask-admin,HermasT/flask-admin,flabe81/flask-admin,porduna/flask-admin,iurisilvio/flask-admin,NickWoodhams/flask-admin,late-warrior/flask-admin,porduna/flask-admin,radioprotector/flask-admin,chase-seibert/flask-admin,CoolCloud/flask-admin,toddetzel/flask-admin,betterlife/flask-admin,betterlife/flask-admin,lifei/flask-admin,porduna/flask-admin,quokkaproject/flask-admin,rochacbruno/flask-admin,jschneier/flask-admin,late-warrior/flask-admin,pawl/flask-admin,toddetzel/flask-admin,phantomxc/flask-admin,late-warrior/flask-admin,wangjun/flask-admin,ondoheer/flask-admin,ibushong/test-repo,jmagnusson/flask-admin,CoolCloud/flask-admin,closeio/flask-admin,ArtemSerga/flask-admin,jamesbeebop/flask-admin,janusnic/flask-admin,marrybird/flask-admin,jamesbeebop/flask-admin,LennartP/flask-admin,phantomxc/flask-admin,Kha/flask-admin,radioprotector/flask-admin,flabe81/flask-admin,betterlife/flask-admin,sfermigier/flask-admin,jamesbeebop/flask-admin,closeio/flask-admin,Kha/flask-admin,Junnplus/flask-admin,Junnplus/flask-admin,ibushong/test-repo,dxmo/flask-admin,NickWoodhams/flask-admin,NickWoodhams/flask-admin,quokkaproject/flask-admin,sfermigier/flask-admin,likaiguo/flask-admin,HermasT/flask-admin,litnimax/flask-admin,jmagnusson/flask-admin,dxmo/flask-admin,betterlife/flask-admin,chase-seibert/flask-admin,janusnic/flask-admin
e9eb8e0c4e77525d31c904e7f401a0d388a3fbf6
app/utils.py
app/utils.py
from flask import url_for def register_template_utils(app): """Register Jinja 2 helpers (called from __init__.py).""" @app.template_test() def equalto(value, other): return value == other @app.template_global() def is_hidden_field(field): from wtforms.fields import HiddenField return isinstance(field, HiddenField) app.add_template_global(index_for_role) def index_for_role(role): return url_for(role.name + '.index')
from flask import url_for def register_template_utils(app): """Register Jinja 2 helpers (called from __init__.py).""" @app.template_test() def equalto(value, other): return value == other @app.template_global() def is_hidden_field(field): from wtforms.fields import HiddenField return isinstance(field, HiddenField) app.add_template_global(index_for_role) def index_for_role(role): return url_for(role.index)
Fix index_for_role function to use index field in Role class.
Fix index_for_role function to use index field in Role class.
Python
mit
hack4impact/women-veterans-rock,hack4impact/women-veterans-rock,hack4impact/women-veterans-rock
ad757857b7878904c6d842e115074c4fac24bed7
tweetar.py
tweetar.py
import twitter import urllib2 NOAA_URL = "http://weather.noaa.gov/pub/data/observations/metar/stations/*station_id*.TXT" def retrieve_and_post(conf): post = False pull_url = NOAA_URL.replace('*station_id*', conf['station']) request = urllib2.Request(pull_url, None) response = urllib2.urlopen(request) metar = response.read().split('\n')[1] # NOAA includes a "real" timestamp as the first line of the response if getattr(conf, 'hashtag', False): metar = '%s #%s' % (metar, conf['hashtag']) api = twitter.Api(username=conf['twitter_user'], password=conf['twitter_password']) # get the last posted message and make sure it's different before attempting to post. Twitter isn't supposed to allow dupes through but I'm seeing it happen anyway past_statuses = api.GetUserTimeline(conf['twitter_user']) if past_statuses[-0].text != metar: post = True if post: api.PostUpdate(metar) if __name__ == '__main__': retrieve_and_post({'station': '<station_id>', 'twitter_user': '<twitter_user>', 'twitter_password': '<twitter_pass>'})
import twitter import urllib2 NOAA_URL = "http://weather.noaa.gov/pub/data/observations/metar/stations/*station_id*.TXT" def retrieve_and_post(conf): post = False pull_url = NOAA_URL.replace('*station_id*', conf['station']) request = urllib2.Request(pull_url, None) response = urllib2.urlopen(request) metar = response.read().split('\n')[1] # NOAA includes a "real" timestamp as the first line of the response if conf.get('hashtag', False): metar = '%s #%s' % (metar, conf['hashtag']) api = twitter.Api(username=conf['twitter_user'], password=conf['twitter_password']) # get the last posted message and make sure it's different before attempting to post. Twitter isn't supposed to allow dupes through but I'm seeing it happen anyway past_statuses = api.GetUserTimeline(conf['twitter_user']) if past_statuses[-0].text != metar: post = True if post: api.PostUpdate(metar) if __name__ == '__main__': retrieve_and_post({'station': '<station_id>', 'twitter_user': '<twitter_user>', 'twitter_password': '<twitter_pass>'})
Use .get instead of getattr, dummy.
Use .get instead of getattr, dummy.
Python
bsd-3-clause
adamfast/python-tweetar
19b7946dd57b79e49d1d54d980d295435793e465
slackminion/__main__.py
slackminion/__main__.py
import logging from config import * from slackminion.bot import Bot def main(): level = logging.DEBUG if DEBUG else logging.INFO logging.basicConfig(level=level, format='%(asctime)s %(name)s %(levelname)s: %(message)s') bot = Bot(SLACK_TOKEN) bot.start() bot.run() bot.stop() if __name__ == "__main__": main()
import logging import sys sys.path.append('.') from config import * from slackminion.bot import Bot def main(): level = logging.DEBUG if DEBUG else logging.INFO logging.basicConfig(level=level, format='%(asctime)s %(name)s %(levelname)s: %(message)s') bot = Bot(SLACK_TOKEN) bot.start() bot.run() bot.stop() if __name__ == "__main__": main()
Allow importing of config from current directory
Allow importing of config from current directory
Python
mit
arcticfoxnv/slackminion,arcticfoxnv/slackminion
0461fad1a3d81aa2d937a1734f1ebb07b3e81d79
undercloud_heat_plugins/server_update_allowed.py
undercloud_heat_plugins/server_update_allowed.py
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from heat.engine.resources.openstack.nova import server class ServerUpdateAllowed(server.Server): '''Prevent any properties changes from replacing an existing server. ''' update_allowed_properties = server.Server.properties_schema.keys() def resource_mapping(): return {'OS::Nova::Server': ServerUpdateAllowed}
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from heat.engine.resources.openstack.nova import server class ServerUpdateAllowed(server.Server): '''Prevent any properties changes from replacing an existing server. ''' update_allowed_properties = server.Server.properties_schema.keys() def needs_replace_with_prop_diff(self, changed_properties_set, after_props, before_props): return False def resource_mapping(): return {'OS::Nova::Server': ServerUpdateAllowed}
Fix no-replace-server to accurately preview update
Fix no-replace-server to accurately preview update. This override of OS::Nova::Server needs to reflect the fact that it never replaces on update or the update --dry-run output ends up being wrong. Closes-Bug: 1561076 Change-Id: I9256872b877fbe7f91befb52995c62de006210ef
Python
apache-2.0
openstack/tripleo-common,openstack/tripleo-common
1514da45e5d89a2ab426ed62f807d137e9c318e9
labonneboite/common/pdf.py
labonneboite/common/pdf.py
# coding: utf8 import errno import logging import os from slugify import slugify from labonneboite.conf import settings logger = logging.getLogger('main') def get_file_path(office): file_path = "pdf/%s/%s/%s/%s.pdf" % (office.departement, office.naf, slugify(office.name.strip()[0]), office.siret) full_path = os.path.join(settings.GLOBAL_STATIC_PATH, file_path) return full_path def write_file(office, data): filename = get_file_path(office) if not os.path.exists(os.path.dirname(filename)): try: os.makedirs(os.path.dirname(filename)) except OSError as exc: # Guard against race condition if exc.errno != errno.EEXIST: raise with open(filename, "w") as f: f.write(data) f.close() logger.info("wrote PDF file to %s", filename) def delete_file(office): filename = get_file_path(office) if os.path.exists(filename): os.remove(filename)
# coding: utf8 import errno import logging import os from slugify import slugify from labonneboite.conf import settings logger = logging.getLogger('main') def get_file_path(office): file_path = "pdf/%s/%s/%s/%s.pdf" % (office.departement, office.naf, slugify(office.name.strip()[0]), office.siret) full_path = os.path.join(settings.GLOBAL_STATIC_PATH, file_path) return full_path def write_file(office, data): filename = get_file_path(office) if not os.path.exists(os.path.dirname(filename)): try: os.makedirs(os.path.dirname(filename)) except OSError as exc: # Guard against race condition if exc.errno != errno.EEXIST: raise with open(filename, "w") as f: f.write(data) f.close() logger.info("wrote PDF file to %s", filename) def delete_file(office): # FIXME : Works only on one front-end... filename = get_file_path(office) if os.path.exists(filename): os.remove(filename)
Add FIXME for delete PDF
Add FIXME for delete PDF
Python
agpl-3.0
StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite
84a2aa1187cf7a9ec7593920d9ad0708b7d28f55
sqlobject/tests/test_pickle.py
sqlobject/tests/test_pickle.py
import pickle from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Pickle instances ######################################## class TestPickle(SQLObject): question = StringCol() answer = IntCol() test_question = 'The Ulimate Question of Life, the Universe and Everything' test_answer = 42 def test_pickleCol(): setupClass(TestPickle) connection = TestPickle._connection test = TestPickle(question=test_question, answer=test_answer) pickle_data = pickle.dumps(test, pickle.HIGHEST_PROTOCOL) connection.cache.clear() test = pickle.loads(pickle_data) test2 = connection.cache.tryGet(test.id, TestPickle) assert test2 is test assert test.question == test_question assert test.answer == test_answer if (connection.dbName == 'sqlite') and connection._memory: return # The following test requires a different connection test = TestPickle.get(test.id, connection=getConnection(registry='')) # to make a different DB URI # and open another connection raises(pickle.PicklingError, pickle.dumps, test, pickle.HIGHEST_PROTOCOL)
import pickle from sqlobject import * from sqlobject.tests.dbtest import * ######################################## ## Pickle instances ######################################## class TestPickle(SQLObject): question = StringCol() answer = IntCol() test_question = 'The Ulimate Question of Life, the Universe and Everything' test_answer = 42 def test_pickleCol(): setupClass(TestPickle) connection = TestPickle._connection test = TestPickle(question=test_question, answer=test_answer) pickle_data = pickle.dumps(test, pickle.HIGHEST_PROTOCOL) connection.cache.clear() test = pickle.loads(pickle_data) test2 = connection.cache.tryGet(test.id, TestPickle) assert test2 is test assert test.question == test_question assert test.answer == test_answer if (connection.dbName == 'sqlite') and connection._memory: return # The following test requires a different connection test = TestPickle.get( test.id, # make a different DB URI and open another connection connection=getConnection(registry='')) raises(pickle.PicklingError, pickle.dumps, test, pickle.HIGHEST_PROTOCOL)
Fix flake8 E113 unexpected indentation
Fix flake8 E113 unexpected indentation
Python
lgpl-2.1
drnlm/sqlobject,sqlobject/sqlobject,sqlobject/sqlobject,drnlm/sqlobject
75e7008e65cb731cc2a7a24dfa86f02049032b44
src/settings.py
src/settings.py
# -*- coding: utf-8 -*- import os HERE = os.path.abspath(os.path.dirname(__file__)) PROJECT_ROOT = os.path.abspath(os.path.join(HERE, os.pardir)) class Config: SECRET_KEY = os.environ.get('SECRET_KEY', 'secret-key') SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') SERVER_NAME = os.environ.get('HOST_NAME', 'localhost:5000') class ProdConfig(Config): ENV = 'prod' DEBUG = False class DevConfig(Config): ENV = 'dev' DEBUG = True LOAD_FAKE_DATA = True class TestConfig(Config): SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL', 'postgresql://localhost/') TESTING = True DEBUG = True # For: `nose.proxy.AssertionError: Popped wrong request context.` # http://stackoverflow.com/a/28139033/399726 # https://github.com/jarus/flask-testing/issues/21 PRESERVE_CONTEXT_ON_EXCEPTION = False
# -*- coding: utf-8 -*- import os HERE = os.path.abspath(os.path.dirname(__file__)) PROJECT_ROOT = os.path.abspath(os.path.join(HERE, os.pardir)) class Config: SECRET_KEY = os.environ.get('SECRET_KEY', 'secret-key') SQLALCHEMY_TRACK_MODIFICATIONS = False SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL', 'postgresql://localhost/') SERVER_NAME = os.environ.get('HOST_NAME', 'localhost:5000') class ProdConfig(Config): ENV = 'prod' DEBUG = False class DevConfig(Config): ENV = 'dev' DEBUG = True LOAD_FAKE_DATA = True class TestConfig(Config): SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL', 'postgresql://localhost/') TESTING = True DEBUG = True # For: `nose.proxy.AssertionError: Popped wrong request context.` # http://stackoverflow.com/a/28139033/399726 # https://github.com/jarus/flask-testing/issues/21 PRESERVE_CONTEXT_ON_EXCEPTION = False
Set production database to test database
Set production database to test database
Python
mit
zhoux10/pdfhook,zhoux10/pdfhook,zhoux10/pdfhook
8c0f87858b1dc58d23006cd581cfa74d23096a44
trex/serializers.py
trex/serializers.py
# -*- coding: utf-8 -*- # # (c) 2014 Bjoern Ricks <[email protected]> # # See LICENSE comming with the source of 'trex' for details. # from rest_framework.serializers import HyperlinkedModelSerializer from trex.models.project import Project class ProjectSerializer(HyperlinkedModelSerializer): class Meta: model = Project fields = ("url", "name", "description", "active", "created")
# -*- coding: utf-8 -*- # # (c) 2014 Bjoern Ricks <[email protected]> # # See LICENSE comming with the source of 'trex' for details. # from rest_framework.serializers import HyperlinkedModelSerializer from trex.models.project import Project, Entry class ProjectSerializer(HyperlinkedModelSerializer): class Meta: model = Project fields = ("url", "name", "description", "active", "created") class ProjectDetailSerializer(HyperlinkedModelSerializer): class Meta: model = Project fields = ("name", "description", "active", "created", "entries") class EntryDetailSerializer(HyperlinkedModelSerializer): class Meta: model = Entry fields = ("date", "duration", "description", "state", "user", "created")
Add a ProjectDetailSerializer and EntryDetailSerializer
Add a ProjectDetailSerializer and EntryDetailSerializer
Python
mit
bjoernricks/trex,bjoernricks/trex
2e9a6a2babb16f4ed9c3367b21ee28514d1988a8
srm/__main__.py
srm/__main__.py
""" The default module run when imported from the command line and also the main entry point defined in setup.py. Ex: python3 -m srm """ import click from . import __version__, status @click.group() @click.version_option(__version__) def cli() -> None: """Main command-line entry method.""" cli.add_command(status.cli) if __name__ == '__main__': cli()
""" The default module run when imported from the command line and also the main entry point defined in setup.py. Ex: python3 -m srm """ import click from . import __version__, status @click.group() @click.version_option(__version__) def cli() -> None: """Main command-line entry method.""" cli.add_command(status.cli) cli(prog_name='srm')
Set correct program name in 'help' output
Set correct program name in 'help' output
Python
mit
cmcginty/simple-rom-manager,cmcginty/simple-rom-manager
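A minimal sketch of the click pattern behind the commit above (not the project's actual code): when a package is started with "python -m", click derives the program name from sys.argv[0] and prints "__main__.py" in usage and help text unless prog_name is passed explicitly.

import click

@click.group()
def cli() -> None:
    """Hypothetical command-line entry point."""

if __name__ == "__main__":
    # Without prog_name, click falls back to sys.argv[0], which is
    # "__main__.py" when the package is run via "python -m srm".
    cli(prog_name="srm")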
e54d753a3fb58032936cbf5e137bb5ef67e2813c
task_15.py
task_15.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """Provides variables for string and integer conversion.""" NOT_THE_QUESTION = 'The answer to life, the universe, and everything? It\'s ' ANSWER = 42
#!/usr/bin/env python # -*- coding: utf-8 -*- """Provides variables for string and integer conversion.""" NOT_THE_QUESTION = 'The answer to life, the universe, and everything? It\'s ' ANSWER = 42 THANKS_FOR_THE_FISH = str(NOT_THE_QUESTION) + str(ANSWER)
Concatenate the two values by converting each with str() and assigning the result to a new variable
Concatenate the two values by converting each with str() and assigning the result to a new variable
Python
mpl-2.0
gracehyemin/is210-week-03-warmup,gracehyemin/is210-week-03-warmup
26df58ea40651ac20845ed4a4a84642e61cdf84a
deployer/__init__.py
deployer/__init__.py
from __future__ import absolute_import import deployer.logger from celery.signals import setup_logging __version__ = '0.1.10' __author__ = 'sukrit' deployer.logger.init_logging('root') setup_logging.connect(deployer.logger.init_celery_logging)
from __future__ import absolute_import import deployer.logger from celery.signals import setup_logging __version__ = '0.2' __author__ = 'sukrit' deployer.logger.init_logging('root') setup_logging.connect(deployer.logger.init_celery_logging)
Upgrade to 0.2: Centralized logging support
Upgrade to 0.2: Centralized logging support
Python
mit
totem/cluster-deployer,totem/cluster-deployer,totem/cluster-deployer
e55a1e97a191dd5c9fc43a1d2ed0b723ac64f46a
stacker/logger/__init__.py
stacker/logger/__init__.py
import sys import logging DEBUG_FORMAT = ("[%(asctime)s] %(levelname)s %(name)s:%(lineno)d" "(%(funcName)s): %(message)s") INFO_FORMAT = ("[%(asctime)s] %(message)s") COLOR_FORMAT = ("[%(asctime)s] \033[%(color)sm%(message)s\033[39m") ISO_8601 = "%Y-%m-%dT%H:%M:%S" class ColorFormatter(logging.Formatter): """ Handles colorizing formatted log messages if color provided. """ def format(self, record): if 'color' not in record.__dict__: record.__dict__['color'] = 37 msg = super(ColorFormatter, self).format(record) return msg def setup_logging(verbosity): log_level = logging.INFO log_format = INFO_FORMAT if sys.stdout.isatty(): log_format = COLOR_FORMAT if verbosity > 0: log_level = logging.DEBUG log_format = DEBUG_FORMAT if verbosity < 2: logging.getLogger("botocore").setLevel(logging.CRITICAL) hdlr = logging.StreamHandler() hdlr.setFormatter(ColorFormatter(log_format, ISO_8601)) logging.root.addHandler(hdlr) logging.root.setLevel(log_level)
import sys import logging DEBUG_FORMAT = ("[%(asctime)s] %(levelname)s %(threadName)s " "%(name)s:%(lineno)d(%(funcName)s): %(message)s") INFO_FORMAT = ("[%(asctime)s] %(message)s") COLOR_FORMAT = ("[%(asctime)s] \033[%(color)sm%(message)s\033[39m") ISO_8601 = "%Y-%m-%dT%H:%M:%S" class ColorFormatter(logging.Formatter): """ Handles colorizing formatted log messages if color provided. """ def format(self, record): if 'color' not in record.__dict__: record.__dict__['color'] = 37 msg = super(ColorFormatter, self).format(record) return msg def setup_logging(verbosity): log_level = logging.INFO log_format = INFO_FORMAT if sys.stdout.isatty(): log_format = COLOR_FORMAT if verbosity > 0: log_level = logging.DEBUG log_format = DEBUG_FORMAT if verbosity < 2: logging.getLogger("botocore").setLevel(logging.CRITICAL) hdlr = logging.StreamHandler() hdlr.setFormatter(ColorFormatter(log_format, ISO_8601)) logging.root.addHandler(hdlr) logging.root.setLevel(log_level)
Include threadName in DEBUG format
Include threadName in DEBUG format
Python
bsd-2-clause
remind101/stacker,remind101/stacker
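A small illustrative sketch (logger and thread names invented, not taken from stacker) of what %(threadName)s adds to DEBUG output: records emitted from worker threads carry the thread's name, which is exactly what the format change above surfaces.

import logging
import threading

logging.basicConfig(
    format="[%(asctime)s] %(levelname)s %(threadName)s %(name)s: %(message)s",
    level=logging.DEBUG,
)
log = logging.getLogger("example")

def work():
    log.debug("working")  # tagged with the worker thread's name

t = threading.Thread(target=work, name="worker-1")
t.start()
t.join()
log.debug("done")  # tagged with MainThread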
84e7ebd3df1fc7b9cb52fa5c7fcb3af87e6b454d
playserver/trackchecker.py
playserver/trackchecker.py
from threading import Timer from . import track _listeners = [] class TrackChecker(): def __init__(self, interval = 5): self.listeners = [] self.CHECK_INTERVAL = interval self.currentSong = "" self.currentArtist = "" self.currentAlbum = "" self.timer = None def checkSong(self): song = track.getCurrentSong() artist = track.getCurrentArtist() album = track.getCurrentAlbum() if (song != self.currentSong or artist != self.currentArtist or album != self.currentAlbum): self.currentSong = song self.currentArtist = artist self.currentAlbum = album self._callListeners() if self.timer != None: self.startTimer() def registerListener(self, function): _listeners.append(function) def _callListeners(self): for listener in _listeners: listener() def startTimer(self): self.timer = Timer(self.CHECK_INTERVAL, self.checkSong) timer.daemon = True self.timer.start() def cancelTimer(self): self.timer.cancel() self.timer = None
from threading import Timer from . import track _listeners = [] class TrackChecker(): def __init__(self, interval = 5): self.listeners = [] self.CHECK_INTERVAL = interval self.currentSong = "" self.currentArtist = "" self.currentAlbum = "" self.timer = None def checkSong(self): song = track.getCurrentSong() artist = track.getCurrentArtist() album = track.getCurrentAlbum() if (song != self.currentSong or artist != self.currentArtist or album != self.currentAlbum): self.currentSong = song self.currentArtist = artist self.currentAlbum = album self._callListeners() if self.timer != None: self.startTimer() def registerListener(self, function): _listeners.append(function) def _callListeners(self): for listener in _listeners: listener() def startTimer(self): self.timer = Timer(self.CHECK_INTERVAL, self.checkSong) self.timer.daemon = True self.timer.start() def cancelTimer(self): self.timer.cancel() self.timer = None
Fix name error in setting daemon
Fix name error in setting daemon
Python
mit
ollien/playserver,ollien/playserver,ollien/playserver
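A hedged re-sketch of the repeating-timer pattern behind the fix above (class and callback names are placeholders): the daemon flag must be set on the Timer instance stored in self.timer; referring to a bare timer name, as the old code did, raises NameError.

from threading import Timer

class Poller:
    """Placeholder polling class, not the project's TrackChecker."""

    def __init__(self, interval=5):
        self.interval = interval
        self.timer = None

    def _tick(self):
        print("checking...")  # placeholder work
        if self.timer is not None:
            self.start()      # reschedule the next tick

    def start(self):
        self.timer = Timer(self.interval, self._tick)
        self.timer.daemon = True  # set on the instance attribute, not a bare name
        self.timer.start()

    def stop(self):
        if self.timer is not None:
            self.timer.cancel()
            self.timer = None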
1c3d2669d9bbe1aa3f4387f864d9085945e9257c
zeus/api/schemas/hook.py
zeus/api/schemas/hook.py
from base64 import urlsafe_b64encode from datetime import timedelta from marshmallow import Schema, fields, post_load from zeus.models import Hook from zeus.utils import timezone class HookSchema(Schema): id = fields.UUID(dump_only=True) provider = fields.Str() token = fields.Method('get_token', dump_only=True) secret_uri = fields.Method('get_secret_uri', dump_only=True) public_uri = fields.Method('get_public_uri', dump_only=True) created_at = fields.DateTime(attribute="date_created", dump_only=True) @post_load def make_hook(self, data): return Hook(**data) def get_token(self, obj): # we allow visibility of tokens for 24 hours if obj.date_created > timezone.now() - timedelta(days=1): return urlsafe_b64encode(obj.token).decode('utf-8') return None def get_public_uri(self, obj): return '/hooks/{}'.format(str(obj.id)) def get_secret_uri(self, obj): return '/hooks/{}/{}'.format(str(obj.id), obj.get_signature())
from base64 import urlsafe_b64encode from datetime import timedelta from marshmallow import Schema, fields, post_load from zeus.models import Hook from zeus.utils import timezone class HookSchema(Schema): id = fields.UUID(dump_only=True) provider = fields.Str() token = fields.Method('get_token', dump_only=True) secret_uri = fields.Method('get_secret_uri', dump_only=True) public_uri = fields.Method('get_public_uri', dump_only=True) created_at = fields.DateTime(attribute="date_created", dump_only=True) @post_load def make_hook(self, data): return Hook(**data) def get_token(self, obj): # we allow visibility of tokens for 24 hours if obj.date_created > timezone.now() - timedelta(days=1): return urlsafe_b64encode(obj.token).decode('utf-8') return None def get_public_uri(self, obj): return '/hooks/{}/public'.format(str(obj.id)) def get_secret_uri(self, obj): return '/hooks/{}/{}'.format(str(obj.id), obj.get_signature())
Fix base url for public generation
Fix base url for public generation
Python
apache-2.0
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
6c2dae9bad86bf3f40d892eba50853d704f696b7
pombola/settings/tests.py
pombola/settings/tests.py
from .base import * COUNTRY_APP = None INSTALLED_APPS = INSTALLED_APPS + \ ('pombola.hansard', 'pombola.projects', 'pombola.place_data', 'pombola.votematch', 'speeches', 'pombola.spinner' ) + \ APPS_REQUIRED_BY_SPEECHES # create the ENABLED_FEATURES hash that is used to toggle features on and off. ENABLED_FEATURES = {} for key in ALL_OPTIONAL_APPS: # add in the optional apps ENABLED_FEATURES[key] = ('pombola.' + key in INSTALLED_APPS) or (key in INSTALLED_APPS) BREADCRUMB_URL_NAME_MAPPINGS = { 'organisation' : ('Organisations', '/organisation/all/'), }
from .base import * COUNTRY_APP = None INSTALLED_APPS = INSTALLED_APPS + \ ('pombola.hansard', 'pombola.projects', 'pombola.place_data', 'pombola.votematch', 'speeches', 'pombola.spinner', 'pombola.interests_register') + \ APPS_REQUIRED_BY_SPEECHES # create the ENABLED_FEATURES hash that is used to toggle features on and off. ENABLED_FEATURES = {} for key in ALL_OPTIONAL_APPS: # add in the optional apps ENABLED_FEATURES[key] = ('pombola.' + key in INSTALLED_APPS) or (key in INSTALLED_APPS) BREADCRUMB_URL_NAME_MAPPINGS = { 'organisation' : ('Organisations', '/organisation/all/'), }
Make sure that the interests_register tables are created
Make sure that the interests_register tables are created Nose tries to run the interests_register tests, but they will fail unless the interests_register app is added to INSTALLED_APPS, because its tables won't be created in the test database.
Python
agpl-3.0
patricmutwiri/pombola,geoffkilpin/pombola,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,mysociety/pombola,hzj123/56th,patricmutwiri/pombola,hzj123/56th,ken-muturi/pombola,ken-muturi/pombola,geoffkilpin/pombola,mysociety/pombola,ken-muturi/pombola,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,patricmutwiri/pombola,hzj123/56th,ken-muturi/pombola,patricmutwiri/pombola,geoffkilpin/pombola,ken-muturi/pombola,patricmutwiri/pombola,mysociety/pombola,mysociety/pombola,hzj123/56th
a5ce35c44938d37aa9727d37c0cbe0232b8e92d3
socializr/management/commands/socializr_update.py
socializr/management/commands/socializr_update.py
''' Main command which is meant to be run daily to get the information from various social networks into the local db. ''' import traceback from django.core.management.base import BaseCommand, CommandError from django.core.exceptions import ImproperlyConfigured from socializr.base import get_socializr_configs class Command(BaseCommand): help = 'Performs the oauth2 dance and save the creds for future use.' def handle(self, *args, **options): configs = get_socializr_configs() for config_class in configs: config_obj = config_class() self.stdout.write("Processing {}".format(config_class.__name__)) try: config_obj.collect() except Exception: self.stderr.write("There was an exception processing {}".format(config_class.__name__)) traceback.print_exc()
''' Main command which is meant to be run daily to get the information from various social networks into the local db. ''' import traceback from django.core.management.base import BaseCommand, CommandError from django.core.exceptions import ImproperlyConfigured from socializr.base import get_socializr_configs class Command(BaseCommand): help = 'Performs the oauth2 dance and save the creds for future use.' def handle(self, *args, **options): configs = get_socializr_configs() for config_class in configs: config_obj = config_class() try: config_obj.collect() except Exception: self.stderr.write("There was an exception processing {}".format(config_class.__name__)) traceback.print_exc()
Remove output except when there is an error.
Remove output except when there is an error.
Python
mit
CIGIHub/django-socializr,albertoconnor/django-socializr
c8a41bbf11538dbc17de12e32ba5af5e93fd0b2c
src/utils/plugins.py
src/utils/plugins.py
from utils import models class Plugin: plugin_name = None display_name = None description = None author = None short_name = None stage = None manager_url = None version = None janeway_version = None is_workflow_plugin = False jump_url = None handshake_url = None article_pk_in_handshake_url = False press_wide = False kanban_card = '{plugin_name}/kanban_card.html'.format( plugin_name=plugin_name, ) @classmethod def install(cls): plugin, created = cls.get_or_create_plugin_object() if not created and plugin.version != cls.version: plugin.version = cls.version plugin.save() return plugin, created @classmethod def hook_registry(cls): pass @classmethod def get_or_create_plugin_object(cls): plugin, created = models.Plugin.objects.get_or_create( name=cls.short_name, display_name=cls.display_name, press_wide=cls.press_wide, defaults={'version': cls.version, 'enabled': True}, ) return plugin, created
from utils import models class Plugin: plugin_name = None display_name = None description = None author = None short_name = None stage = None manager_url = None version = None janeway_version = None is_workflow_plugin = False jump_url = None handshake_url = None article_pk_in_handshake_url = False press_wide = False kanban_card = '{plugin_name}/kanban_card.html'.format( plugin_name=plugin_name, ) @classmethod def install(cls): plugin, created = cls.get_or_create_plugin_object() if not created and plugin.version != cls.version: print('Plugin updated: {0} -> {1}'.format(cls.version, plugin.version)) plugin.version = cls.version plugin.save() return plugin, created @classmethod def hook_registry(cls): pass @classmethod def get_or_create_plugin_object(cls): plugin, created = models.Plugin.objects.get_or_create( name=cls.short_name, defaults={ 'display_name': cls.display_name, 'version': cls.version, 'enabled': True, 'press_wide': cls.press_wide, }, ) return plugin, created @classmethod def get_self(cls): try: plugin = models.Plugin.objects.get( name=cls.short_name, ) except models.Plugin.MultipleObjectsReturned: plugin = models.Plugin.objects.filter( name=cls.short_name, ).order_by( '-version' ).first() except models.Plugin.DoesNotExist: return None return plugin
Add get_self and change get_or_create to avoid mis-creation.
Add get_self and change get_or_create to avoid mis-creation.
Python
agpl-3.0
BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway
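A sketch of the Django get_or_create idiom the commit above moves to, written against a hypothetical Plugin model rather than the project's own: only the lookup keywords outside defaults are matched against existing rows, so a plugin whose display_name or version has since changed is found and updated instead of a duplicate being created.

from myapp.models import Plugin  # hypothetical app and model

def install_plugin(short_name, display_name, version):
    # `name` is the only lookup key; everything in `defaults` is applied
    # solely when a new row is created. Putting display_name or version in
    # the lookup would miss existing rows after an upgrade.
    plugin, created = Plugin.objects.get_or_create(
        name=short_name,
        defaults={
            "display_name": display_name,
            "version": version,
            "enabled": True,
        },
    )
    if not created and plugin.version != version:
        plugin.version = version
        plugin.save()
    return plugin, created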
17499a12c9216d1db907305ef30f58970cd0932e
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/api/serializers.py
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/api/serializers.py
from django.contrib.auth import get_user_model from rest_framework import serializers User = get_user_model() class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = ["username", "email", "name", "url"] extra_kwargs = { "url": {"view_name": "api:user-detail", "lookup_field": "username"} }
from django.contrib.auth import get_user_model from rest_framework import serializers User = get_user_model() class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = ["username", "name", "url"] extra_kwargs = { "url": {"view_name": "api:user-detail", "lookup_field": "username"} }
Remove email from User API
Remove email from User API
Python
bsd-3-clause
pydanny/cookiecutter-django,ryankanno/cookiecutter-django,pydanny/cookiecutter-django,ryankanno/cookiecutter-django,trungdong/cookiecutter-django,pydanny/cookiecutter-django,trungdong/cookiecutter-django,ryankanno/cookiecutter-django,trungdong/cookiecutter-django,ryankanno/cookiecutter-django,trungdong/cookiecutter-django,pydanny/cookiecutter-django
e32d95c40dcfa4d3eb07572d5fd4f0fda710c64c
indra/sources/phosphoELM/api.py
indra/sources/phosphoELM/api.py
import csv ppelm_s3_key = '' def process_from_dump(fname=None, delimiter='\t'): ppelm_json = [] if fname is None: # ToDo Get from S3 pass else: with open(fname, 'r') as f: csv_reader = csv.reader(f.readlines(), delimiter=delimiter) columns = next(csv_reader) for entry in csv_reader: row_dict = {columns[n]: entry[n] for n in range(len(columns))} ppelm_json.append(row_dict) return ppelm_json
import csv ppelm_s3_key = '' def process_from_dump(fname=None, delimiter='\t'): if fname is None: # ToDo Get from S3 return [] else: with open(fname, 'r') as f: csv_reader = csv.reader(f.readlines(), delimiter=delimiter) ppelm_json = _get_json_from_entry_rows(csv_reader) return ppelm_json def _get_json_from_entry_rows(row_iter): ppelm_json = [] columns = next(row_iter) for entry in row_iter: row_dict = {columns[n]: entry[n] for n in range(len(columns))} ppelm_json.append(row_dict) return ppelm_json
Move iterator to own function
Move iterator to own function
Python
bsd-2-clause
sorgerlab/indra,sorgerlab/belpy,sorgerlab/belpy,bgyori/indra,johnbachman/belpy,johnbachman/indra,johnbachman/belpy,johnbachman/indra,bgyori/indra,sorgerlab/belpy,sorgerlab/indra,johnbachman/indra,bgyori/indra,johnbachman/belpy,sorgerlab/indra
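A standalone usage sketch of the extracted row-to-dict step (made-up columns and values, not real phospho.ELM data); splitting it into its own function is what lets it consume any iterator of rows rather than only a file on disk.

import csv
import io

def rows_to_json(row_iter):
    # first row is the header; each later row becomes a column->value dict
    columns = next(row_iter)
    return [dict(zip(columns, entry)) for entry in row_iter]

tsv = "kinase\tsubstrate\nPKA\tCREB1\n"
rows = csv.reader(io.StringIO(tsv), delimiter="\t")
print(rows_to_json(rows))  # [{'kinase': 'PKA', 'substrate': 'CREB1'}]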
c5671ab2e5115ce9c022a97a088300dc408e2aa4
opendc/util/path_parser.py
opendc/util/path_parser.py
import json import sys import re def parse(version, endpoint_path): """Map an HTTP call to an API path""" with open('opendc/api/{}/paths.json'.format(version)) as paths_file: paths = json.load(paths_file) endpoint_path_parts = endpoint_path.split('/') paths_parts = [x.split('/') for x in paths if len(x.split('/')) == len(endpoint_path_parts)] for path_parts in paths_parts: found = True for (endpoint_part, part) in zip(endpoint_path_parts, path_parts): print endpoint_part, part if not part.startswith('{') and endpoint_part != part: found = False break if found: sys.stdout.flush() return '{}/{}'.format(version, '/'.join(path_parts)) return None
import json import sys import re def parse(version, endpoint_path): """Map an HTTP endpoint path to an API path""" with open('opendc/api/{}/paths.json'.format(version)) as paths_file: paths = json.load(paths_file) endpoint_path_parts = endpoint_path.strip('/').split('/') paths_parts = [x.split('/') for x in paths if len(x.split('/')) == len(endpoint_path_parts)] for path_parts in paths_parts: found = True for (endpoint_part, part) in zip(endpoint_path_parts, path_parts): if not part.startswith('{') and endpoint_part != part: found = False break if found: sys.stdout.flush() return '{}/{}'.format(version, '/'.join(path_parts)) return None
Make path parser robust to trailing /
Make path parser robust to trailing /
Python
mit
atlarge-research/opendc-web-server,atlarge-research/opendc-web-server
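The essence of the fix above in isolation: stripping slashes before splitting makes a path with a trailing (or leading) slash produce the same parts as one without. A tiny self-contained sketch with invented paths:

def split_path(endpoint_path):
    return endpoint_path.strip("/").split("/")

print(split_path("/simulations/42/"))  # ['simulations', '42']
print(split_path("simulations/42"))    # ['simulations', '42']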
87844a776c2d409bdf7eaa99da06d07d77d7098e
tests/test_gingerit.py
tests/test_gingerit.py
import pytest from gingerit.gingerit import GingerIt @pytest.mark.parametrize("text,expected", [ ( "The smelt of fliwers bring back memories.", "The smell of flowers brings back memories." ), ( "Edwards will be sck yesterday", "Edwards was sick yesterday" ), ( "Edwards was sick yesterday.", "Edwards was sick yesterday." ), ( "", "" ) ]) def test_gingerit(text, expected): parser = GingerIt() assert parser.parse(text)["result"] == expected
import pytest from gingerit.gingerit import GingerIt @pytest.mark.parametrize("text,expected,corrections", [ ( "The smelt of fliwers bring back memories.", "The smell of flowers brings back memories.", [ {'start': 21, 'definition': None, 'correct': u'brings', 'text': 'bring'}, {'start': 13, 'definition': u'a plant cultivated for its blooms or blossoms', 'correct': u'flowers', 'text': 'fliwers'}, {'start': 4, 'definition': None, 'correct': u'smell', 'text': 'smelt'} ] ), ( "Edwards will be sck yesterday", "Edwards was sick yesterday", [ {'start': 16, 'definition': u'affected by an impairment of normal physical or mental function', 'correct': u'sick', 'text': 'sck'}, {'start': 8, 'definition': None, 'correct': u'was', 'text': 'will be'} ] ), ( "Edwards was sick yesterday.", "Edwards was sick yesterday.", [] ), ( "", "", [] ) ]) def test_gingerit(text, expected, corrections): output = GingerIt().parse(text) assert output["result"] == expected assert output["corrections"] == corrections
Extend test to cover corrections output
Extend test to cover corrections output
Python
mit
Azd325/gingerit
b47d7b44030a8388d1860316c117e02796ba9ccc
__init__.py
__init__.py
# -*- coding: utf-8 -*- """ FLUID DYNAMICS SIMULATOR Requires Python 2.7 and OpenCV 2 Tom Blanchet (c) 2013 - 2014 (revised 2017) """ import cv2 from fluidDoubleCone import FluidDoubleCone, RGB from fluidDoubleConeView import FluidDCViewCtrl def slow_example(): inImg = cv2.imread("gameFace.jpg") game_face_dCone = FluidDoubleCone(inImg, RGB) ui = FluidDCViewCtrl(game_face_dCone, 615, 737) def fast_example(): inImg = cv2.imread("fastKid.png") fast_kid_dCone = FluidDoubleCone(inImg, RGB) ui = FluidDCViewCtrl(fast_kid_dCone, 200*2, 200*2) ui.mainloop() if __name__ == "__main__": fast_example()
# -*- coding: utf-8 -*- """ FLUID DYNAMICS SIMULATOR Requires Python 2.7, pygame, numpy, and OpenCV 2 Tom Blanchet (c) 2013 - 2014 (revised 2017) """ import cv2 from fluidDoubleCone import FluidDoubleCone, RGB from fluidDoubleConeView import FluidDCViewCtrl def slow_example(): inImg = cv2.imread("gameFace.jpg") game_face_dCone = FluidDoubleCone(inImg, RGB) ui = FluidDCViewCtrl(game_face_dCone, 615, 737) def fast_example(): inImg = cv2.imread("fastKid.png") fast_kid_dCone = FluidDoubleCone(inImg, RGB) ui = FluidDCViewCtrl(fast_kid_dCone, 200*2, 200*2) ui.mainloop() if __name__ == "__main__": fast_example()
Include pygame and numpy in description.
Include pygame and numpy in description.
Python
apache-2.0
FrogBomb/fluidDynamicsSimulator
6447f7f025d00006300ec17668a479214206b09a
examples/guess/submissions/wrong_answer/guess_modulo.py
examples/guess/submissions/wrong_answer/guess_modulo.py
#!/usr/bin/env python3 for i in range(1000): print(337*i%1000+1)
#!/usr/bin/env python3 import sys # This submission exposes a bug in an older version of the output validator # where the valid range of possible submissions becomes empty because the range # of possible values was updated incorrectly. # The expected verdict is WA, but with the bug present, it would give RTE. min_hidden = 1 max_hidden = 1000 for guess in [400, 700, 1, 600]: print(guess) ans = input() if ans == 'correct': break elif ans == 'lower': max_hidden = min(max_hidden, guess-1) elif ans == 'higher': min_hidden = max(min_hidden, guess-1) else: assert False assert min_hidden <= max_hidden
Fix counterexample to be more explicit
Fix counterexample to be more explicit
Python
mit
Kattis/problemtools,Kattis/problemtools,Kattis/problemtools,Kattis/problemtools
c823a476b265b46d27b221831be952a811fe3468
ANN.py
ANN.py
class Neuron: pass class NeuronNetwork: neurons = []
class Neuron: pass class NeuronNetwork: neurons = [] def __init__(self, rows, columns): self.neurons = [] for row in xrange(rows): self.neurons.append([]) for column in xrange(columns): self.neurons[row].append(Neuron())
Create 2D list of Neurons in NeuronNetwork's init
Create 2D list of Neurons in NeuronNetwork's init
Python
mit
tysonzero/py-ann
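A Python 3 re-sketch of the constructor added above (xrange replaced by range, a list comprehension instead of nested appends); the assertions simply confirm the rows x columns shape.

class Neuron:
    pass

class NeuronNetwork:
    def __init__(self, rows, columns):
        self.neurons = [[Neuron() for _ in range(columns)]
                        for _ in range(rows)]

net = NeuronNetwork(2, 3)
assert len(net.neurons) == 2
assert all(len(row) == 3 for row in net.neurons)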
168937c586b228c05ada2da79a55c9416c3180d3
antifuzz.py
antifuzz.py
''' File: antifuzz.py Authors: Kaitlin Keenan and Ryan Frank ''' import sys from shutil import copy2 import subprocess import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html def main(): # Take in file ogFile = sys.argv[1] # Make copy of file newFile = sys.argv[2] # Mess with the given file cmd(['lame','--quiet', '--scale', '1', ogFile]) print cmd(['mv', ogFile + ".mp3", newFile]) # Hash files ogHash = ssdeep.hash_from_file(ogFile) newHash = ssdeep.hash_from_file(newFile) # Compare the hashes #print ogHash print ssdeep.compare(ogHash, newHash) def cmd(command): #if (arg2 && arg1): p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE) out, err = p.communicate() return out if __name__ == "__main__": main()
''' File: antifuzz.py Authors: Kaitlin Keenan and Ryan Frank ''' import sys from shutil import copy2 import subprocess import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html import argparse def main(): parser = argparse.ArgumentParser() parser.add_argument("originalFile", help="File to antifuzz") parser.add_argument("newFile", help="Name of the antifuzzed file") args = parser.parse_args() # Take in file ogFile = args.originalFile # Make copy of file nFile = args.newFile # Mess with the given file mp3(ogFile, nFile) # Hash files ogHash = ssdeep.hash_from_file(ogFile) newHash = ssdeep.hash_from_file(nFile) # Compare the hashes #print ogHash diff=str(ssdeep.compare(ogHash, newHash)) print("The files are " + diff + "% different") def mp3(ogFile, newFile): cmd(['lame','--quiet', '--scale', '1', ogFile]) cmd(['mv', ogFile + ".mp3", newFile]) def cmd(command): #if (arg2 && arg1): p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE) out, err = p.communicate() return out if __name__ == "__main__": main()
Add help, make output more user friendly
Add help, make output more user friendly
Python
mit
ForensicTools/antifuzzyhashing-475-2161_Keenan_Frank
0418a4a2e2cf2dc6e156c880600491691a57c525
setup.py
setup.py
from setuptools import setup, find_packages import versioneer with open('requirements.txt') as f: requirements = f.read().splitlines() requirements = ['setuptools'] + requirements setup( name='pyxrf', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), author='Brookhaven National Laboratory', url='https://github.com/NSLS-II/PyXRF', packages=find_packages(), entry_points={'console_scripts': ['pyxrf = pyxrf.gui:run']}, package_data={'pyxrf.view': ['*.enaml'], 'configs': ['*.json']}, include_package_data=True, install_requires=requirements, python_requires='>=3.6', license='BSD', classifiers=['Development Status :: 3 - Alpha', "License :: OSI Approved :: BSD License", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Topic :: Software Development :: Libraries", "Intended Audience :: Science/Research"] )
from setuptools import setup, find_packages import versioneer with open('requirements.txt') as f: requirements = f.read().splitlines() requirements = ['setuptools'] + requirements setup( name='pyxrf', version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), author='Brookhaven National Laboratory', url='https://github.com/NSLS-II/PyXRF', packages=find_packages(), entry_points={'console_scripts': ['pyxrf = pyxrf.gui:run']}, package_data={'pyxrf.view': ['*.enaml'], 'configs': ['*.json'], 'pyxrf.core': ['*.yaml']}, include_package_data=True, install_requires=requirements, python_requires='>=3.6', license='BSD', classifiers=['Development Status :: 3 - Alpha', "License :: OSI Approved :: BSD License", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Topic :: Software Development :: Libraries", "Intended Audience :: Science/Research"] )
Include YAML data file in the package
Include YAML data file in the package
Python
bsd-3-clause
NSLS-II-HXN/PyXRF,NSLS-II-HXN/PyXRF,NSLS-II/PyXRF
0866695a2f60538d59277f45a69771664d6dee27
setup.py
setup.py
import sys import platform from setuptools import setup, Extension cpython = platform.python_implementation() == 'CPython' is_glibc = platform.libc_ver()[0] == 'glibc' libc_ok = is_glibc and platform.libc_ver()[1] >= '2.9' windows = sys.platform.startswith('win') min_win_version = windows and sys.version_info >= (3, 5) min_unix_version = not windows and sys.version_info >= (3, 3) if cpython and ((min_unix_version and libc_ok) or min_win_version): _cbor2 = Extension( '_cbor2', # math.h routines are built-in to MSVCRT libraries=['m'] if not windows else [], extra_compile_args=['-std=c99'], sources=[ 'source/module.c', 'source/encoder.c', 'source/decoder.c', 'source/tags.c', 'source/halffloat.c', ] ) kwargs = {'ext_modules': [_cbor2]} else: kwargs = {} setup( use_scm_version={ 'version_scheme': 'post-release', 'local_scheme': 'dirty-tag' }, setup_requires=[ 'setuptools >= 36.2.7', 'setuptools_scm >= 1.7.0' ], **kwargs )
import sys import platform from setuptools import setup, Extension cpython = platform.python_implementation() == 'CPython' is_glibc = platform.libc_ver()[0] == 'glibc' if is_glibc: glibc_ver = platform.libc_ver()[1] libc_numeric = tuple(int(x) for x in glibc_ver.split('.') if x.isdigit()) libc_ok = libc_numeric >= (2, 9) else: libc_ok = False windows = sys.platform.startswith('win') min_win_version = windows and sys.version_info >= (3, 5) min_unix_version = not windows and sys.version_info >= (3, 3) if cpython and ((min_unix_version and libc_ok) or min_win_version): _cbor2 = Extension( '_cbor2', # math.h routines are built-in to MSVCRT libraries=['m'] if not windows else [], extra_compile_args=['-std=c99'], sources=[ 'source/module.c', 'source/encoder.c', 'source/decoder.c', 'source/tags.c', 'source/halffloat.c', ] ) kwargs = {'ext_modules': [_cbor2]} else: kwargs = {} setup( use_scm_version={ 'version_scheme': 'post-release', 'local_scheme': 'dirty-tag' }, setup_requires=[ 'setuptools >= 36.2.7', 'setuptools_scm >= 1.7.0' ], **kwargs )
Fix glibc version string comparison
Fix glibc version string comparison
Python
mit
agronholm/cbor2,agronholm/cbor2,agronholm/cbor2
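Why the old string comparison was wrong and the tuple comparison is right, in a self-contained sketch: lexicographically, '2.19' sorts before '2.9', so any glibc with a two-digit minor version would have failed the >= '2.9' check.

print('2.19' >= '2.9')    # False -- string comparison is lexicographic
print((2, 19) >= (2, 9))  # True  -- numeric tuples compare correctly

def parse_glibc(ver):
    # '2.19' -> (2, 19); non-numeric parts are dropped
    return tuple(int(x) for x in ver.split('.') if x.isdigit())

print(parse_glibc('2.19') >= (2, 9))  # True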
bb204204482e868291efc34c5195d76024546a80
setup.py
setup.py
from distutils.core import setup from setuptools import find_packages setup(name='geventconnpool', version = "0.1", description = 'TCP connection pool for gevent', url="https://github.com/rasky/geventconnpool", author="Giovanni Bajo", author_email="[email protected]", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, install_requires=[ 'gevent >= 0.13' ], classifiers=[ "Development Status :: 4 - Beta", "Environment :: No Input/Output (Daemon)", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Topic :: Software Development", ])
from distutils.core import setup from setuptools import find_packages with open('README.rst') as file: long_description = file.read() setup(name='geventconnpool', version = "0.1a", description = 'TCP connection pool for gevent', long_description = long_description, url="https://github.com/rasky/geventconnpool", author="Giovanni Bajo", author_email="[email protected]", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, install_requires=[ 'gevent >= 0.13' ], classifiers=[ "Development Status :: 4 - Beta", "Environment :: No Input/Output (Daemon)", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Topic :: Software Development", ])
Add long description to the package.
Add long description to the package.
Python
mit
rasky/geventconnpool,rasky/geventconnpool
eebf41e6cf85f98d034708999e0321f9e09e4093
setup.py
setup.py
import names from setuptools import setup, find_packages setup( name=names.__title__, version=names.__version__, author=names.__author__, url="https://github.com/treyhunner/names", description="Generate random names", license=names.__license__, packages=find_packages(), package_data={'names': ['dist.*']}, include_package_data=True, entry_points={ 'console_scripts': [ 'names = names.main:main', ], }, )
import names from setuptools import setup, find_packages setup( name=names.__title__, version=names.__version__, author=names.__author__, url="https://github.com/treyhunner/names", description="Generate random names", long_description=open('README.rst').read(), license=names.__license__, packages=find_packages(), package_data={'names': ['dist.*']}, include_package_data=True, entry_points={ 'console_scripts': [ 'names = names.main:main', ], }, )
Use readme file for package long description
Use readme file for package long description
Python
mit
treyhunner/names,treyhunner/names
70e0c6bf40776cda30e871a290b2760bfdb993e5
setup.py
setup.py
from setuptools import setup __version__ = '0.2.2' setup( name='kuberender', version=__version__, py_modules=['kuberender'], install_requires=[ 'Jinja2==2.9.6', 'click==6.7', 'PyYAML==3.12', 'dpath==1.4.0', 'libvcs==0.2.3', ], entry_points=''' [console_scripts] kube-render=kuberender:run ''', )
from setuptools import setup __version__ = '0.2.3' setup( name='kuberender', version=__version__, py_modules=['kuberender'], description='A tool to render and apply Kubernetes manifests using Jinja2', install_requires=[ 'Jinja2==2.9.6', 'click==6.7', 'PyYAML==3.12', 'dpath==1.4.0', 'libvcs==0.2.3', ], entry_points=''' [console_scripts] kube-render=kuberender:run ''', )
Add description and bump version
Add description and bump version
Python
mit
jusbrasil/kube-render
676a1d3c74e526bd8cc67e97d89db2da7e207637
setup.py
setup.py
""" Py-Tree-sitter """ import platform from setuptools import setup, Extension setup( name = "tree_sitter", version = "0.0.8", maintainer = "Max Brunsfeld", maintainer_email = "[email protected]", author = "Max Brunsfeld", author_email = "[email protected]", url = "https://github.com/tree-sitter/py-tree-sitter", license = "MIT", platforms = ["any"], python_requires = ">=3.3", description = "Python bindings to the Tree-sitter parsing library", classifiers = [ "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Compilers", "Topic :: Text Processing :: Linguistic", ], packages = ['tree_sitter'], ext_modules = [ Extension( "tree_sitter_binding", [ "tree_sitter/core/lib/src/lib.c", "tree_sitter/binding.c", ], include_dirs = [ "tree_sitter/core/lib/include", "tree_sitter/core/lib/utf8proc", ], extra_compile_args = ( ['-std=c99'] if platform.system() != 'Windows' else None ) ) ], project_urls = { 'Source': 'https://github.com/tree-sitter/py-tree-sitter', 'Documentation': 'http://initd.org/psycopg/docs/', } )
""" Py-Tree-sitter """ import platform from setuptools import setup, Extension setup( name = "tree_sitter", version = "0.0.8", maintainer = "Max Brunsfeld", maintainer_email = "[email protected]", author = "Max Brunsfeld", author_email = "[email protected]", url = "https://github.com/tree-sitter/py-tree-sitter", license = "MIT", platforms = ["any"], python_requires = ">=3.3", description = "Python bindings to the Tree-sitter parsing library", classifiers = [ "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Compilers", "Topic :: Text Processing :: Linguistic", ], packages = ['tree_sitter'], ext_modules = [ Extension( "tree_sitter_binding", [ "tree_sitter/core/lib/src/lib.c", "tree_sitter/binding.c", ], include_dirs = [ "tree_sitter/core/lib/include", "tree_sitter/core/lib/utf8proc", ], extra_compile_args = ( ['-std=c99'] if platform.system() != 'Windows' else None ) ) ], project_urls = { 'Source': 'https://github.com/tree-sitter/py-tree-sitter', } )
Remove an incorrect documentation URL
Remove an incorrect documentation URL Fixes #9.
Python
mit
tree-sitter/py-tree-sitter,tree-sitter/py-tree-sitter,tree-sitter/py-tree-sitter
20596e2ff6c0d107362628770db4602e5089c7de
setup.py
setup.py
import versioneer from setuptools import setup, find_packages from codecs import open from os import path here = path.abspath(path.dirname(__file__)) # Read long description from the README file with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup( name='tohu', version=versioneer.get_version(), description='Create random data in a controllable way', long_description=long_description, url='https://github.com/maxalbert/tohu', author='Maximilian Albert', author_email='[email protected]', license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Topic :: Software Development :: Quality Assurance', 'Topic :: Software Development :: Testing', 'Topic :: Utilities', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', ], packages=['tohu', 'tohu/v4'], install_requires=['attrs', 'bidict', 'faker', 'pandas', 'psycopg2-binary', 'shapely', 'sqlalchemy', 'tqdm'], extras_require={ 'dev': ['ipython', 'jupyter'], 'test': ['pytest', 'nbval'], }, cmdclass=versioneer.get_cmdclass(), )
import versioneer from setuptools import setup, find_packages from codecs import open from os import path here = path.abspath(path.dirname(__file__)) # Read long description from the README file with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup( name='tohu', version=versioneer.get_version(), description='Create random data in a controllable way', long_description=long_description, url='https://github.com/maxalbert/tohu', author='Maximilian Albert', author_email='[email protected]', license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Topic :: Software Development :: Quality Assurance', 'Topic :: Software Development :: Testing', 'Topic :: Utilities', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', ], packages=['tohu', 'tohu/v4'], install_requires=['attrs', 'bidict', 'faker' 'geojson', 'pandas', 'psycopg2-binary', 'shapely', 'sqlalchemy', 'tqdm'], extras_require={ 'dev': ['ipython', 'jupyter'], 'test': ['pytest', 'nbval'], }, cmdclass=versioneer.get_cmdclass(), )
Add geojson as required dependency
Add geojson as required dependency
Python
mit
maxalbert/tohu
ae8a38c5e3952a98a586db1df15fd8b4527441c6
setup.py
setup.py
#!/usr/bin/env python import os import sys import io try: import setuptools except ImportError: from distribute_setup import use_setuptools use_setuptools() from setuptools import setup, Extension from setuptools import find_packages extra_compile_args = [] if os.name == 'nt' else ["-g", "-O2", "-march=native"] extra_link_args = [] if os.name == 'nt' else ["-g"] mod_cv_algorithms = Extension('cv_algorithms._cv_algorithms', sources=['src/thinning.cpp', 'src/distance.cpp', 'src/grassfire.cpp', 'src/popcount.cpp', 'src/neighbours.cpp'], extra_compile_args=extra_compile_args, extra_link_args=extra_link_args) setup( name='cv_algorithms', license='Apache license 2.0', packages=find_packages(exclude=['tests*']), install_requires=['cffi>=0.7'], ext_modules=[mod_cv_algorithms], test_suite='nose.collector', tests_require=['nose', 'coverage', 'mock', 'rednose', 'nose-parameterized'], setup_requires=['nose>=1.0'], platforms="any", zip_safe=False, version='1.0.0', long_description=io.open("README.rst", encoding="utf-8").read(), description='Optimized OpenCV extra algorithms for Python', url="https://github.com/ulikoehler/" )
#!/usr/bin/env python import os import sys import io try: import setuptools except ImportError: from distribute_setup import use_setuptools use_setuptools() from setuptools import setup, Extension from setuptools import find_packages extra_compile_args = [] if os.name == 'nt' else ["-g", "-O2", "-march=native"] extra_link_args = [] if os.name == 'nt' else ["-g"] platform_src = ["src/windows.cpp"] if os.name == 'nt' else [] mod_cv_algorithms = Extension('cv_algorithms._cv_algorithms', sources=['src/thinning.cpp', 'src/distance.cpp', 'src/grassfire.cpp', 'src/popcount.cpp', 'src/neighbours.cpp'] + platform_src, extra_compile_args=extra_compile_args, extra_link_args=extra_link_args) setup( name='cv_algorithms', license='Apache license 2.0', packages=find_packages(exclude=['tests*']), install_requires=['cffi>=0.7'], ext_modules=[mod_cv_algorithms], test_suite='nose.collector', tests_require=['nose', 'coverage', 'mock', 'rednose', 'nose-parameterized'], setup_requires=['nose>=1.0'], platforms="any", zip_safe=False, version='1.0.0', long_description=io.open("README.rst", encoding="utf-8").read(), description='Optimized OpenCV extra algorithms for Python', url="https://github.com/ulikoehler/" )
Include windows.cpp only on Windows
Include windows.cpp only on Windows
Python
apache-2.0
ulikoehler/cv_algorithms,ulikoehler/cv_algorithms
bf57dc8255df91954701fc13ca08e1999b9d2d39
setup.py
setup.py
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'CHANGES.txt')).read() requires = [ 'pyramid', 'pyramid_zodbconn', 'pyramid_tm', 'pyramid_debugtoolbar', 'ZODB3', 'waitress', 'repoze.folder', 'zope.interface', 'requests', 'feedparser', 'WebHelpers', 'zc.queue', ] setup(name='push-hub', version='0.5', description='push-hub', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pylons pyramid', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires = requires, tests_require= requires, extras_require={'test': ['mock']}, test_suite="pushhub", entry_points = """\ [paste.app_factory] main = pushhub:main [console_scripts] process_subscriber_notices = pushhub.scripts:process_subscriber_notices """, )
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.txt')).read() CHANGES = open(os.path.join(here, 'CHANGES.txt')).read() requires = [ 'pyramid', 'pyramid_zodbconn', 'pyramid_tm', 'pyramid_debugtoolbar', 'ZODB3', 'waitress', 'repoze.folder', 'zope.interface', 'requests', 'feedparser', 'WebHelpers', 'zc.queue', ] setup(name='push-hub', version='0.5', description='push-hub', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='Six Feet Up', author_email='[email protected]', url='http://www.sixfeetup.com', keywords='web pylons pyramid', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires = requires, tests_require= requires, extras_require={'test': ['mock']}, test_suite="pushhub", entry_points = """\ [paste.app_factory] main = pushhub:main [console_scripts] process_subscriber_notices = pushhub.scripts:process_subscriber_notices """, )
Add author and URL info to make mkrelease happy.
Add author and URL info to make mkrelease happy.
Python
bsd-3-clause
ucla/PushHubCore
af802bd8c7f8787f80caf64f226edc996ac6fc3b
setup.py
setup.py
from setuptools import setup, find_packages setup( name='tangled', version='0.1a3.dev0', description='Tangled namespace and utilities', long_description=open('README.rst').read(), url='http://tangledframework.org/', author='Wyatt Baldwin', author_email='[email protected]', packages=find_packages(), extras_require={ 'dev': ( 'coverage>=3.7.1', 'nose>=1.3.0', 'pep8>=1.4.6', 'pyflakes>=0.7.3', 'Sphinx>=1.2.1', 'sphinx_rtd_theme>=0.1.5', ) }, entry_points=""" [console_scripts] tangled = tangled.__main__:main [tangled.scripts] release = tangled.scripts:ReleaseCommand scaffold = tangled.scripts:ScaffoldCommand test = tangled.scripts:TestCommand """, classifiers=( 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', ), )
from setuptools import setup setup( name='tangled', version='0.1a3.dev0', description='Tangled namespace and utilities', long_description=open('README.rst').read(), url='http://tangledframework.org/', author='Wyatt Baldwin', author_email='[email protected]', packages=[ 'tangled', 'tangled.scripts', 'tangled.tests', 'tangled.tests.dummy_package', ], extras_require={ 'dev': ( 'coverage>=3.7.1', 'nose>=1.3.0', 'pep8>=1.4.6', 'pyflakes>=0.7.3', 'Sphinx>=1.2.1', 'sphinx_rtd_theme>=0.1.5', ) }, entry_points=""" [console_scripts] tangled = tangled.__main__:main [tangled.scripts] release = tangled.scripts:ReleaseCommand scaffold = tangled.scripts:ScaffoldCommand test = tangled.scripts:TestCommand """, classifiers=[ 'Development Status :: 1 - Planning', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', ], )
Update package configuration, pt. ii
Update package configuration, pt. ii
Python
mit
TangledWeb/tangled
a8fe2b52a418718bbde4214efc44cb59a7175430
setup.py
setup.py
#from distutils.core import setup from setuptools import setup setup( name='nubomedia', version='0.1', packages=['core', 'test', 'util', 'wsgi', 'model'], install_requires=[ 'python-heatclient', 'bottle', ], url='', license='', author='mpa', author_email='', description='Setuptool for Nubomedia', )
#from distutils.core import setup from setuptools import setup setup( name='nubomedia', version='0.1', packages=['core', 'test', 'util', 'wsgi', 'model'], install_requires=[ 'python-heatclient', 'bottle', ], url='', license='', author='mpa', author_email='', description='Nubomedia', )
Fix bugs in install script. Now it works fine.
Fix bugs in install script. Now it works fine.
Python
apache-2.0
manue1/connectivity-manager-agent,manue1/connectivity-manager-agent
b10412389ee886633d1721063489159252e759e8
setup.py
setup.py
from setuptools import setup, find_packages setup( name='bfg9000', version='0.1.0pre', license='BSD', author='Jim Porter', author_email='[email protected]', packages=find_packages(exclude=['test']), entry_points={ 'console_scripts': ['bfg9000=bfg9000.driver:main'], }, test_suite='test', )
from setuptools import setup, find_packages setup( name='bfg9000', version='0.1.0-dev', license='BSD', author='Jim Porter', author_email='[email protected]', packages=find_packages(exclude=['test']), entry_points={ 'console_scripts': ['bfg9000=bfg9000.driver:main'], }, test_suite='test', )
Fix version number to comply with PEP 440
Fix version number to comply with PEP 440
Python
bsd-3-clause
jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000
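A quick check with the third-party packaging library (my own illustration, assumed installed; not part of the commit) of how the two version strings are interpreted: the 'pre' suffix is normalised to a release candidate, while '-dev' marks a development release that sorts before the final release.

from packaging.version import Version

print(Version("0.1.0pre"))    # 0.1.0rc0   -- normalised to a release candidate
print(Version("0.1.0-dev"))   # 0.1.0.dev0 -- a development release
print(Version("0.1.0-dev") < Version("0.1.0"))  # True, dev sorts first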
ee81d8966a5ef68edd6bb4459fc015234d6e0814
setup.py
setup.py
"""Open-ovf installer""" import os from distutils.core import setup CODE_BASE_DIR = 'py' SCRIPTS_DIR = 'py/scripts/' def list_scripts(): """List all scripts that should go to /usr/bin""" file_list = os.listdir(SCRIPTS_DIR) return [os.path.join(SCRIPTS_DIR, f) for f in file_list] setup(name='open-ovf', version='0.1', description='OVF implementation', url='http://open-ovf.sourceforge.net', license='EPL', packages=['ovf', 'ovf.commands'], package_dir = {'': CODE_BASE_DIR}, scripts=list_scripts(), )
"""Open-ovf installer""" import os from distutils.core import setup CODE_BASE_DIR = 'py' SCRIPTS_DIR = 'py/scripts/' def list_scripts(): """List all scripts that should go to /usr/bin""" file_list = os.listdir(SCRIPTS_DIR) return [os.path.join(SCRIPTS_DIR, f) for f in file_list] setup(name='open-ovf', version='0.1', description='OVF implementation', url='http://open-ovf.sourceforge.net', license='EPL', packages=['ovf', 'ovf.commands', 'ovf.env'], package_dir = {'': CODE_BASE_DIR}, scripts=list_scripts(), )
Add env subdirectory to package list
Add env subdirectory to package list Hi, This patch adds the ovf/env subdirectory to the package list so that setup.py installs it properly. Signed-off-by: David L. Leskovec <[email protected]> Signed-off-by: Scott Moser <[email protected]>
Python
epl-1.0
Awingu/open-ovf,Awingu/open-ovf,Awingu/open-ovf,Awingu/open-ovf
c19c69926e54e8268b7587a91264a976724a8801
setup.py
setup.py
from distutils.core import setup setup( name='scrAPI Utils', version='0.4.7', author='Chris Seto', author_email='[email protected]', packages=['scrapi.linter'], package_data={'scrapi.linter': ['../__init__.py']}, url='http://www.github.com/chrisseto/scrapi', license='LICENSE.txt', description='Package to aid in consumer creation for scrAPI', long_description=open('README.md').read(), )
from distutils.core import setup setup( name='scrAPI Utils', version='0.4.8', author='Chris Seto', author_email='[email protected]', packages=['scrapi.linter'], package_data={'scrapi.linter': ['../__init__.py']}, url='http://www.github.com/chrisseto/scrapi', license='LICENSE.txt', description='Package to aid in consumer creation for scrAPI', long_description=open('README.md').read(), )
Increment version number for latest linter version
Increment version number for latest linter version
Python
apache-2.0
fabianvf/scrapi,mehanig/scrapi,fabianvf/scrapi,mehanig/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,ostwald/scrapi,jeffreyliu3230/scrapi,icereval/scrapi,felliott/scrapi,erinspace/scrapi,alexgarciac/scrapi,felliott/scrapi,CenterForOpenScience/scrapi
b785a0dd83b6521d62e5d52c5c4c8115fa4b98fc
setup.py
setup.py
from setuptools import find_packages, setup setup( name='txkazoo', version='0.0.4', description='Twisted binding for Kazoo', maintainer='Manish Tomar', maintainer_email='[email protected]', license='Apache 2.0', packages=find_packages(), install_requires=['twisted==13.2.0', 'kazoo==2.0b1'] )
from setuptools import find_packages, setup setup( name='txkazoo', version='0.0.4', description='Twisted binding for Kazoo', long_description=open("README.md").read(), url="https://github.com/rackerlabs/txkazoo", maintainer='Manish Tomar', maintainer_email='[email protected]', license='Apache 2.0', packages=find_packages(), install_requires=['twisted==13.2.0', 'kazoo==2.0b1'] )
Add long_description from README + URL
Add long_description from README + URL
Python
apache-2.0
rackerlabs/txkazoo
3cc473fb6316fffa3c19980115f800518dcee115
setup.py
setup.py
import importlib from cx_Freeze import setup, Executable backend_path = importlib.import_module("bcrypt").__path__[0] backend_path = backend_path.replace("bcrypt", ".libs_cffi_backend") # Dependencies are automatically detected, but it might need # fine tuning. build_exe_options = { "include_files": [ ("client/dist", "client"), "LICENSE", "templates", "readme.md", (backend_path, "lib/.libs_cffi_backend") ], "includes": [ "cffi", "numpy", "numpy.core._methods", "numpy.lib", "numpy.lib.format" ], "namespace_packages": [ "virtool" ], "packages": [ "_cffi_backend", "appdirs", "asyncio", "bcrypt", "cffi", "idna", "motor", "packaging", "uvloop" ] } options = { "build_exe": build_exe_options } executables = [ Executable('run.py', base="Console") ] importlib.import_module("virtool") setup(name='virtool', executables=executables, options=options)
import importlib from cx_Freeze import setup, Executable backend_path = importlib.import_module("bcrypt").__path__[0] backend_path = backend_path.replace("bcrypt", ".libs_cffi_backend") # Dependencies are automatically detected, but it might need # fine tuning. build_exe_options = { "include_files": [ ("client/dist", "client"), "LICENSE", "templates", "readme.md", (backend_path, "lib/.libs_cffi_backend") ], "includes": [ "cffi", "numpy", "numpy.core._methods", "numpy.lib", "numpy.lib.format", "raven.processors" ], "namespace_packages": [ "virtool" ], "packages": [ "_cffi_backend", "appdirs", "asyncio", "bcrypt", "cffi", "idna", "motor", "packaging", "uvloop" ] } options = { "build_exe": build_exe_options } executables = [ Executable('run.py', base="Console") ] importlib.import_module("virtool") setup(name='virtool', executables=executables, options=options)
Fix missing raven.processors in build
Fix missing raven.processors in build
Python
mit
virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool
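A minimal cx_Freeze sketch (package and script names illustrative) of the reason for the change above: modules that are only imported dynamically at run time are invisible to the freezer's static import scan, so they must be listed under "includes" explicitly.

from cx_Freeze import setup, Executable

build_exe_options = {
    "includes": [
        "raven.processors",  # loaded by name at runtime, so the scanner misses it
    ],
}

setup(
    name="exampleapp",
    version="0.1",
    options={"build_exe": build_exe_options},
    executables=[Executable("run.py", base="Console")],
)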
9529a7321b4cfc74594d7391baa760959fcb7568
setup.py
setup.py
from setuptools import setup from setuptools.command.test import test as TestCommand import sys class PyTest(TestCommand): def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): import pytest errcode = pytest.main(self.test_args) sys.exit(errcode) setup( name='msumastro', version='IXME', description='Process FITS files painlessly', long_description=(open('README.rst').read()), license='BSD 3-clause', author='Matt Craig', author_email='[email protected]', packages=['msumastro'], include_package_data=True, install_requires=['astropysics>=0.0.dev0', 'astropy', 'numpy'], extras_require={ 'testing': ['pytest', 'pytest-capturelog'] }, classifiers=['Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 2 :: Only'] )
from setuptools import setup, find_packages from setuptools.command.test import test as TestCommand import sys class PyTest(TestCommand): def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): import pytest errcode = pytest.main(self.test_args) sys.exit(errcode) setup( name='msumastro', version='FIXME', description='Process FITS files painlessly', url='http://github.com/mwcraig/msumastro', long_description=(open('README.rst').read()), license='BSD 3-clause', author='Matt Craig', author_email='[email protected]', packages=find_packages(exclude=['tests*']), include_package_data=True, install_requires=['astropysics>=0.0.dev0', 'astropy', 'numpy'], extras_require={ 'testing': ['pytest>1.4', 'pytest-capturelog'], 'docs': ['numpydoc', 'sphinx-argparse'] }, entry_points={ 'console_scripts': [ ('quick_add_keys_to_file = ' 'msumastro.scripts.quick_add_keys_to_file:main') ] }, classifiers=['Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 2 :: Only'] )
Add entry point for quick_add_keys_to_file
Add entry point for quick_add_keys_to_file
Python
bsd-3-clause
mwcraig/msumastro
352d8578eabe5c0474875c5ddcff9f97fc7df73e
setup.py
setup.py
from setuptools import setup, find_packages try: long_description = open("README.rst").read() except IOError: long_description = "" setup( name='baldr', version='0.4.5', url='https://github.com/timsavage/baldr', license='LICENSE', author='Tim Savage', author_email='[email protected]', description='Odin integration to Django', long_description=long_description, packages=find_packages(), install_requires=['six', 'odin>=0.5', 'django>=1.5'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Topic :: Software Development :: Libraries :: Application Frameworks', 'Topic :: Software Development :: Libraries :: Python Modules', ], )
from setuptools import setup, find_packages try: long_description = open("README.rst").read() except IOError: long_description = "" setup( name='baldr', version='0.4.5', url='https://github.com/timsavage/baldr', license='LICENSE', author='Tim Savage', author_email='[email protected]', description='Odin integration to Django', long_description=long_description, packages=find_packages(exclude=("django_test_runner",)), install_requires=['six', 'odin>=0.5', 'django>=1.5'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Topic :: Software Development :: Libraries :: Application Frameworks', 'Topic :: Software Development :: Libraries :: Python Modules', ], )
Exclude test_runner from dist package
Exclude test_runner from dist package
Python
bsd-3-clause
timsavage/baldr,timsavage/baldr
a709f5de366da5121e1d03f5de1ddd2b202c661e
setup.py
setup.py
from setuptools import setup

setup(
    name="servicemanager",
    version="2.0.6",
    description="A python tool to manage developing and testing with lots of microservices",
    url="https://github.com/hmrc/service-manager",
    author="hmrc-web-operations",
    license="Apache Licence 2.0",
    packages=[
        "servicemanager",
        "servicemanager.actions",
        "servicemanager.server",
        "servicemanager.service",
        "servicemanager.thirdparty",
    ],
    install_requires=[
        "requests~=2.24.0",
        "pymongo==3.11.0",
        "bottle==0.12.18",
        "pytest==5.4.3",
        "pyhamcrest==2.0.2",
        "argcomplete==1.12.0",
        "prettytable==0.7.2"
    ],
    scripts=["bin/sm", "bin/smserver"],
    zip_safe=False,
)
from setuptools import setup

setup(
    name="servicemanager",
    version="2.0.7",
    description="A python tool to manage developing and testing with lots of microservices",
    url="https://github.com/hmrc/service-manager",
    author="hmrc-web-operations",
    license="Apache Licence 2.0",
    packages=[
        "servicemanager",
        "servicemanager.actions",
        "servicemanager.server",
        "servicemanager.service",
        "servicemanager.thirdparty",
    ],
    install_requires=[
        "requests~=2.24.0",
        "pymongo==3.11.0",
        "bottle==0.12.18",
        "pytest==5.4.3",
        "pyhamcrest==2.0.2",
        "argcomplete~=1.12.0",
        "prettytable==0.7.2"
    ],
    scripts=["bin/sm", "bin/smserver"],
    zip_safe=False,
)
Allow later patch versions of argcomplete
Allow later patch versions of argcomplete. Using ~ should install 1.12.1 if available (which it is). Specifically, we want the changes here: https://github.com/kislyuk/argcomplete/issues/321. importlib-metadata 2.0 was released, and argcomplete specified that version 2.0 should never be used, up until its most recent release. I believe that one of our test dependencies imported a higher version of argcomplete before the resolver realised this. There is potentially another piece of work to NOT install test dependencies for users of servicemanager, but that can be postponed for now.
Python
apache-2.0
hmrc/service-manager,hmrc/service-manager,hmrc/service-manager,hmrc/service-manager
b5d801c561b4a73ba7ea41665b7fe756fc56689d
setup.py
setup.py
#!/usr/bin/env python
import os

from setuptools import find_packages, setup

SCRIPT_DIR = os.path.dirname(__file__)
if not SCRIPT_DIR:
    SCRIPT_DIR = os.getcwd()

SRC_PREFIX = 'src'

packages = find_packages(SRC_PREFIX)

setup(
    name='cmdline',
    version='0.0.0',
    description='Utilities for consistent command line tools',
    author='Roberto Aguilar',
    author_email='[email protected]',
    package_dir={'': SRC_PREFIX},
    packages=packages,
    long_description=open('README.md').read(),
    url='http://github.com/rca/cmdline',
    license='LICENSE',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Natural Language :: English',
        'Topic :: Utilities'
    ],
    install_requires=[
        'PyYAML>=3',
    ],
)
#!/usr/bin/env python
import os

from setuptools import find_packages, setup

SCRIPT_DIR = os.path.dirname(__file__)
if not SCRIPT_DIR:
    SCRIPT_DIR = os.getcwd()

SRC_PREFIX = 'src'


def readme():
    with open('README.md') as f:
        return f.read()


packages = find_packages(SRC_PREFIX)

setup(
    name='cmdline',
    version='0.0.0',
    description='Utilities for consistent command line tools',
    author='Roberto Aguilar',
    author_email='[email protected]',
    package_dir={'': SRC_PREFIX},
    packages=packages,
    long_description=readme(),
    long_description_content_type='text/markdown',
    url='http://github.com/rca/cmdline',
    license='LICENSE',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Natural Language :: English',
        'Topic :: Utilities'
    ],
    install_requires=[
        'PyYAML>=3',
    ],
)
Add markdown content type for README
Add markdown content type for README
Python
apache-2.0
rca/cmdline
1d391ef70edcb81a7a6f27685e2dca103b24bd66
app.py
app.py
#!/usr/bin/env python

import sys
import os

from flask import Flask, render_template, make_response

app = Flask(__name__)


@app.route('/')
def index():
    """Render index template"""
    return render_template('index.html')


@app.route('/styles.css')
def css():
    """Render widget styles"""
    response = make_response(render_template('styles.css'))
    response.headers['Content-Type'] = 'text/css'
    return response


if __name__ == '__main__':
    if len(sys.argv) > 1 and sys.argv[1] == 'debug':
        app.debug = True
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
#!/usr/bin/env python

import sys
import os
import yaml
import inspect

from flask import Flask, render_template, make_response, Response, \
    stream_with_context

import workers

app = Flask(__name__)
active_workers = []


@app.route('/')
def index():
    """Render index template"""
    return render_template('index.html')


@app.route('/styles.css')
def css():
    """Render widget styles"""
    response = make_response(render_template('styles.css'))
    response.headers['Content-Type'] = 'text/css'
    return response


@app.route('/events')
def events():
    def generate():
        for worker in active_workers:
            yield 'data: %s\n\n' % (worker.get(),)
    return Response(stream_with_context(generate()),
                    mimetype='text/event-stream')


def _read_conf():
    with open(os.path.join(sys.path[0], 'config.yml'), 'r') as conf:
        return yaml.load(conf)


def _configure_jobs(conf):
    for cls_name, cls in inspect.getmembers(workers, inspect.isclass):
        name = cls_name.lower()
        if name not in conf.keys() or not conf[name]['enabled']:
            print 'Skipping missing or disabled worker: %s' % (name,)
            continue
        print 'Configuring worker: %s' % (name,)
        worker = cls(conf)
        active_workers.append(worker)


if __name__ == '__main__':
    if len(sys.argv) > 1 and sys.argv[1] == 'debug':
        app.debug = True
    _configure_jobs(_read_conf())
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port, threaded=True)
Implement server-sent events and basic worker support
Implement server-sent events and basic worker support
Python
mit
martinp/jarvis2,mpolden/jarvis2,Foxboron/Frank,martinp/jarvis2,Foxboron/Frank,mpolden/jarvis2,Foxboron/Frank,martinp/jarvis2,mpolden/jarvis2
994fc67234a60fbe2c8f4146fa72089ee94432a9
setup.py
setup.py
import os
from setuptools import setup, find_packages


def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()


setup(
    name="sphinx-refdoc",
    version=read('VERSION').strip(),
    author="Mateusz 'novo' Klos",
    author_email="[email protected]",
    license="MIT",
    description="Reference documentation generator for sphinx",
    long_description=read('README.rst'),
    package_dir={'': 'src'},
    packages=find_packages('src'),
    install_requires=[
        l.strip() for l in read('requirements.txt').split() if '==' in l
    ],
    entry_points={
        'console_scripts': [
            'sphinx-refdoc = refdoc.cli:gendocs',
        ]
    },
    classifiers=[
        "Development Status :: 2 - Pre-Alpha",
        "Topic :: Utilities",
        "Intended Audience :: Developers",
        "Programming Language :: Python",
        "License :: OSI Approved :: MIT License",
    ],
)
import os
from setuptools import setup, find_packages


def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()


setup(
    name="sphinx-refdoc",
    url="https://github.com/novopl/sphinx-refdoc",
    version=read('VERSION').strip(),
    author="Mateusz 'novo' Klos",
    author_email="[email protected]",
    license="MIT",
    description="Reference documentation generator for sphinx",
    long_description=read('README.rst'),
    package_dir={'': 'src'},
    packages=find_packages('src'),
    install_requires=[
        l.strip() for l in read('requirements.txt').split() if '==' in l
    ],
    entry_points={
        'console_scripts': [
            'sphinx-refdoc = refdoc.cli:gendocs',
        ]
    },
    classifiers=[
        "Development Status :: 2 - Pre-Alpha",
        "Topic :: Utilities",
        "Intended Audience :: Developers",
        "Programming Language :: Python",
        "License :: OSI Approved :: MIT License",
    ],
)
Add URL to the package description
Add URL to the package description
Python
mit
novopl/sphinx-refdoc
e024f15d20f974a3dc4922631264d79d93a26e5f
setup.py
setup.py
from setuptools import setup, find_packages

with open('README.rst') as f:
    readme = f.read()

execfile('substance/_version.py')

install_requires = ['setuptools>=1.1.3', 'PyYAML', 'tabulate', 'paramiko',
                    'netaddr', 'requests', 'tinydb', 'python_hosts==0.3.3',
                    'jinja2']

setup(name='substance',
      version=__version__,
      author='turbulent/bbeausej',
      author_email='[email protected]',
      license='MIT',
      long_description=readme,
      description='substance - local dockerized development environment',
      install_requires=install_requires,
      packages=find_packages(),
      package_data={
          'substance': ['support/*']
      },
      test_suite='tests',
      zip_safe=False,
      include_package_data=True,
      entry_points={
          'console_scripts': [
              'substance = substance.cli:cli',
              'subenv = substance.subenv.cli:cli'
          ],
      })
from setuptools import setup, find_packages
import platform

with open('README.rst') as f:
    readme = f.read()

execfile('substance/_version.py')

install_requires = ['setuptools>=1.1.3', 'PyYAML', 'tabulate', 'paramiko',
                    'netaddr', 'requests', 'tinydb', 'python_hosts==0.3.3',
                    'jinja2']

if 'Darwin' in platform.system():
    install_requires.append('macfsevents')

setup(name='substance',
      version=__version__,
      author='turbulent/bbeausej',
      author_email='[email protected]',
      license='MIT',
      long_description=readme,
      description='substance - local dockerized development environment',
      install_requires=install_requires,
      packages=find_packages(),
      package_data={
          'substance': ['support/*']
      },
      test_suite='tests',
      zip_safe=False,
      include_package_data=True,
      entry_points={
          'console_scripts': [
              'substance = substance.cli:cli',
              'subenv = substance.subenv.cli:cli'
          ],
      })
Add macfsevents dependency on Mac OS X
Add macfsevents dependency on Mac OS X
Python
apache-2.0
turbulent/substance,turbulent/substance
4c43e16c8f39007e1e91195b40e80b53f67b5823
app.py
app.py
import json
import os

import webapp2
from webapp2_extras import jinja2


class BaseHandler(webapp2.RequestHandler):
    @webapp2.cached_property
    def jinja2(self):
        return jinja2.get_jinja2(app=self.app)

    def render_template(self, filename, **template_args):
        self.response.write(self.jinja2.render_template(filename, **template_args))


class IndexHandler(BaseHandler):
    def get(self):
        self.render_template('index.html', name=self.request.get('name'))


class RegistrationHandler(webapp2.RequestHandler):
    def post(self):
        json_object = json.loads(self.request.body)
        self.response.write('Registration Received {}'.format(json_object))


class GamesHandler(webapp2.RequestHandler):
    def post(self):
        self.response.write('Game Received')


app = webapp2.WSGIApplication([
    webapp2.Route('/', handler=IndexHandler, name='home', methods=['GET']),
    webapp2.Route('/register', handler=RegistrationHandler, name='registration', methods=['POST']),
    webapp2.Route('/games', handler=GamesHandler, name='games', methods=['POST']),
], debug=True)
import json
import os

import webapp2
from webapp2_extras import jinja2


class BaseHandler(webapp2.RequestHandler):
    @webapp2.cached_property
    def jinja2(self):
        return jinja2.get_jinja2(app=self.app)

    def render_template(self, filename, **template_args):
        self.response.write(self.jinja2.render_template(filename, **template_args))


class IndexHandler(BaseHandler):
    def get(self):
        self.render_template('index.html', name=self.request.get('name'))


class RegistrationHandler(webapp2.RequestHandler):
    def post(self):
        json_object = json.loads(self.request.body)
        if not 'username' in json_object:
            webapp2.abort(422, detail='Field "username" is required')
        else:
            self.response.write('Registration Received {}'.format(json_object))


class GamesHandler(webapp2.RequestHandler):
    def post(self):
        self.response.write('Game Received')


app = webapp2.WSGIApplication([
    webapp2.Route('/', handler=IndexHandler, name='home', methods=['GET']),
    webapp2.Route('/register', handler=RegistrationHandler, name='registration', methods=['POST']),
    webapp2.Route('/games', handler=GamesHandler, name='games', methods=['POST']),
], debug=True)
Return Status 422 on bad JSON content
Return Status 422 on bad JSON content
Python
mit
supermitch/mech-ai,supermitch/mech-ai,supermitch/mech-ai
20764887bc338c2cd366ad11fb41d8932c2326a2
bot.py
bot.py
import json

import discord

from handlers.message_handler import MessageHandler

with open("config.json", "r") as f:
    config = json.load(f)

client = discord.Client()
message_handler = MessageHandler(config, client)


@client.event
async def on_ready():
    print("Logged in as", client.user.name)


@client.event
async def on_message(message):
    await message_handler.handle(message)


client.run(config["token"])
#!/usr/bin/env python
import argparse
import json

import discord

from handlers.message_handler import MessageHandler


def main():
    p = argparse.ArgumentParser()
    p.add_argument("--config", required=True, help="Path to configuration file")
    args = p.parse_args()

    with open(args.config, "r") as f:
        config = json.load(f)

    client = discord.Client()
    message_handler = MessageHandler(config, client)

    @client.event
    async def on_ready():
        print("Logged in as", client.user.name)

    @client.event
    async def on_message(message):
        await message_handler.handle(message)

    client.run(config["token"])


if __name__ == "__main__":
    main()
Add --config argument as a path to the config file
Add --config argument as a path to the config file
Python
mit
azeier/hearthbot
a2fb1efc918e18bb0ecebce4604192b03af662b2
fib.py
fib.py
def fibrepr(n):
    fibs = [1, 2, 3, 5, 8, 13, 21, 34, 55, 89]

    def fib_iter(n, fibs, l):
        for i, f in enumerate(fibs):
            if f == n:
                yield '1' + i*'0' + l
            elif n > f:
                for fib in fib_iter(n - f, fibs[i+1:], '1' + i*'0' + l):
                    yield fib
            else:
                break

    return fib_iter(n, fibs, '')
class Fibonacci(object):
    _cache = {0: 1, 1: 2}

    def __init__(self, n):
        self.n = n

    def get(self, n):
        if not n in Fibonacci._cache:
            Fibonacci._cache[n] = self.get(n-1) + self.get(n-2)
        return Fibonacci._cache[n]

    def next(self):
        return Fibonacci(self.n + 1)

    def __iter__(self):
        while True:
            yield self.get(self.n)
            self.n += 1


def fibrepr(n):
    def fib_iter(n, fib, l):
        for i, f in enumerate(fib):
            if f == n:
                yield '1' + i*'0' + l
            elif n > f:
                for match in fib_iter(n - f, fib.next(), '1' + i*'0' + l):
                    yield match
            else:
                break

    return fib_iter(n, Fibonacci(0), '')
Add Fibonacci class and use it in representation
Add Fibonacci class and use it in representation
Python
mit
kynan/CodeDojo30