Dataset columns (name: type, length range or class count):
commit: stringlengths, 40 to 40
old_file: stringlengths, 4 to 264
new_file: stringlengths, 4 to 264
old_contents: stringlengths, 0 to 3.26k
new_contents: stringlengths, 1 to 4.43k
subject: stringlengths, 15 to 624
message: stringlengths, 15 to 4.7k
lang: stringclasses, 3 values
license: stringclasses, 13 values
repos: stringlengths, 5 to 91.5k
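The rows below follow this schema: each record pairs a commit hash with the file it touched, the file contents before and after the change, the commit subject and message, and language/license/repository metadata. As a rough illustration only, the sketch below shows how such a table could be loaded and inspected with the Hugging Face datasets library; the dataset identifier is a placeholder, not the real path of this dump.

    # Minimal sketch, assuming this dump is published as a Hugging Face dataset.
    # "user/commit-diffs" is a hypothetical placeholder identifier.
    from datasets import load_dataset

    ds = load_dataset("user/commit-diffs", split="train")

    # Each row is a dict carrying the columns listed above.
    row = ds[0]
    print(row["commit"])             # 40-character commit hash
    print(row["old_file"])           # path of the file before the change
    print(row["subject"])            # one-line commit subject
    print(len(row["new_contents"]))  # size of the post-change file contents
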
979c56f882178ce49194850bd9e78c9dea4692dd
chardet/__init__.py
chardet/__init__.py
######################## BEGIN LICENSE BLOCK ######################## # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .compat import PY2, PY3 from .universaldetector import UniversalDetector from .version import __version__, VERSION def detect(byte_str): """ Detect the encoding of the given byte string. :param byte_str: The byte sequence to examine. :type byte_str: ``bytes`` or ``bytearray`` """ if not isinstance(byte_str, bytearray): if not isinstance(byte_str, bytes): raise TypeError('Expected object of type bytes or bytearray, got: ' '{0}'.format(type(byte_str))) else: byte_str = bytearray(byte_str) detector = UniversalDetector() detector.feed(byte_str) detector.close() return detector.result
######################## BEGIN LICENSE BLOCK ######################## # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from .compat import PY2, PY3 from .universaldetector import UniversalDetector from .version import __version__, VERSION def detect(byte_str): """ Detect the encoding of the given byte string. :param byte_str: The byte sequence to examine. :type byte_str: ``bytes`` or ``bytearray`` """ if not isinstance(byte_str, bytearray): if not isinstance(byte_str, bytes): raise TypeError('Expected object of type bytes or bytearray, got: ' '{0}'.format(type(byte_str))) else: byte_str = bytearray(byte_str) detector = UniversalDetector() detector.feed(byte_str) return detector.close()
Remove unnecessary line from detect
Remove unnecessary line from detect
Python
lgpl-2.1
ddboline/chardet,chardet/chardet,chardet/chardet,ddboline/chardet
e0989ff4c2292d0f2d053065bfa71124a3705559
jarn/mkrelease/colors.py
jarn/mkrelease/colors.py
import os import functools import blessed def color(func): functools.wraps(func) def wrapper(string): if os.environ.get('JARN_NO_COLOR') == '1': return string return func(string) return wrapper term = blessed.Terminal() bold = color(term.bold) blue = color(term.bold_blue) green = color(term.bold_green) red = color(term.bold_red)
import os import functools import blessed def color(func): assignments = functools.WRAPPER_ASSIGNMENTS if not hasattr(func, '__name__'): assignments = [x for x in assignments if x != '__name__'] @functools.wraps(func, assignments) def wrapper(string): if os.environ.get('JARN_NO_COLOR') == '1': return string return func(string) return wrapper term = blessed.Terminal() bold = color(term.bold) blue = color(term.bold_blue) green = color(term.bold_green) red = color(term.bold_red)
Fix wrapping in color decorator.
Fix wrapping in color decorator.
Python
bsd-2-clause
Jarn/jarn.mkrelease
f036e74a6d887a5267885ef09cfe9c3a4de7eea5
config.py
config.py
import os config = { 'DEBUG': not os.getenv('SERVER_SOFTWARE', '').startswith('Google App Engine/'), 'SECRET_KEY': 'Some big sentence', }
import os config = { 'DEBUG': True if os.getenv('SERVER_SOFTWARE', '').startswith('Development/') else False, 'SECRET_KEY': 'Some big sentence', }
Set DEBUG to true only when using Development server
Set DEBUG to true only when using Development server
Python
mit
i2nes/app-engine-blog,i2nes/app-engine-blog,i2nes/app-engine-blog
70d2f182c09583802da2860994a99fd2bc9e39d5
kimochiconsumer/views.py
kimochiconsumer/views.py
from pyramid.view import view_config from pyramid.httpexceptions import ( HTTPNotFound, ) @view_config(route_name='page', renderer='templates/page.mako') @view_config(route_name='page_view', renderer='templates/page.mako') def page_view(request): if 'page_id' in request.matchdict: data = request.kimochi.page(request.matchdict['page_id']) else: data = request.kimochi.page('1') return data @view_config(route_name='gallery_view', renderer='templates/gallery.mako') def gallery_view(request): data = request.kimochi.gallery(request.matchdict['gallery_id']) if 'gallery' not in data or not data['gallery']: raise HTTPNotFound return data @view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako') def gallery_image_view(request): data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id']) if 'gallery' not in data or not data['gallery']: raise HTTPNotFound return data
from pyramid.view import view_config from pyramid.httpexceptions import ( HTTPNotFound, ) @view_config(route_name='page', renderer='templates/page.mako') @view_config(route_name='page_view', renderer='templates/page.mako') def page_view(request): if 'page_id' in request.matchdict: data = request.kimochi.page(request.matchdict['page_id']) else: data = request.kimochi.page('index') return data @view_config(route_name='gallery_view', renderer='templates/gallery.mako') def gallery_view(request): data = request.kimochi.gallery(request.matchdict['gallery_id']) if 'gallery' not in data or not data['gallery']: raise HTTPNotFound return data @view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako') def gallery_image_view(request): data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id']) if 'gallery' not in data or not data['gallery']: raise HTTPNotFound return data
Use 'index' as the default page alias for lookups
Use 'index' as the default page alias for lookups
Python
mit
matslindh/kimochi-consumer
c537ec56660d6829c3297db9760fc75e2f6260b2
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup with open('README.md') as f: long_description = f.read().strip() setup(name='python-circuit', version='0.1.6', description='Simple implementation of the Circuit Breaker pattern', long_description=long_description, author='Edgeware', author_email='[email protected]', url='https://github.com/edgeware/python-circuit', packages=['circuit'], test_suite='circuit.test', tests_require=[ 'mockito==0.5.2', 'Twisted>=10.2' ], cmdclass=commands)
#!/usr/bin/env python from setuptools import setup with open('README.md') as f: long_description = f.read().strip() setup(name='python-circuit', version='0.1.6', description='Simple implementation of the Circuit Breaker pattern', long_description=long_description, author='Edgeware', author_email='[email protected]', url='https://github.com/edgeware/python-circuit', packages=['circuit'], test_suite='circuit.test', tests_require=[ 'mockito==0.5.2', 'Twisted>=10.2' ])
Remove deprecated kwarg and undefined variable.
Remove deprecated kwarg and undefined variable.
Python
apache-2.0
edgeware/python-circuit
db442d2300ee99f8b062432225291696f26448b7
setup.py
setup.py
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() setup( name='django-addendum', version='0.0.1', description='Simple template-based content swapping for CMS-less sites', long_description=readme, author='Ben Lopatin', author_email='[email protected]', url='https://github.com/bennylope/django-addendum', license='BSD License', packages=find_packages(exclude=('example', 'docs')), platforms=['OS Independent'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], install_requires=[ 'Django>=1.4', ], include_package_data=True, )
# -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() setup( name='django-addendum', version='0.0.2', description='Simple template-based content swapping for CMS-less sites', long_description=readme, author='Ben Lopatin', author_email='[email protected]', url='https://github.com/bennylope/django-addendum', license='BSD License', packages=find_packages(exclude=('example', 'docs')), platforms=['OS Independent'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], install_requires=[ 'Django>=1.4', ], include_package_data=True, )
Fix version for migrations fix
Fix version for migrations fix
Python
bsd-2-clause
adw0rd/django-addendum-inline,bennylope/django-addendum,bennylope/django-addendum,adw0rd/django-addendum-inline
0dce3b4101b29ecb79d5743d2c86b40c22d0e7e9
setup.py
setup.py
# -*- coding: utf-8 -*- from setuptools import setup with open('README.rst', 'r') as f: long_desc = f.read().decode('utf-8') setup(name='dmgbuild', version='1.0.0', description='Mac OS X command line utility to build disk images', long_description=long_desc, author='Alastair Houghton', author_email='[email protected]', url='http://alastairs-place.net/projects/dmgbuild', license='MIT License', platforms='darwin', packages=['dmgbuild'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'License :: OSI Approved :: MIT License', 'Topic :: Desktop Environment', ], package_data = { 'dmgbuild': ['resources/*'] }, scripts=['scripts/dmgbuild'], install_requires=['ds_store >= 1.0.1', 'mac_alias >= 1.0.0', 'six >= 1.4.1'], provides=['dmgbuild'] )
# -*- coding: utf-8 -*- from setuptools import setup with open('README.rst', 'r') as f: long_desc = f.read().decode('utf-8') setup(name='dmgbuild', version='1.0.0', description='Mac OS X command line utility to build disk images', long_description=long_desc, author='Alastair Houghton', author_email='[email protected]', url='http://alastairs-place.net/projects/dmgbuild', license='MIT License', platforms='darwin', packages=['dmgbuild'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'License :: OSI Approved :: MIT License', 'Topic :: Desktop Environment', ], package_data = { 'dmgbuild': ['resources/*'] }, scripts=['scripts/dmgbuild'], install_requires=['ds_store >= 1.0.1', 'mac_alias >= 1.0.0', 'six >= 1.4.1', 'pyobjc-framework-Quartz >= 3.0.4'], provides=['dmgbuild'] )
Add previously undeclared PyObjC Quartz dependency.
Add previously undeclared PyObjC Quartz dependency.
Python
mit
al45tair/dmgbuild
5ed223883a39c58af7db10d92829894407fba822
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup #from distutils.core import setup setup(name='imagesize', version='0.7.1', description='Getting image size from png/jpeg/jpeg2000/gif file', long_description=''' It parses image files' header and return image size. * PNG * JPEG * JPEG2000 * GIF This is a pure Python library. ''', author='Yoshiki Shibukawa', author_email='yoshiki at shibu.jp', url='https://github.com/shibukawa/imagesize_py', license="MIT", py_modules=['imagesize'], test_suite='test', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Multimedia :: Graphics' ] )
#!/usr/bin/env python from setuptools import setup #from distutils.core import setup setup(name='imagesize', version='0.7.1', description='Getting image size from png/jpeg/jpeg2000/gif file', long_description=''' It parses image files' header and return image size. * PNG * JPEG * JPEG2000 * GIF This is a pure Python library. ''', author='Yoshiki Shibukawa', author_email='yoshiki at shibu.jp', url='https://github.com/shibukawa/imagesize_py', license="MIT", py_modules=['imagesize'], test_suite='test', classifiers = [ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Multimedia :: Graphics' ] )
Document project is stable and ready for use in production
Document project is stable and ready for use in production
Python
mit
shibukawa/imagesize_py
436fa5dd4ee955b35ef9d68a0f6b293f957e2ce0
setup.py
setup.py
from distutils.core import setup setup( name = 'jsuite', packages = ['jsuite'], # this must be the same as the name above version = '0.3.0', scripts=['bin/jsuite'], install_requires = [ 'lxml==3.6.4', 'clint==0.5.1' ], description = 'Parsing and manipulation tools for JATS XML files.', author = 'Dipanjan Mukherjee', author_email = '[email protected]', url = 'https://github.com/schatten/jsuite', # use the URL to the github repo download_url = 'https://github.com/schatten/jsuite/tarball/0.1', # I'll explain this in a second keywords = ['xml', 'parsing', 'JATS', 'tools'], # arbitrary keywords classifiers = [], )
from distutils.core import setup setup( name = 'jsuite', packages = ['jsuite'], # this must be the same as the name above version = '0.4.0', scripts=['bin/jsuite'], install_requires = [ 'lxml==3.6.4', 'clint==0.5.1' ], description = 'Parsing and manipulation tools for JATS XML files.', author = 'Dipanjan Mukherjee', author_email = '[email protected]', url = 'https://github.com/schatten/jsuite', # use the URL to the github repo download_url = 'https://github.com/schatten/jsuite/tarball/0.1', # I'll explain this in a second keywords = ['xml', 'parsing', 'JATS', 'tools'], # arbitrary keywords classifiers = [], )
Add count function across entire document
v0.4.0: Add count function across entire document
Python
mit
schatten/jsuite,TypesetIO/jsuite
ad6c51fdbc0fbfa2993902b89a7dac04b62a244f
setup.py
setup.py
from setuptools import setup import transip setup( name = transip.__name__, version = transip.__version__, author = transip.__author__, author_email = transip.__email__, license = transip.__license__, description = transip.__doc__.splitlines()[0], long_description = open('README.rst').read(), url = 'http://github.com/goabout/transip-backup', download_url = 'http://github.com/goabout/transip-backup/archives/master', packages = ['transip'], include_package_data = True, zip_safe = False, platforms = ['all'], test_suite = 'tests', entry_points = { 'console_scripts': [ 'transip-api = transip.transip_cli:main', ], }, install_requires = [ 'requests', ], )
from setuptools import setup import transip setup( name = transip.__name__, version = transip.__version__, author = transip.__author__, author_email = transip.__email__, license = transip.__license__, description = transip.__doc__.splitlines()[0], long_description = open('README.rst').read(), url = 'http://github.com/goabout/transip-backup', download_url = 'http://github.com/goabout/transip-backup/archives/master', packages = ['transip', 'transip.service'], include_package_data = True, zip_safe = False, platforms = ['all'], test_suite = 'tests', entry_points = { 'console_scripts': [ 'transip-api = transip.transip_cli:main', ], }, install_requires = [ 'requests', ], )
Add package transip.service or else this is not installed
Add package transip.service or else this is not installed
Python
mit
benkonrath/transip-api,benkonrath/transip-api
9ddc63eb0e1e3612ac4a1ea5b95e405ca0915b52
setup.py
setup.py
#!/usr/bin/env python from distutils.core import setup setup(name="sysops-api", version="1.0", description="LinkedIn Redis / Cfengine API", author = "Mike Svoboda", author_email = "[email protected]", py_modules=['CacheExtractor', 'RedisFinder'], data_files=[('/usr/local/bin', ['./scripts/extract_sysops_cache.py']), ('/usr/local/bin', ['./scripts/extract_sysops_api_to_disk.py']), ('/usr/local/bin', ['./scripts/extract_sysctl_live_vs_persistant_entries.py']), ('/usr/local/bin', ['./scripts/extract_user_account_access.py']), ('/usr/local/bin', ['./scripts/extract_user_sudo_privileges.py'])], package_dir={'': 'src'}, packages = ['seco'], )
#!/usr/bin/env python from distutils.core import setup setup(name="sysops-api", version="1.0", description="LinkedIn Redis / Cfengine API", author="Mike Svoboda", author_email="[email protected]", py_modules=['CacheExtractor', 'RedisFinder'], scripts=['scripts/extract_sysops_cache.py', 'scripts/extract_sysops_api_to_disk.py', 'scripts/extract_sysctl_live_vs_persistant_entries.py', 'scripts/extract_user_account_access.py', 'scripts/extract_user_sudo_privileges.py'], package_dir={'': 'src'}, packages=['seco'], )
Install scripts properly rather than as datafiles
Install scripts properly rather than as datafiles - also fix whitespace
Python
apache-2.0
linkedin/sysops-api,linkedin/sysops-api,slietz/sysops-api,slietz/sysops-api
a88452582e8d5ac01f9ccbdd3bf1736bf2209af2
setup.py
setup.py
import sys import os from setuptools import setup long_description = open('README.rst').read() classifiers = [ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', ] setup_kwargs = dict( name='powershift-cluster', version='1.0.4', description='PowerShift command plugin for creating OpenShift clusters.', long_description=long_description, url='https://github.com/getwarped/powershift-cli-cluster', author='Graham Dumpleton', author_email='[email protected]', license='BSD', classifiers=classifiers, keywords='openshift kubernetes', packages=['powershift', 'powershift.cluster'], package_dir={'powershift': 'src/powershift'}, install_requires=['powershift>=1.3.7'], entry_points = {'powershift_cli_plugins': ['cluster = powershift.cluster']}, ) setup(**setup_kwargs)
import sys import os from setuptools import setup long_description = open('README.rst').read() classifiers = [ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ] setup_kwargs = dict( name='powershift-cluster', version='1.0.4', description='PowerShift command plugin for creating OpenShift clusters.', long_description=long_description, url='https://github.com/getwarped/powershift-cluster', author='Graham Dumpleton', author_email='[email protected]', license='BSD', classifiers=classifiers, keywords='openshift kubernetes', packages=['powershift', 'powershift.cluster'], package_dir={'powershift': 'src/powershift'}, install_requires=['powershift>=1.3.8', 'powershift-cli'], entry_points = {'powershift_cli_plugins': ['cluster = powershift.cluster']}, ) setup(**setup_kwargs)
Update to depend on separate powershift-cli package.
Update to depend on separate powershift-cli package.
Python
bsd-2-clause
getwarped/powershift-cluster,getwarped/powershift-cluster
381c2a53b9aa11acb140d7807fc218c003e14f5b
setup.py
setup.py
from __future__ import unicode_literals import re from setuptools import setup def get_version(filename): content = open(filename).read() metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content)) return metadata['version'] setup( name='Mopidy-Scrobbler', version=get_version('mopidy_scrobbler/__init__.py'), url='https://github.com/jodal/mopidy-scrobbler', license='Apache License, Version 2.0', author='Stein Magnus Jodal', author_email='[email protected]', description='Mopidy extension for scrobbling played tracks to Last.fm', long_description=open('README.rst').read(), packages=['mopidy_scrobbler'], zip_safe=False, include_package_data=True, install_requires=[ 'setuptools', 'Mopidy', ], entry_points={ 'mopidy.ext': [ 'scrobbler = mopidy_scrobbler:Extension', ], }, classifiers=[ 'Environment :: No Input/Output (Daemon)', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Topic :: Multimedia :: Sound/Audio :: Players', ], )
from __future__ import unicode_literals import re from setuptools import setup def get_version(filename): content = open(filename).read() metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content)) return metadata['version'] setup( name='Mopidy-Scrobbler', version=get_version('mopidy_scrobbler/__init__.py'), url='https://github.com/mopidy/mopidy-scrobbler', license='Apache License, Version 2.0', author='Stein Magnus Jodal', author_email='[email protected]', description='Mopidy extension for scrobbling played tracks to Last.fm', long_description=open('README.rst').read(), packages=['mopidy_scrobbler'], zip_safe=False, include_package_data=True, install_requires=[ 'setuptools', 'Mopidy', 'pylast >= 0.5.7', ], entry_points={ 'mopidy.ext': [ 'scrobbler = mopidy_scrobbler:Extension', ], }, classifiers=[ 'Environment :: No Input/Output (Daemon)', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Topic :: Multimedia :: Sound/Audio :: Players', ], )
Add dependency on pylast, update project URL
Add dependency on pylast, update project URL
Python
apache-2.0
mopidy/mopidy-scrobbler,mthssdrbrg/mopidy-scrobbler
7ae590a0ba1b8fba8eb6643c9a44fe848fe9c5ee
setup.py
setup.py
from setuptools import setup, find_packages install_requires = [ 'prompt_toolkit', 'pathlib', 'python-keystoneclient' ] test_requires = [ 'mock' ] setup( name='contrail-api-cli', version='0.1a2', description="Simple CLI program to browse Contrail API server", long_description=open('README.md').read(), author="Jean-Philippe Braun", author_email="[email protected]", maintainer="Jean-Philippe Braun", maintainer_email="[email protected]", url="http://www.github.com/eonpatapon/contrail-api-cli", packages=find_packages(), include_package_data=True, install_requires=install_requires, scripts=[], license="MIT", entry_points={ 'console_scripts': [ 'contrail-api-cli = contrail_api_cli.prompt:main' ], 'keystoneclient.auth.plugin': [ 'http = contrail_api_cli.auth:HTTPAuth' ] }, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Software Development :: User Interfaces', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4' ], keywords='contrail api cli', tests_require=test_requires, test_suite="contrail_api_cli.tests" )
import sys from setuptools import setup, find_packages install_requires = [ 'prompt_toolkit', 'python-keystoneclient' ] test_requires = [] if sys.version_info[0] == 2: install_requires.append('pathlib') test_requires.append('mock') setup( name='contrail-api-cli', version='0.1a2', description="Simple CLI program to browse Contrail API server", long_description=open('README.md').read(), author="Jean-Philippe Braun", author_email="[email protected]", maintainer="Jean-Philippe Braun", maintainer_email="[email protected]", url="http://www.github.com/eonpatapon/contrail-api-cli", packages=find_packages(), include_package_data=True, install_requires=install_requires, scripts=[], license="MIT", entry_points={ 'console_scripts': [ 'contrail-api-cli = contrail_api_cli.prompt:main' ], 'keystoneclient.auth.plugin': [ 'http = contrail_api_cli.auth:HTTPAuth' ] }, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Software Development :: User Interfaces', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4' ], keywords='contrail api cli', tests_require=test_requires, test_suite="contrail_api_cli.tests" )
Install requirements depending on python version
Install requirements depending on python version
Python
mit
eonpatapon/contrail-api-cli
6dc434de2b23f4a8e93e5969d52eb6f641b9665f
setup.py
setup.py
#!/usr/bin/env python import sys import os if "publish" in sys.argv[-1]: os.system("python setup.py sdist upload") sys.exit() try: from setuptools import setup setup except ImportError: from distutils.core import setup setup # Command-line tools entry_points = {'console_scripts': [ 'K2onSilicon = K2fov.K2onSilicon:K2onSilicon_main', 'K2inMicrolensRegion = K2fov.c9:inMicrolensRegion_main', 'K2findCampaigns = K2fov.K2findCampaigns:K2findCampaigns_main', 'K2findCampaigns-csv = K2fov.K2findCampaigns:K2findCampaigns_csv_main' ]} setup(name='K2fov', version='2.1.dev', description='Find which targets are in the field of view of K2', author='Tom Barclay', author_email='[email protected]', url='https://github.com/mrtommyb/K2fov', packages=['K2fov'], package_data={'K2fov': ['data/*.json']}, install_requires=["numpy>=1.8"], entry_points=entry_points, )
#!/usr/bin/env python import sys import os if "publish" in sys.argv[-1]: os.system("python setup.py sdist upload") sys.exit() try: from setuptools import setup setup except ImportError: from distutils.core import setup setup # Command-line tools entry_points = {'console_scripts': [ 'K2onSilicon = K2fov.K2onSilicon:K2onSilicon_main', 'K2inMicrolensRegion = K2fov.c9:inMicrolensRegion_main', 'K2findCampaigns = K2fov.K2findCampaigns:K2findCampaigns_main', 'K2findCampaigns-csv = K2fov.K2findCampaigns:K2findCampaigns_csv_main' ]} setup(name='K2fov', version='3.0.0', description='Find which targets are in the field of view of K2', author='Tom Barclay', author_email='[email protected]', url='https://github.com/mrtommyb/K2fov', packages=['K2fov'], package_data={'K2fov': ['data/*.json']}, install_requires=["numpy>=1.8"], entry_points=entry_points, )
Bump version to v3.0.0 - ready for release
Bump version to v3.0.0 - ready for release
Python
mit
mrtommyb/K2fov,KeplerGO/K2fov
1fe9117c57a947eb22a1f269220d21553a131f02
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-yadt', url='https://chris-lamb.co.uk/projects/django-yadt', version='2.0.5', description="Yet Another Django Thumbnailer", author="Chris Lamb", author_email='[email protected]', license='BSD', packages=find_packages(), install_requires=( 'Django>=1.6.8', 'Pillow', ), )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-yadt', url='https://chris-lamb.co.uk/projects/django-yadt', version='2.0.5', description="Yet Another Django Thumbnailer", author="Chris Lamb", author_email='[email protected]', license='BSD', packages=find_packages(), install_requires=( 'Django>=1.8', 'Pillow', ), )
Update Django requirement to latest LTS
Update Django requirement to latest LTS
Python
bsd-3-clause
lamby/django-yadt
b2e378a747f4cd1172c9c8bffecda327c3dd25cc
setup.py
setup.py
import os from setuptools import setup, find_packages # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "nanospider", version = "0.1.0", author = "Andrew Pendleton", author_email = "[email protected]", description = "A tiny caching link-follower built on gevent, lxml, and scrapelib", license = "BSD", keywords = "spider gevent lxml scrapelib", url = "http://github.com/sunlightlabs/nanospider/", packages=find_packages(), long_description=read('README.md'), classifiers=[ "Development Status :: 3 - Alpha", "Topic :: Utilities", "License :: OSI Approved :: BSD License", ], install_requires = [ "requests", "gevent", "scrapelib", "lxml", "url>=0.1.2dev", ], dependency_links=[ "git+https://github.com/seomoz/url-py.git#egg=url-0.1.2dev", ], )
import os from setuptools import setup, find_packages # Utility function to read the README file. # Used for the long_description. It's nice, because now 1) we have a top level # README file and 2) it's easier to type in the README file than to put a raw # string in below ... def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name = "nanospider", version = "0.1.0", author = "Andrew Pendleton", author_email = "[email protected]", description = "A tiny caching link-follower built on gevent, lxml, and scrapelib", license = "BSD", keywords = "spider gevent lxml scrapelib", url = "http://github.com/sunlightlabs/nanospider/", packages=find_packages(), long_description=read('README.md'), classifiers=[ "Development Status :: 3 - Alpha", "Topic :: Utilities", "License :: OSI Approved :: BSD License", ], install_requires = [ "requests", "gevent", "scrapelib", "lxml", "url>=0.1.3", ], dependency_links=[ "git+https://github.com/seomoz/url-py.git#egg=url-0.1.3", ], )
Install url from the right place.
Install url from the right place.
Python
bsd-3-clause
sunlightlabs/nanospider
9cb406e18cedf8995c31a1760968a1ee86423c5f
setup.py
setup.py
#!/usr/bin/env python3 from setuptools import setup, find_packages # We want to restrict newer versions while we deal with upstream breaking changes. discordpy_version = '==0.10.0' # TODO read README(.rst? .md looks bad on pypi) for long_description. # Could use pandoc, but the end user shouldn't need to do this in setup. # Alt. could have package-specific description. More error-prone though. setup( # More permanent entries name='CrabBot', author='TAOTheCrab', url='https://github.com/TAOTheCrab/CrabBot', license='MIT', classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: MIT License' 'Programming Language :: Python :: 3.5' ], # Entries likely to be modified description='A simple Discord bot', version='0.0.1', # TODO figure out a version scheme. Ensure this gets updated. packages=find_packages(), # A little lazy install_requires=[ 'discord.py{}'.format(discordpy_version) ], extras_require={ 'voice': [ 'discord.py[voice]{}'.format(discordpy_version), 'youtube_dl' ] } )
#!/usr/bin/env python3 from setuptools import setup, find_packages # We want to restrict newer versions while we deal with upstream breaking changes. discordpy_version = '==0.10.0' # TODO read README(.rst? .md looks bad on pypi) for long_description. # Could use pandoc, but the end user shouldn't need to do this in setup. # Alt. could have package-specific description. More error-prone though. setup( # More permanent entries name='crabbot', author='TAOTheCrab', url='https://github.com/TAOTheCrab/CrabBot', license='MIT', classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: MIT License' 'Programming Language :: Python :: 3.5' ], # Entries likely to be modified description='A simple Discord bot', version='0.0.1', # TODO figure out a version scheme. Ensure this gets updated. packages=find_packages(), # A little lazy install_requires=[ 'discord.py{}'.format(discordpy_version) ], extras_require={ 'voice': [ 'discord.py[voice]{}'.format(discordpy_version), 'youtube_dl' ] } # scripts=['__main__.py'] )
Tweak the package name, add a note
Tweak the package name, add a note
Python
mit
TAOTheCrab/CrabBot
5844d7203a2f17752bbeca57dd404b83cc8ad475
setup.py
setup.py
from setuptools import setup with open('README.md') as readme_file: long_description = readme_file.read() setup( name="ipuz", version="0.1.1", license="MIT", description="Python library for reading and writing ipuz puzzle files", long_description=long_description, author="Simeon Visser", author_email="[email protected]", url="https://github.com/svisser/ipuz", install_requires=[ 'six==1.6.1' ], packages=['ipuz', 'ipuz.puzzlekinds', 'ipuz.structures'], classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Games/Entertainment :: Puzzle Games", ] )
from setuptools import setup with open('README.md') as readme_file: long_description = readme_file.read() setup( name="ipuz", version="0.1.1", license="MIT", description="Python library for reading and writing ipuz puzzle files", long_description=long_description, author="Simeon Visser", author_email="[email protected]", url="https://github.com/svisser/ipuz", install_requires=[ 'six' ], packages=['ipuz', 'ipuz.puzzlekinds', 'ipuz.structures'], classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Games/Entertainment :: Puzzle Games", ] )
Allow more flexibility in what six version is used
Allow more flexibility in what six version is used
Python
mit
svisser/ipuz
e693352b2c9787748cb1dcf2bfd6e134292bfa6a
setup.py
setup.py
import sys, os from setuptools import setup, Extension, find_packages import numpy kws = {} if not int(os.getenv( 'DISABLE_INSTALL_REQUIRES','0' )): kws['install_requires'] = [ 'numpy>=1.0.4', ] setup(name="motmot.imops", description="image format conversion (e.g. between MONO8, YUV422, and RGB)", long_description = """ This is a subpackage of the motmot family of digital image utilities. """, version="0.5.2", license="BSD", maintainer="Andrew Straw", maintainer_email="[email protected]", url="http://code.astraw.com/projects/motmot/imops.html", packages = find_packages(), namespace_packages = ['motmot'], ext_modules=[Extension(name="motmot.imops.imops", sources=['src/imops.pyx','src/color_convert.c',], include_dirs=[numpy.get_include()], ), ], zip_safe = True, **kws)
import sys, os from setuptools import setup, Extension, find_packages import numpy # Note when building sdist package: # Make sure to generate src/imops.c with "pyrexc src/imops.pyx". kws = {} if not int(os.getenv( 'DISABLE_INSTALL_REQUIRES','0' )): kws['install_requires'] = [ 'numpy>=1.0.4', ] setup(name="motmot.imops", description="image format conversion (e.g. between MONO8, YUV422, and RGB)", long_description = """ This is a subpackage of the motmot family of digital image utilities. """, version="0.5.2", license="BSD", maintainer="Andrew Straw", maintainer_email="[email protected]", url="http://code.astraw.com/projects/motmot/imops.html", packages = find_packages(), namespace_packages = ['motmot'], ext_modules=[Extension(name="motmot.imops.imops", sources=['src/imops.c','src/color_convert.c',], include_dirs=[numpy.get_include()], ), ], zip_safe = True, **kws)
Add note about building sdist
Add note about building sdist
Python
bsd-3-clause
motmot/imops,motmot/imops
da58b6ec7b9b74867e7755667988d60218c9699a
setup.py
setup.py
from setuptools import setup setup(name='mordecai', version='2.0.0a1', description='Full text geoparsing and event geocoding', url='http://github.com/openeventdata/mordecai/', author='Andy Halterman', author_email='[email protected]', license='MIT', packages=['mordecai'], keywords = ['geoparsing', 'nlp', 'geocoding', 'toponym resolution'], include_package_data=True, package_data = {'data': ['admin1CodesASCII.json', 'countries.json', 'nat_df.csv', 'stopword_country_names.json'], 'models' : ['country_model.h5', 'rank_model.h5']} )
from setuptools import setup setup(name='mordecai', version='2.0.0a2', description='Full text geoparsing and event geocoding', url='http://github.com/openeventdata/mordecai/', author='Andy Halterman', author_email='[email protected]', license='MIT', packages=['mordecai'], keywords = ['geoparsing', 'nlp', 'geocoding', 'toponym resolution'], include_package_data=True, package_data = {'data': ['admin1CodesASCII.json', 'countries.json', 'nat_df.csv', 'stopword_country_names.json'], 'models' : ['country_model.h5', 'rank_model.h5']} )
Update README and fix typos
Update README and fix typos
Python
mit
openeventdata/mordecai
f61b8cfcbf98da826a847981834763198db42867
setup.py
setup.py
from setuptools import setup, find_packages setup( name='ckanext-archiver', version='0.1', packages=find_packages(), install_requires=[ 'celery==2.4.5', 'kombu==1.5.1', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests==0.6.4', 'messytables>=0.1.4', 'flask==0.8' # flask needed for tests ], author='Open Knowledge Foundation', author_email='[email protected]', description='Archive ckan resources', long_description='Archive ckan resources', license='MIT', url='http://ckan.org/wiki/Extensions', download_url='', include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules' ], entry_points=''' [paste.paster_command] archiver = ckanext.archiver.commands:Archiver [ckan.plugins] archiver = ckanext.archiver.plugin:ArchiverPlugin [ckan.celery_task] tasks = ckanext.archiver.celery_import:task_imports ''' )
from setuptools import setup, find_packages setup( name='ckanext-archiver', version='0.1', packages=find_packages(), install_requires=[ 'celery==2.4.2', 'kombu==2.1.3', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests==0.6.4', 'messytables>=0.1.4', 'flask==0.8' # flask needed for tests ], author='Open Knowledge Foundation', author_email='[email protected]', description='Archive ckan resources', long_description='Archive ckan resources', license='MIT', url='http://ckan.org/wiki/Extensions', download_url='', include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules' ], entry_points=''' [paste.paster_command] archiver = ckanext.archiver.commands:Archiver [ckan.plugins] archiver = ckanext.archiver.plugin:ArchiverPlugin [ckan.celery_task] tasks = ckanext.archiver.celery_import:task_imports ''' )
Change celery and kombu requirements to match ckanext-datastorer
Change celery and kombu requirements to match ckanext-datastorer
Python
mit
datagovuk/ckanext-archiver,DanePubliczneGovPl/ckanext-archiver,ckan/ckanext-archiver,DanePubliczneGovPl/ckanext-archiver,DanePubliczneGovPl/ckanext-archiver,datagovuk/ckanext-archiver,ckan/ckanext-archiver,datagovuk/ckanext-archiver,ckan/ckanext-archiver
232eba0b7521d4fa7c005339f64690dd0b807cd2
setup.py
setup.py
from distutils.core import setup from setuptools import find_packages import versioneer with open("requirements.txt") as f: requirements = f.read().splitlines() setup( name="s3contents", version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description="A S3-backed ContentsManager implementation for Jupyter", url="https://github.com/danielfrg/s3contents", maintainer="Daniel Rodriguez", maintainer_email="[email protected]", license="Apache 2.0", packages=find_packages(), install_requires=requirements, zip_safe=False, )
import os from distutils.core import setup from setuptools import find_packages import versioneer def read_file(filename): filepath = os.path.join( os.path.dirname(os.path.dirname(__file__)), filename) if os.path.exists(filepath): return open(filepath).read() else: return '' REQUIREMENTS = read_file("requirements.txt").splitlines() setup( name="s3contents", version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description="A S3-backed ContentsManager implementation for Jupyter", long_description=read_file('README.md'), long_description_content_type="text/markdown", url="https://github.com/danielfrg/s3contents", maintainer="Daniel Rodriguez", maintainer_email="[email protected]", license="Apache 2.0", packages=find_packages(), install_requires=REQUIREMENTS, zip_safe=False, )
Handle markdown long description for Pypi
Handle markdown long description for Pypi
Python
apache-2.0
danielfrg/s3contents
f157ad4c971628c8fb0b1f94f6fd080c75413064
setup.py
setup.py
from setuptools import setup, find_packages setup( name="xoxzo.logwatch", version="0.4", packages=find_packages(), install_requires=[ 'Baker==1.3', 'pytz==2016.3', ], entry_points={ 'console_scripts': [ 'logwatch = xoxzo.logwatch.main:main', ], }, )
from setuptools import setup, find_packages setup( name="xoxzo.logwatch", version="0.4", packages=find_packages(), install_requires=[ 'Baker==1.3', 'pytz>=2016.3', ], entry_points={ 'console_scripts': [ 'logwatch = xoxzo.logwatch.main:main', ], }, )
Set pytz version minimum 2016.3
Set pytz version minimum 2016.3
Python
mit
xoxzo/xoxzo.logwatch
79a6177fa8366879fbb0ed1cb0961cca6b5ab89a
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages import materializecssform with open("README.md", "r") as fh: long_description = fh.read() setup( name='django-materializecss-form', version=materializecssform.__version__, packages=find_packages(), author="Kal Walkden", author_email="[email protected]", description="A simple Django form template tag to work with Materializecss", long_description=long_description, long_description_content_type="text/markdown", include_package_data=True, url='https://github.com/kalwalkden/django-materializecss-form', classifiers=[ "Programming Language :: Python", "Development Status :: 4 - Beta", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 3.6", ], license="MIT", zip_safe=False )
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages import materializecssform with open("README.md", "r", encoding="utf-8") as fh: long_description = fh.read() setup( name='django-materializecss-form', version=materializecssform.__version__, packages=find_packages(), author="Kal Walkden", author_email="[email protected]", description="A simple Django form template tag to work with Materializecss", long_description=long_description, long_description_content_type="text/markdown", include_package_data=True, url='https://github.com/kalwalkden/django-materializecss-form', classifiers=[ "Programming Language :: Python", "Development Status :: 4 - Beta", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 3.6", ], license="MIT", zip_safe=False )
Fix possible decoding problem when locale is set to C/ASCII
Fix possible decoding problem when locale is set to C/ASCII
Python
mit
florent1933/django-materializecss-form,florent1933/django-materializecss-form
2727fccdb3672e1c7b28e4ba94ec743b53298f26
src/main.py
src/main.py
''' Created on Aug 12, 2017 @author: Aditya This is the main file and will import other modules/codes written for python tkinter demonstration ''' import program1 as p1 import program2 as p2 import program3 as p3 import program4 as p4 import program5 as p5 import program6 as p6 import program7 as p7 import program8 as p8 import program9 as p9 import program10 as p10 import program11 as p11 import program12 as p12 def main(): p1.sayhello() p2.HelloAppLaunch() p3.GreetingAppLaunch() p4.launchButtonApp() p5.launchButton2App() p6.launchEntryApp() p7.launchSimpleCalenderApp() p8.ControlledPorgressApp() p9.DisplayAppLaunch() p10.launchTopLevelApp() p11.launchPanedWindowApp() p12.launchNoteBookApp() if __name__ == '__main__':main()
''' Created on Aug 12, 2017 @author: Aditya This is the main file and will import other modules/codes written for python tkinter demonstration ''' import program1 as p1 import program2 as p2 import program3 as p3 import program4 as p4 import program5 as p5 import program6 as p6 import program7 as p7 import program8 as p8 import program9 as p9 import program10 as p10 import program11 as p11 import program12 as p12 import program13 as p13 def main(): p1.sayhello() p2.HelloAppLaunch() p3.GreetingAppLaunch() p4.launchButtonApp() p5.launchButton2App() p6.launchEntryApp() p7.launchSimpleCalenderApp() p8.ControlledPorgressApp() p9.DisplayAppLaunch() p10.launchTopLevelApp() p11.launchPanedWindowApp() p12.launchNoteBookApp() p13.launchApp() if __name__ == '__main__':main()
Include Text App in Main
Include Text App in Main
Python
mit
deshadi/python-gui-demos
ef1bd5c935d6fe814f2755dadff398929a5a23d6
tasks.py
tasks.py
from datetime import timedelta from celery import Celery from celery.utils.log import get_task_logger import settings import client celery = Celery('tasks', broker=settings.CELERY_BROKER, backend=settings.CELERY_RESULT_BACKEND) @celery.task def refresh_plows(): client.refresh_plows() client.logger = get_task_logger(__name__) celery.conf.CELERYBEAT_SCHEDULE = { 'update-plows': { 'task': '%s.refresh_plows' % __name__, 'schedule': timedelta(seconds=30) }, }
from datetime import timedelta from celery import Celery from celery.utils.log import get_task_logger import settings import client celery = Celery('tasks', broker=settings.CELERY_BROKER, backend=settings.CELERY_RESULT_BACKEND) @celery.task def refresh_plows(): client.refresh_plows() client.logger = get_task_logger(__name__) celery.conf.CELERYBEAT_SCHEDULE = { 'update-plows': { 'task': '%s.refresh_plows' % __name__, 'schedule': timedelta(seconds=10) }, }
Update from upstream server every 10 secs
Update from upstream server every 10 secs
Python
agpl-3.0
City-of-Helsinki/aura,City-of-Helsinki/aura
e3a2e65199c3d0db9576a25dc039f66e094171b6
src/passgen.py
src/passgen.py
import string import random import argparse def passgen(length=8): """Generate a strong password with *length* characters""" pool = string.ascii_uppercase + string.ascii_lowercase + string.digits return ''.join(random.SystemRandom().choice(pool) for _ in range(length)) def main(): parser = argparse.ArgumentParser("Generate strong random password.") parser.add_argument("length", help="the number of characters to generate", type=int) parser.add_argument("-n", "--number", help="how many passwords to generate", type=int) args = parser.parse_args() for _ in range(args.number): print passgen(args.length)
import string import random import argparse def passgen(length=12): """Generate a strong password with *length* characters""" pool = string.ascii_uppercase + string.ascii_lowercase + string.digits return ''.join(random.SystemRandom().choice(pool) for _ in range(length)) def main(): parser = argparse.ArgumentParser("Generate strong random password.") parser.add_argument("-l", "--length", help="the number of characters to generate", type=int, default=12) parser.add_argument("-n", "--number", help="how many passwords to generate", type=int, default=10) args = parser.parse_args() for _ in range(args.number): print passgen(args.length)
Make length optional. Set up defaults.
Make length optional. Set up defaults.
Python
mit
soslan/passgen
1e327401d9c020bb7941b20ff51890ad1729973d
tests.py
tests.py
import pytest from django.contrib.auth import get_user_model from seleniumlogin import force_login pytestmark = [pytest.mark.django_db(transaction=True)] def test_non_authenticated_user_cannot_access_test_page(selenium, live_server): selenium.get('{}/test/login_required/'.format(live_server.url)) assert 'fail' in selenium.page_source def test_authenticated_user_can_access_blank_login_page(selenium, live_server): User = get_user_model() user = User.objects.create_user(username='selenium', password='password') force_login(user, selenium, live_server.url) selenium.get('{}/test/login_required/'.format(live_server.url)) assert 'success' in selenium.page_source
import pytest from django.contrib.auth import get_user_model from seleniumlogin import force_login pytestmark = [pytest.mark.django_db(transaction=True)] def test_non_authenticated_user_cannot_access_test_page(selenium, live_server): selenium.get('{}/test/login_required/'.format(live_server.url)) assert 'fail' in selenium.page_source def test_authenticated_user_can_access_test_page(selenium, live_server): User = get_user_model() user = User.objects.create_user(username='selenium', password='password') force_login(user, selenium, live_server.url) selenium.get('{}/test/login_required/'.format(live_server.url)) assert 'success' in selenium.page_source
Rename test. The test tries to access a test page, not a blank page
Rename test. The test tries to access a test page, not a blank page
Python
mit
feffe/django-selenium-login,feffe/django-selenium-login
dfc6e7d9b6c415dc230b5ea4948e59a486e7364c
mysite/deployment_settings.py
mysite/deployment_settings.py
from settings import * OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg' DEBUG=False ADMINS=[ ('All OH devs', '[email protected]'), ] INVITE_MODE=True # Enabled on production site INVITATIONS_PER_USER=20 TEMPLTE_DEBUG=False
from settings import * OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg' DEBUG=False ADMINS=[ ('All OH devs', '[email protected]'), ] INVITE_MODE=True # Enabled on production site INVITATIONS_PER_USER=20 TEMPLTE_DEBUG=False
Use right email address for devel@ list
Use right email address for devel@ list
Python
agpl-3.0
ojengwa/oh-mainline,ojengwa/oh-mainline,campbe13/openhatch,mzdaniel/oh-mainline,willingc/oh-mainline,waseem18/oh-mainline,SnappleCap/oh-mainline,onceuponatimeforever/oh-mainline,mzdaniel/oh-mainline,sudheesh001/oh-mainline,eeshangarg/oh-mainline,vipul-sharma20/oh-mainline,jledbetter/openhatch,eeshangarg/oh-mainline,onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,vipul-sharma20/oh-mainline,jledbetter/openhatch,ehashman/oh-mainline,Changaco/oh-mainline,waseem18/oh-mainline,SnappleCap/oh-mainline,ojengwa/oh-mainline,campbe13/openhatch,jledbetter/openhatch,mzdaniel/oh-mainline,moijes12/oh-mainline,mzdaniel/oh-mainline,nirmeshk/oh-mainline,onceuponatimeforever/oh-mainline,Changaco/oh-mainline,openhatch/oh-mainline,heeraj123/oh-mainline,willingc/oh-mainline,moijes12/oh-mainline,nirmeshk/oh-mainline,openhatch/oh-mainline,willingc/oh-mainline,mzdaniel/oh-mainline,onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,ehashman/oh-mainline,openhatch/oh-mainline,heeraj123/oh-mainline,mzdaniel/oh-mainline,moijes12/oh-mainline,ehashman/oh-mainline,campbe13/openhatch,sudheesh001/oh-mainline,Changaco/oh-mainline,eeshangarg/oh-mainline,eeshangarg/oh-mainline,sudheesh001/oh-mainline,campbe13/openhatch,ojengwa/oh-mainline,moijes12/oh-mainline,ehashman/oh-mainline,heeraj123/oh-mainline,vipul-sharma20/oh-mainline,Changaco/oh-mainline,waseem18/oh-mainline,willingc/oh-mainline,SnappleCap/oh-mainline,SnappleCap/oh-mainline,openhatch/oh-mainline,nirmeshk/oh-mainline,waseem18/oh-mainline,ehashman/oh-mainline,SnappleCap/oh-mainline,Changaco/oh-mainline,onceuponatimeforever/oh-mainline,jledbetter/openhatch,eeshangarg/oh-mainline,nirmeshk/oh-mainline,moijes12/oh-mainline,openhatch/oh-mainline,jledbetter/openhatch,campbe13/openhatch,vipul-sharma20/oh-mainline,waseem18/oh-mainline,mzdaniel/oh-mainline,willingc/oh-mainline,vipul-sharma20/oh-mainline,heeraj123/oh-mainline,ojengwa/oh-mainline,heeraj123/oh-mainline,nirmeshk/oh-mainline
741545dcf58fdfaf882d797d3ce4f7607ca0dad4
kobo/client/commands/cmd_resubmit_tasks.py
kobo/client/commands/cmd_resubmit_tasks.py
# -*- coding: utf-8 -*- from __future__ import print_function import sys from kobo.client.task_watcher import TaskWatcher from kobo.client import ClientCommand class Resubmit_Tasks(ClientCommand): """resubmit failed tasks""" enabled = True def options(self): self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.") def run(self, *args, **kwargs): if len(args) == 0: self.parser.error("At least one task id must be specified.") username = kwargs.pop("username", None) password = kwargs.pop("password", None) tasks = args self.set_hub(username, password) resubmitted_tasks = [] failed = False for task_id in tasks: try: resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False)) resubmitted_tasks.append(resubmitted_id) except Exception as ex: failed = True print(ex) TaskWatcher.watch_tasks(self.hub, resubmitted_tasks) if failed: sys.exit(1)
# -*- coding: utf-8 -*- from __future__ import print_function import sys from kobo.client.task_watcher import TaskWatcher from kobo.client import ClientCommand class Resubmit_Tasks(ClientCommand): """resubmit failed tasks""" enabled = True def options(self): self.parser.usage = "%%prog %s task_id [task_id...]" % self.normalized_name self.parser.add_option("--force", action="store_true", help="Resubmit also tasks which are closed properly.") self.parser.add_option("--nowait", default=False, action="store_true", help="Don't wait until tasks finish.") def run(self, *args, **kwargs): if len(args) == 0: self.parser.error("At least one task id must be specified.") username = kwargs.pop("username", None) password = kwargs.pop("password", None) tasks = args self.set_hub(username, password) resubmitted_tasks = [] failed = False for task_id in tasks: try: resubmitted_id = self.hub.client.resubmit_task(task_id, kwargs.pop("force", False)) resubmitted_tasks.append(resubmitted_id) except Exception as ex: failed = True print(ex) if not kwargs.get('nowait'): TaskWatcher.watch_tasks(self.hub, resubmitted_tasks) if failed: sys.exit(1)
Add --nowait option to resubmit-tasks cmd
Add --nowait option to resubmit-tasks cmd In some use cases, waiting till the tasks finish is undesirable. Nowait option should be provided.
Python
lgpl-2.1
release-engineering/kobo,release-engineering/kobo,release-engineering/kobo,release-engineering/kobo
2e70450846fb0f40d6d0449093ddf121da7547a8
tbmodels/__init__.py
tbmodels/__init__.py
# -*- coding: utf-8 -*- # (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik # Author: Dominik Gresch <[email protected]> r""" TBmodels is a tool for creating / loading and manipulating tight-binding models. """ __version__ = '1.3.2a1' # import order is important due to circular imports from . import helpers from ._tb_model import Model from . import _kdotp from . import io
# -*- coding: utf-8 -*- # (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik # Author: Dominik Gresch <[email protected]> r""" TBmodels is a tool for creating / loading and manipulating tight-binding models. """ __version__ = '1.3.2' # import order is important due to circular imports from . import helpers from ._tb_model import Model from . import _kdotp from . import io
Remove 'alpha' designation from version number.
Remove 'alpha' designation from version number.
Python
apache-2.0
Z2PackDev/TBmodels,Z2PackDev/TBmodels
8e7a92bce03ca472bc78bb9df5e2c9cf063c29b7
temba/campaigns/tasks.py
temba/campaigns/tasks.py
from __future__ import unicode_literals from datetime import datetime from django.utils import timezone from djcelery_transactions import task from redis_cache import get_redis_connection from .models import Campaign, EventFire from django.conf import settings import redis from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT from temba.utils.queues import push_task @task(track_started=True, name='check_campaigns_task') # pragma: no cover def check_campaigns_task(sched_id=None): """ See if any event fires need to be triggered """ logger = check_campaigns_task.get_logger() # get a lock r = get_redis_connection() key = 'check_campaigns' # only do this if we aren't already checking campaigns if not r.get(key): with r.lock(key, timeout=3600): # for each that needs to be fired for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('event', 'event.org'): try: push_task(fire.event.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id)) except: # pragma: no cover logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
from __future__ import unicode_literals from datetime import datetime from django.utils import timezone from djcelery_transactions import task from redis_cache import get_redis_connection from .models import Campaign, EventFire from django.conf import settings import redis from temba.msgs.models import HANDLER_QUEUE, HANDLE_EVENT_TASK, FIRE_EVENT from temba.utils.queues import push_task @task(track_started=True, name='check_campaigns_task') # pragma: no cover def check_campaigns_task(sched_id=None): """ See if any event fires need to be triggered """ logger = check_campaigns_task.get_logger() # get a lock r = get_redis_connection() key = 'check_campaigns' # only do this if we aren't already checking campaigns if not r.get(key): with r.lock(key, timeout=3600): # for each that needs to be fired for fire in EventFire.objects.filter(fired=None, scheduled__lte=timezone.now()).select_related('contact', 'contact.org'): try: push_task(fire.contact.org, HANDLER_QUEUE, HANDLE_EVENT_TASK, dict(type=FIRE_EVENT, id=fire.id)) except: # pragma: no cover logger.error("Error running campaign event: %s" % fire.pk, exc_info=True)
Use correct field to get org from
Use correct field to get org from
Python
agpl-3.0
harrissoerja/rapidpro,pulilab/rapidpro,pulilab/rapidpro,reyrodrigues/EU-SMS,tsotetsi/textily-web,harrissoerja/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,tsotetsi/textily-web,Thapelo-Tsotetsi/rapidpro,Thapelo-Tsotetsi/rapidpro,ewheeler/rapidpro,praekelt/rapidpro,harrissoerja/rapidpro,praekelt/rapidpro,reyrodrigues/EU-SMS,Thapelo-Tsotetsi/rapidpro,ewheeler/rapidpro,tsotetsi/textily-web,reyrodrigues/EU-SMS,ewheeler/rapidpro,pulilab/rapidpro,tsotetsi/textily-web,praekelt/rapidpro,ewheeler/rapidpro,pulilab/rapidpro,praekelt/rapidpro
1e2086b868861034d89138349c4da909f380f19e
feedback/views.py
feedback/views.py
from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_exempt from rest_framework import serializers, status from rest_framework.response import Response from rest_framework.views import APIView from .models import Feedback class FeedbackSerializer(serializers.ModelSerializer): class Meta: model = Feedback @method_decorator(csrf_exempt, name='dispatch') class FeedbackView(APIView): @csrf_exempt def post(self, request, format=None): if self.request.user.is_authenticated(): user = self.request.user else: user = None if 'user' in request.data: del request.data['user'] user_agent = request.data.get('user_agent') if not user_agent: user_agent = request.META.get('HTTP_USER_AGENT', None) serializer = FeedbackSerializer(data=request.data) if serializer.is_valid(): serializer.save(user=user, user_agent=user_agent) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_exempt from rest_framework import serializers, status from rest_framework.response import Response from rest_framework.views import APIView from .models import Feedback class FeedbackSerializer(serializers.ModelSerializer): class Meta: model = Feedback fields = '__all__' @method_decorator(csrf_exempt, name='dispatch') class FeedbackView(APIView): @csrf_exempt def post(self, request, format=None): if self.request.user.is_authenticated(): user = self.request.user else: user = None if 'user' in request.data: del request.data['user'] user_agent = request.data.get('user_agent') if not user_agent: user_agent = request.META.get('HTTP_USER_AGENT', None) serializer = FeedbackSerializer(data=request.data) if serializer.is_valid(): serializer.save(user=user, user_agent=user_agent) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
Make feedback compatible with DRF >3.3.0
Make feedback compatible with DRF >3.3.0
Python
mit
City-of-Helsinki/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel
550ce00c5d2757287fee2d810543d3f17ad9ccff
Orange/tests/sql/test_naive_bayes_sql.py
Orange/tests/sql/test_naive_bayes_sql.py
import unittest from numpy import array import Orange.classification.naive_bayes as nb from Orange import preprocess from Orange.data.sql.table import SqlTable from Orange.data import Domain from Orange.data.variable import DiscreteVariable from Orange.tests.sql.base import has_psycopg2 @unittest.skipIf(not has_psycopg2, "Psycopg2 is required for sql tests.") class NaiveBayesTest(unittest.TestCase): def test_NaiveBayes(self): table = SqlTable(dict(host='localhost', database='test'), 'iris', type_hints=Domain([], DiscreteVariable("iris", values=['Iris-setosa', 'Iris-virginica', 'Iris-versicolor']))) table = preprocess.Discretize(table) bayes = nb.NaiveBayesLearner() clf = bayes(table) # Single instance prediction self.assertEqual(clf(table[0]), table[0].get_class()) # Table prediction pred = clf(table) actual = array([ins.get_class() for ins in table]) ca = pred == actual ca = ca.sum() / len(ca) self.assertGreater(ca, 0.95) self.assertLess(ca, 1.)
import unittest from numpy import array import Orange.classification.naive_bayes as nb from Orange import preprocess from Orange.data.sql.table import SqlTable from Orange.data import Domain from Orange.data.variable import DiscreteVariable from Orange.tests.sql.base import has_psycopg2 @unittest.skipIf(not has_psycopg2, "Psycopg2 is required for sql tests.") class NaiveBayesTest(unittest.TestCase): def test_NaiveBayes(self): table = SqlTable(dict(host='localhost', database='test'), 'iris', type_hints=Domain([], DiscreteVariable("iris", values=['Iris-setosa', 'Iris-virginica', 'Iris-versicolor']))) table = preprocess.Discretize()(table) bayes = nb.NaiveBayesLearner() clf = bayes(table) # Single instance prediction self.assertEqual(clf(table[0]), table[0].get_class()) # Table prediction pred = clf(table) actual = array([ins.get_class() for ins in table]) ca = pred == actual ca = ca.sum() / len(ca) self.assertGreater(ca, 0.95) self.assertLess(ca, 1.)
Fix test of NaiveBayes on SQL table
Fix test of NaiveBayes on SQL table
Python
bsd-2-clause
cheral/orange3,marinkaz/orange3,qusp/orange3,qPCR4vir/orange3,qusp/orange3,qusp/orange3,kwikadi/orange3,qPCR4vir/orange3,qPCR4vir/orange3,marinkaz/orange3,qPCR4vir/orange3,marinkaz/orange3,marinkaz/orange3,cheral/orange3,cheral/orange3,kwikadi/orange3,marinkaz/orange3,cheral/orange3,cheral/orange3,kwikadi/orange3,cheral/orange3,marinkaz/orange3,kwikadi/orange3,qPCR4vir/orange3,qusp/orange3,qPCR4vir/orange3,kwikadi/orange3,kwikadi/orange3
4fc1cd3e3b225b981521cf321f8c4441f45c3252
test/pytorch/test_cnn.py
test/pytorch/test_cnn.py
import logging import unittest from ..helpers import run_module logging.basicConfig(level=logging.DEBUG) class PytorchCNNTest(unittest.TestCase): def setUp(self): self.name = "benchmarker" self.imgnet_args = [ "--framework=pytorch", "--problem_size=4", "--batch_size=2", ] def test_vgg16(self): run_module(self.name, "--problem=cnn2d_toy", *self.imgnet_args)
import logging import unittest from ..helpers import run_module logging.basicConfig(level=logging.DEBUG) class PytorchCNNTest(unittest.TestCase): def setUp(self): self.name = "benchmarker" self.args = [ "--problem=cnn2d_toy", "--framework=pytorch", "--problem_size=4", "--batch_size=2", ] def test_cnn2d_inference(self): run_module(self.name, "--mode=inference", *self.args) def test_cnn2d_training(self): run_module(self.name, "--mode=training", *self.args)
Add training and inference test
Add training and inference test
Python
mpl-2.0
undertherain/benchmarker,undertherain/benchmarker,undertherain/benchmarker,undertherain/benchmarker
90bdcad66a6f29c9e3d731b5b09b0a2ba477ae2f
tviit/urls.py
tviit/urls.py
from django.conf.urls import include, url from django.contrib import admin from . import views urlpatterns = [ url(r'^', views.IndexView.as_view(), name='tviit_index'), ]
from django.conf.urls import include, url from django.contrib import admin from . import views urlpatterns = [ url(r'^$', views.IndexView.as_view(), name='tviit_index'), url(r'create/$', views.create_tviit, name="create_tviit"), ]
Create url-patterns for tviit creation
Create url-patterns for tviit creation
Python
mit
DeWaster/Tviserrys,DeWaster/Tviserrys
881222a49c6b3e8792adf5754c61992bd12c7b28
tests/test_conduction.py
tests/test_conduction.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """Test Mongo Conduction.""" import logging import pymongo from mockupdb import go from pymongo.errors import OperationFailure from conduction.server import get_mockup, main_loop from tests import unittest # unittest2 on Python 2.6. class ConductionTest(unittest.TestCase): def setUp(self): self.mockup = get_mockup(releases={}, env=None, port=None, verbose=False) # Quiet. logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL) self.mockup.run() self.loop_future = go(main_loop, self.mockup) # Cleanups are LIFO: Stop the server, wait for the loop to exit. self.addCleanup(self.loop_future) self.addCleanup(self.mockup.stop) self.conduction = pymongo.MongoClient(self.mockup.uri).test def test_bad_command_name(self): with self.assertRaises(OperationFailure): self.conduction.command('foo') if __name__ == '__main__': unittest.main()
#!/usr/bin/env python # -*- coding: utf-8 -*- """Test Mongo Conduction.""" import logging import pymongo from mockupdb import go from pymongo.errors import OperationFailure from conduction.server import get_mockup, main_loop from tests import unittest # unittest2 on Python 2.6. class ConductionTest(unittest.TestCase): def setUp(self): self.mockup = get_mockup(releases={}, env=None, port=None, verbose=False) # Quiet. logging.getLogger('mongo_orchestration.apps').setLevel(logging.CRITICAL) self.mockup.run() self.loop_future = go(main_loop, self.mockup) # Cleanups are LIFO: Stop the server, wait for the loop to exit. self.addCleanup(self.loop_future) self.addCleanup(self.mockup.stop) # Any database name will do. self.conduction = pymongo.MongoClient(self.mockup.uri).conduction def test_root_uri(self): reply = self.conduction.command('get', '/') self.assertIn('links', reply) self.assertIn('service', reply) def test_bad_command_name(self): with self.assertRaises(OperationFailure) as context: self.conduction.command('foo') self.assertIn('unrecognized: {"foo": 1}', str(context.exception)) def test_server_id_404(self): with self.assertRaises(OperationFailure) as context: self.conduction.command({'post': '/v1/servers/'}) self.assertIn('404 Not Found', str(context.exception)) if __name__ == '__main__': unittest.main()
Test root URI and 404s.
Test root URI and 404s.
Python
apache-2.0
ajdavis/mongo-conduction
86480f2a4948b58f458d686be5fba330abf9743f
rx/core/operators/delaysubscription.py
rx/core/operators/delaysubscription.py
from datetime import datetime from typing import Union, Callable from rx import empty, timer, operators as ops from rx.core import Observable def _delay_subscription(duetime: Union[datetime, int]) -> Callable[[Observable], Observable]: """Time shifts the observable sequence by delaying the subscription. 1 - res = source.delay_subscription(5000) # 5s duetime -- Absolute or relative time to perform the subscription at. Returns time-shifted sequence. """ def delay_subscription(source: Observable) -> Observable: def mapper(_) -> Observable: return empty() return source.pipe(ops.delay_with_selector(timer(duetime), mapper)) return delay_subscription
from datetime import datetime from typing import Union, Callable from rx import empty, timer, operators as ops from rx.core import Observable def _delay_subscription(duetime: Union[datetime, int]) -> Callable[[Observable], Observable]: """Time shifts the observable sequence by delaying the subscription. 1 - res = source.delay_subscription(5000) # 5s duetime -- Absolute or relative time to perform the subscription at. Returns time-shifted sequence. """ def delay_subscription(source: Observable) -> Observable: def mapper(_) -> Observable: return empty() return source.pipe(ops.delay_with_mapper(timer(duetime), mapper)) return delay_subscription
Fix call to renamed function
Fix call to renamed function
Python
mit
ReactiveX/RxPY,ReactiveX/RxPY
e3b1a323921b8331d7fd84c013e80a89a5b21bde
haproxy_status.py
haproxy_status.py
#!/usr/bin/env python from BaseHTTPServer import BaseHTTPRequestHandler from helpers.etcd import Etcd from helpers.postgresql import Postgresql import sys, yaml, socket f = open(sys.argv[1], "r") config = yaml.load(f.read()) f.close() etcd = Etcd(config["etcd"]) postgresql = Postgresql(config["postgresql"]) class StatusHandler(BaseHTTPRequestHandler): def do_GET(self): return self.do_ANY() def do_OPTIONS(self): return self.do_ANY() def do_ANY(self): if postgresql.name == etcd.current_leader()["hostname"]: self.send_response(200) else: self.send_response(503) self.end_headers() self.wfile.write('\r\n') return try: from BaseHTTPServer import HTTPServer host, port = config["haproxy_status"]["listen"].split(":") server = HTTPServer((host, int(port)), StatusHandler) print 'listening on %s:%s' % (host, port) server.serve_forever() except KeyboardInterrupt: print('^C received, shutting down server') server.socket.close()
#!/usr/bin/env python from BaseHTTPServer import BaseHTTPRequestHandler from helpers.etcd import Etcd from helpers.postgresql import Postgresql import sys, yaml, socket f = open(sys.argv[1], "r") config = yaml.load(f.read()) f.close() etcd = Etcd(config["etcd"]) postgresql = Postgresql(config["postgresql"]) class StatusHandler(BaseHTTPRequestHandler): def do_GET(self): return self.do_ANY() def do_OPTIONS(self): return self.do_ANY() def do_ANY(self): leader = etcd.current_leader() is_leader = leader != None and postgresql.name == leader["hostname"] if ((self.path == "/" or self.path == "/master") and is_leader) or (self.path == "/replica" and not is_leader): self.send_response(200) else: self.send_response(503) self.end_headers() self.wfile.write('\r\n') return try: from BaseHTTPServer import HTTPServer host, port = config["haproxy_status"]["listen"].split(":") server = HTTPServer((host, int(port)), StatusHandler) print 'listening on %s:%s' % (host, port) server.serve_forever() except KeyboardInterrupt: print('^C received, shutting down server') server.socket.close()
Add the ability to query for the replica status of a PG instance
Add the ability to query for the replica status of a PG instance
Python
mit
Tapjoy/governor
d0191c43c784b229ce104700989dfb91c67ec490
helper/windows.py
helper/windows.py
""" Windows platform support for running the application as a detached process. """ import multiprocessing import subprocess import sys DETACHED_PROCESS = 8 class Daemon(object): def __init__(self, controller, user=None, group=None, pid_file=None, prevent_core=None, exception_log=None): """Daemonize the controller, optionally passing in the user and group to run as, a pid file, if core dumps should be prevented and a path to write out exception logs to. :param helper.Controller controller: The controller to daaemonize & run :param str user: Optional username to run as :param str group: Optional group to run as :param str pid_file: Optional path to the pidfile to run :param bool prevent_core: Don't make any core files :param str exception_log: Optional exception log path """ args = [sys.executable] args.extend(sys.argv) self.pid = subprocess.Popen(args, creationflags=DETACHED_PROCESS, shell=True).pid
""" Windows platform support for running the application as a detached process. """ import subprocess import sys DETACHED_PROCESS = 8 class Daemon(object): """Daemonize the helper application, putting it in a forked background process. """ def __init__(self, controller): raise NotImplementedError #args = [sys.executable] #args.extend(sys.argv) #self.pid = subprocess.Popen(args, # creationflags=DETACHED_PROCESS, # shell=True).pid
Raise a NotImplementedError for Windows
Raise a NotImplementedError for Windows
Python
bsd-3-clause
gmr/helper,dave-shawley/helper,gmr/helper
b8350e91d7bd1e3a775ed230820c96a180a2ad02
tests/test_solver.py
tests/test_solver.py
from tinyik import Link, Joint, FKSolver from .utils import x, y, z, theta, approx_eq def test_forward_kinematics(): fk = FKSolver([ Joint('z'), Link([1., 0., 0.]), Joint('y'), Link([1., 0., 0.]) ]) assert all(fk.solve([0., 0.]) == [2., 0., 0.]) assert approx_eq(fk.solve([theta, theta]), [x, y, -z]) assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z])
from tinyik import Link, Joint, FKSolver, CCDFKSolver, CCDIKSolver from .utils import x, y, z, theta, approx_eq components = [Joint('z'), Link([1., 0., 0.]), Joint('y'), Link([1., 0., 0.])] predicted = [2., 0., 0.] def test_fk(): fk = FKSolver(components) assert all(fk.solve([0., 0.]) == predicted) assert approx_eq(fk.solve([theta, theta]), [x, y, -z]) assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z]) def test_ccd_fk(): fk = CCDFKSolver(components) assert all(fk.solve([0., 0.]) == predicted) assert approx_eq(fk.solve([theta, theta]), [x, y, -z]) assert approx_eq(fk.solve([-theta, -theta]), [x, -y, z]) def test_ccd_ik(): fk = CCDFKSolver(components) ik = CCDIKSolver(fk) assert approx_eq(ik.solve([0., 0.], [x, y, -z]), [theta, theta]) assert approx_eq(ik.solve([0., 0.], [x, -y, z]), [-theta, -theta])
Add tests for CCD IK solver
Add tests for CCD IK solver
Python
mit
lanius/tinyik
2f63f134d2c9aa67044eb176a3f81857279f107d
troposphere/utils.py
troposphere/utils.py
import time def get_events(conn, stackname): """Get the events in batches and return in chronological order""" next = None event_list = [] while 1: events = conn.describe_stack_events(stackname, next) event_list.append(events) if events.next_token is None: break next = events.next_token time.sleep(1) return reversed(sum(event_list, [])) def tail(conn, stack_name): """Show and then tail the event log""" def tail_print(e): print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id)) # First dump the full list of events in chronological order and keep # track of the events we've seen already seen = set() initial_events = get_events(conn, stack_name) for e in initial_events: tail_print(e) seen.add(e.event_id) # Now keep looping through and dump the new events while 1: events = get_events(conn, stack_name) for e in events: if e.event_id not in seen: tail_print(e) seen.add(e.event_id) time.sleep(5)
import time def _tail_print(e): print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id)) def get_events(conn, stackname): """Get the events in batches and return in chronological order""" next = None event_list = [] while 1: events = conn.describe_stack_events(stackname, next) event_list.append(events) if events.next_token is None: break next = events.next_token time.sleep(1) return reversed(sum(event_list, [])) def tail(conn, stack_name, log_func=_tail_print, sleep_time=5): """Show and then tail the event log""" # First dump the full list of events in chronological order and keep # track of the events we've seen already seen = set() initial_events = get_events(conn, stack_name) for e in initial_events: log_func(e) seen.add(e.event_id) # Now keep looping through and dump the new events while 1: events = get_events(conn, stack_name) for e in events: if e.event_id not in seen: log_func(e) seen.add(e.event_id) time.sleep(sleep_time)
Support a custom logging function and sleep time within tail
Support a custom logging function and sleep time within tail
Python
bsd-2-clause
mhahn/troposphere
35594a4f8c549d507c7d7030141ae511aed57c09
workflowmax/__init__.py
workflowmax/__init__.py
from .api import WorkflowMax # noqa __version__ = "0.1.0"
from .api import WorkflowMax # noqa from .credentials import Credentials # noqa __version__ = "0.1.0"
Add Credentials to root namespace
Add Credentials to root namespace
Python
bsd-3-clause
ABASystems/pyworkflowmax
ab5aac0c9b0e075901c4cd8dd5d134e79f0e0110
brasileirao/spiders/results_spider.py
brasileirao/spiders/results_spider.py
import scrapy import scrapy.selector from brasileirao.items import BrasileiraoItem import hashlib class ResultsSpider(scrapy.Spider): name = "results" start_urls = [ 'https://esporte.uol.com.br/futebol/campeonatos/brasileirao/jogos/', ] def parse(self, response): actual_round = 0 for rodada in response.css('.rodadas .confrontos li'): actual_round += 1 for game in rodada.css(".confronto"): home_team = game.css(".partida .time1") away_team = game.css(".partida .time2") item = BrasileiraoItem() item['rodada'] = actual_round item['home_team'] = home_team.css("abbr::attr(title)").extract_first().encode('utf8') item['away_team'] = away_team.css("abbr::attr(title)").extract_first().encode('utf8') item['home_score'] = home_team.css(".gols::text").extract_first() item['away_score'] = away_team.css(".gols::text").extract_first() item['date'] = game.css(".info-partida time::attr(datetime)").extract_first() id = item['home_team'] + item['away_team'] item['id'] = hashlib.md5(id).hexdigest() yield item
# -*- coding: utf-8 -*- import scrapy import scrapy.selector from brasileirao.items import BrasileiraoItem import hashlib class ResultsSpider(scrapy.Spider): name = "results" start_urls = [ 'https://esporte.uol.com.br/futebol/campeonatos/brasileirao/jogos/', ] def parse(self, response): actual_round = 0 for rodada in response.css('.rodadas .confrontos li'): actual_round += 1 for game in rodada.css(".confronto"): home_team = game.css(".partida .time1") away_team = game.css(".partida .time2") item = BrasileiraoItem() item['rodada'] = actual_round item['home_team'] = home_team.css("abbr::attr(title)").extract_first() item['away_team'] = away_team.css("abbr::attr(title)").extract_first() item['home_score'] = home_team.css(".gols::text").extract_first() item['away_score'] = away_team.css(".gols::text").extract_first() item['date'] = game.css(".info-partida time::attr(datetime)").extract_first() id = item['home_team'] + item['away_team'] item['id'] = hashlib.md5(id).hexdigest() yield item
Set utf-8 as default encoding.
Set utf-8 as default encoding.
Python
mit
pghilardi/live-football-client
a3c1822dd2942de4b6bf5cac14039e6789babf85
wafer/pages/admin.py
wafer/pages/admin.py
from django.contrib import admin from wafer.pages.models import File, Page class PageAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ('name', 'slug', 'get_people_display_names', 'get_in_schedule') admin.site.register(Page, PageAdmin) admin.site.register(File)
from django.contrib import admin from wafer.pages.models import File, Page from reversion.admin import VersionAdmin class PageAdmin(VersionAdmin, admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} list_display = ('name', 'slug', 'get_people_display_names', 'get_in_schedule') admin.site.register(Page, PageAdmin) admin.site.register(File)
Add reversion support to Pages
Add reversion support to Pages
Python
isc
CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
d30605d82d5f04e8478c785f1bb5086066e50878
awx/wsgi.py
awx/wsgi.py
# Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. import logging from django.core.wsgi import get_wsgi_application from awx import prepare_env from awx import __version__ as tower_version """ WSGI config for AWX project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/ """ # Prepare the AWX environment. prepare_env() logger = logging.getLogger('awx.main.models.jobs') try: fd = open("/var/lib/awx/.tower_version", "r") if fd.read().strip() != tower_version: raise Exception() except Exception: logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") # Return the default Django WSGI application. application = get_wsgi_application()
# Copyright (c) 2015 Ansible, Inc. # All Rights Reserved. import logging from awx import __version__ as tower_version # Prepare the AWX environment. from awx import prepare_env prepare_env() from django.core.wsgi import get_wsgi_application """ WSGI config for AWX project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/ """ logger = logging.getLogger('awx.main.models.jobs') try: fd = open("/var/lib/awx/.tower_version", "r") if fd.read().strip() != tower_version: raise Exception() except Exception: logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") # Return the default Django WSGI application. application = get_wsgi_application()
Fix import error by calling prepare_env first
Fix import error by calling prepare_env first
Python
apache-2.0
wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx
4076fb322814848d802d1f925d163e90b3d629a9
selenium_testcase/testcases/forms.py
selenium_testcase/testcases/forms.py
# -*- coding: utf-8 -*- from __future__ import absolute_import from selenium.webdriver.common.by import By from .utils import wait_for class FormTestMixin: # default search element form_search_list = ( (By.ID, '{}',), (By.NAME, '{}',), (By.XPATH, '//form[@action="{}"]',), (By.XPATH, '//form[@name="{}"]',), (By.XPATH, '//form/*',), ) @wait_for def get_form(self, *args, **kwargs): """ Return form element or None. """ return self.find_element( self.form_search_list, *args, **kwargs) input_search_list = ( (By.ID, '{}',), (By.NAME, '{}',), ) @wait_for def set_input(self, field, value, **kwargs): input = self.find_element( self.input_search_list, field, **kwargs) input.clear() input.send_keys(value) return input
# -*- coding: utf-8 -*- from __future__ import absolute_import from selenium.webdriver.common.by import By from .utils import wait_for class FormTestMixin: # default search element form_search_list = ( (By.ID, '{}',), (By.NAME, '{}',), (By.XPATH, '//form[@action="{}"]',), (By.XPATH, '//form[@name="{}"]',), (By.XPATH, '//form',), ) @wait_for def get_form(self, *args, **kwargs): """ Return form element or None. """ return self.find_element( self.form_search_list, *args, **kwargs) input_search_list = ( (By.ID, '{}',), (By.NAME, '{}',), ) @wait_for def get_input(self, field, **kwargs): """ Return matching input field. """ return self.find_element( self.input_search_list, field, **kwargs) def set_input(self, field, value, **kwargs): """ Clear the field and enter value. """ element = self.get_input(field, **kwargs) element.clear() element.send_keys(value) return element
Split get_input from set_input in FormTestMixin.
Split get_input from set_input in FormTestMixin. To reduce side effects, this commit moves @wait_for to a new get_input method, so set_input operates immediately.
Python
bsd-3-clause
nimbis/django-selenium-testcase,nimbis/django-selenium-testcase
149a8091333766068cac445db770ea73055d8647
simuvex/procedures/stubs/UserHook.py
simuvex/procedures/stubs/UserHook.py
import simuvex class UserHook(simuvex.SimProcedure): NO_RET = True # pylint: disable=arguments-differ def run(self, user_func=None, user_kwargs=None, default_return_addr=None): result = user_func(self.state, **user_kwargs) if result is None: self.add_successor(self.state, default_return_addr, self.state.se.true, 'Ijk_NoHook') else: for state in result: self.add_successor(state, state.ip, state.scratch.guard, state.scratch.jumpkind)
import simuvex class UserHook(simuvex.SimProcedure): NO_RET = True # pylint: disable=arguments-differ def run(self, user_func=None, user_kwargs=None, default_return_addr=None, length=None): result = user_func(self.state, **user_kwargs) if result is None: self.add_successor(self.state, default_return_addr, self.state.se.true, 'Ijk_NoHook') else: for state in result: self.add_successor(state, state.ip, state.scratch.guard, state.scratch.jumpkind)
Make the userhook take the length arg b/c why not
Make the userhook take the length arg b/c why not
Python
bsd-2-clause
axt/angr,schieb/angr,tyb0807/angr,chubbymaggie/angr,chubbymaggie/simuvex,chubbymaggie/angr,chubbymaggie/simuvex,f-prettyland/angr,axt/angr,angr/angr,f-prettyland/angr,tyb0807/angr,schieb/angr,axt/angr,chubbymaggie/angr,f-prettyland/angr,iamahuman/angr,tyb0807/angr,iamahuman/angr,angr/angr,iamahuman/angr,angr/angr,schieb/angr,chubbymaggie/simuvex,angr/simuvex
8528beef5d10355af07f641b4987df3cd64a7b0f
sprockets/mixins/metrics/__init__.py
sprockets/mixins/metrics/__init__.py
from .influxdb import InfluxDBMixin from .statsd import StatsdMixin version_info = (1, 0, 0) __version__ = '.'.join(str(v) for v in version_info) __all__ = ['InfluxDBMixin', 'StatsdMixin']
try: from .influxdb import InfluxDBMixin from .statsd import StatsdMixin except ImportError as error: def InfluxDBMixin(*args, **kwargs): raise error def StatsdMixin(*args, **kwargs): raise error version_info = (1, 0, 0) __version__ = '.'.join(str(v) for v in version_info) __all__ = ['InfluxDBMixin', 'StatsdMixin']
Make it safe to import __version__.
Make it safe to import __version__.
Python
bsd-3-clause
sprockets/sprockets.mixins.metrics
afa6687c317191b77949ba246f3dcc0909c435f5
organizer/urls/tag.py
organizer/urls/tag.py
from django.conf.urls import url from ..models import Tag from ..utils import DetailView from ..views import ( TagCreate, TagDelete, TagList, TagPageList, TagUpdate) urlpatterns = [ url(r'^$', TagList.as_view(), name='organizer_tag_list'), url(r'^create/$', TagCreate.as_view(), name='organizer_tag_create'), url(r'^(?P<page_number>\d+)/$', TagPageList.as_view(), name='organizer_tag_page'), url(r'^(?P<slug>[\w\-]+)/$', DetailView.as_view( context_object_name='tag', model=Tag, template_name=( 'organizer/tag_detail.html')), name='organizer_tag_detail'), url(r'^(?P<slug>[\w-]+)/delete/$', TagDelete.as_view(), name='organizer_tag_delete'), url(r'^(?P<slug>[\w\-]+)/update/$', TagUpdate.as_view(), name='organizer_tag_update'), ]
from django.conf.urls import url from ..views import ( TagCreate, TagDelete, TagDetail, TagList, TagPageList, TagUpdate) urlpatterns = [ url(r'^$', TagList.as_view(), name='organizer_tag_list'), url(r'^create/$', TagCreate.as_view(), name='organizer_tag_create'), url(r'^(?P<page_number>\d+)/$', TagPageList.as_view(), name='organizer_tag_page'), url(r'^(?P<slug>[\w\-]+)/$', TagDetail.as_view(), name='organizer_tag_detail'), url(r'^(?P<slug>[\w-]+)/delete/$', TagDelete.as_view(), name='organizer_tag_delete'), url(r'^(?P<slug>[\w\-]+)/update/$', TagUpdate.as_view(), name='organizer_tag_update'), ]
Revert to Tag Detail URL pattern.
Ch17: Revert to Tag Detail URL pattern.
Python
bsd-2-clause
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
7b1d520278b8fe33b68103d26f9aa7bb945f6791
cryptography/hazmat/backends/__init__.py
cryptography/hazmat/backends/__init__.py
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from cryptography.hazmat.backends import openssl from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding ) _ALL_BACKENDS = [openssl.backend] if CommonCryptoBinding.is_available(): from cryptography.hazmat.backends import commoncrypto _ALL_BACKENDS.append(commoncrypto.backend) def default_backend(): return openssl.backend
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from cryptography.hazmat.backends import openssl from cryptography.hazmat.backends.multibackend import MultiBackend from cryptography.hazmat.bindings.commoncrypto.binding import ( Binding as CommonCryptoBinding ) _ALL_BACKENDS = [openssl.backend] if CommonCryptoBinding.is_available(): from cryptography.hazmat.backends import commoncrypto _ALL_BACKENDS.append(commoncrypto.backend) _default_backend = MultiBackend(_ALL_BACKENDS) def default_backend(): return _default_backend
Make the default backend be a multi-backend
Make the default backend be a multi-backend
Python
bsd-3-clause
bwhmather/cryptography,Ayrx/cryptography,bwhmather/cryptography,Lukasa/cryptography,Ayrx/cryptography,bwhmather/cryptography,kimvais/cryptography,skeuomorf/cryptography,dstufft/cryptography,kimvais/cryptography,Lukasa/cryptography,dstufft/cryptography,Ayrx/cryptography,skeuomorf/cryptography,Lukasa/cryptography,sholsapp/cryptography,Hasimir/cryptography,dstufft/cryptography,Ayrx/cryptography,skeuomorf/cryptography,dstufft/cryptography,sholsapp/cryptography,Hasimir/cryptography,Hasimir/cryptography,skeuomorf/cryptography,kimvais/cryptography,Hasimir/cryptography,sholsapp/cryptography,dstufft/cryptography,kimvais/cryptography,sholsapp/cryptography,bwhmather/cryptography
0f3b413b269f8b95b6f8073ba39d11f156ae632c
zwebtest.py
zwebtest.py
""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine Copyright 2003 Paul Scott-Murphy, 2014 William McBrine This module provides a unit test suite for the Multicast DNS Service Discovery for Python module. This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ __author__ = 'Paul Scott-Murphy' __maintainer__ = 'William McBrine <[email protected]>' __version__ = '0.14-wmcbrine' __license__ = 'LGPL' from zeroconf import * import socket desc = {'path':'/~paulsm/'} info = ServiceInfo("_http._tcp.local.", "Paul's Test Web Site._http._tcp.local.", socket.inet_aton("10.0.1.2"), 80, 0, 0, desc, "ash-2.local.") r = Zeroconf() print "Registration of a service..." r.registerService(info) print "Waiting..."
from zeroconf import * import socket desc = {'path': '/~paulsm/'} info = ServiceInfo("_http._tcp.local.", "Paul's Test Web Site._http._tcp.local.", socket.inet_aton("10.0.1.2"), 80, 0, 0, desc, "ash-2.local.") r = Zeroconf() print "Registration of a service..." r.registerService(info) raw_input("Waiting (press Enter to exit)...") print "Unregistering..." r.unregisterService(info) r.close()
Allow graceful exit from announcement test.
Allow graceful exit from announcement test.
Python
lgpl-2.1
basilfx/python-zeroconf,daid/python-zeroconf,jstasiak/python-zeroconf,gbiddison/python-zeroconf,giupo/python-zeroconf,AndreaCensi/python-zeroconf,nameoftherose/python-zeroconf,balloob/python-zeroconf,wmcbrine/pyzeroconf,decabyte/python-zeroconf,jantman/python-zeroconf
5957999c52f939691cbe6b8dd5aa929980a24501
tests/unit/test_start.py
tests/unit/test_start.py
import pytest from iwant_bot import start def test_add(): assert start.add_numbers(0, 0) == 0 assert start.add_numbers(1, 1) == 2
from iwant_bot import start def test_add(): assert start.add_numbers(0, 0) == 0 assert start.add_numbers(1, 1) == 2
Remove the unused pytest import
Remove the unused pytest import
Python
mit
kiwicom/iwant-bot
f5d4da9fa71dbb59a9459e376fde8840037bf39a
account_banking_sepa_credit_transfer/__init__.py
account_banking_sepa_credit_transfer/__init__.py
# -*- encoding: utf-8 -*- ############################################################################## # # SEPA Credit Transfer module for OpenERP # Copyright (C) 2010-2013 Akretion (http://www.akretion.com) # @author: Alexis de Lattre <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from . import wizard from . import models
# -*- encoding: utf-8 -*- ############################################################################## # # SEPA Credit Transfer module for OpenERP # Copyright (C) 2010-2013 Akretion (http://www.akretion.com) # @author: Alexis de Lattre <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from . import wizard
Remove import models from init in sepa_credit_transfer
Remove import models from init in sepa_credit_transfer
Python
agpl-3.0
open-synergy/bank-payment,sergio-incaser/bank-payment,hbrunn/bank-payment,sergio-teruel/bank-payment,ndtran/bank-payment,David-Amaro/bank-payment,rlizana/bank-payment,sergiocorato/bank-payment,damdam-s/bank-payment,CompassionCH/bank-payment,CompassionCH/bank-payment,incaser/bank-payment,Antiun/bank-payment,sergio-teruel/bank-payment,damdam-s/bank-payment,syci/bank-payment,sergio-incaser/bank-payment,David-Amaro/bank-payment,rlizana/bank-payment,Antiun/bank-payment,sergiocorato/bank-payment,ndtran/bank-payment,acsone/bank-payment,syci/bank-payment,diagramsoftware/bank-payment
f1a1272bebcc4edf9063c75d3fe29fdcb9e277eb
rml/unitconversion.py
rml/unitconversion.py
import numpy as np class UnitConversion(): def __init__(self, coef): self.p = np.poly1d(coef) def machine_to_physics(self, machine_value): return self.p(machine_value) def physics_to_machine(self, physics_value): roots = (self.p - physics_value).roots positive_roots = [root for root in roots if root > 0] if len(positive_roots) > 0: return positive_roots[0] else: raise ValueError("No corresponding positive machine value:", roots)
import numpy as np from scipy.interpolate import PchipInterpolator class UnitConversion(): def __init__(self, coef): self.p = np.poly1d(coef) def machine_to_physics(self, machine_value): return self.p(machine_value) def physics_to_machine(self, physics_value): roots = (self.p - physics_value).roots positive_roots = [root for root in roots if root > 0] if len(positive_roots) > 0: return positive_roots[0] else: raise ValueError("No corresponding positive machine value:", roots) class PPConversion(): def __init__(self, x, y): self.x = x self.y = y self.pp = PchipInterpolator(x, y) def machine_to_physics(self, machine_value): return self.pp(machine_value) def physics_to_machine(self, physics_value): pass
Add PPChipInterpolator unit conversion class
Add PPChipInterpolator unit conversion class
Python
apache-2.0
willrogers/pml,razvanvasile/RML,willrogers/pml
00e4663940ed1d22e768b3de3d1c645c8649aecc
src/WhiteLibrary/keywords/items/textbox.py
src/WhiteLibrary/keywords/items/textbox.py
from TestStack.White.UIItems import TextBox from WhiteLibrary.keywords.librarycomponent import LibraryComponent from WhiteLibrary.keywords.robotlibcore import keyword class TextBoxKeywords(LibraryComponent): @keyword def input_text_to_textbox(self, locator, input): """ Writes text to a textbox. ``locator`` is the locator of the text box. ``input`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) textBox.Text = input @keyword def verify_text_in_textbox(self, locator, expected): """ Verifies text in a text box. ``locator`` is the locator of the text box. ``expected`` is the expected text of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) self.state._verify_value(expected, textbox.Text) @keyword def get_text_from_textbox(self, locator): """ Gets text from text box. ``locator`` is the locator of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) return textbox.Text
from TestStack.White.UIItems import TextBox from WhiteLibrary.keywords.librarycomponent import LibraryComponent from WhiteLibrary.keywords.robotlibcore import keyword class TextBoxKeywords(LibraryComponent): @keyword def input_text_to_textbox(self, locator, input_value): """ Writes text to a textbox. ``locator`` is the locator of the text box. ``input_value`` is the text to write. """ textBox = self.state._get_typed_item_by_locator(TextBox, locator) textBox.Text = input_value @keyword def verify_text_in_textbox(self, locator, expected): """ Verifies text in a text box. ``locator`` is the locator of the text box. ``expected`` is the expected text of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) self.state._verify_value(expected, textbox.Text) @keyword def get_text_from_textbox(self, locator): """ Gets text from text box. ``locator`` is the locator of the text box. """ textbox = self.state._get_typed_item_by_locator(TextBox, locator) return textbox.Text
Change to better argument name
Change to better argument name
Python
apache-2.0
Omenia/robotframework-whitelibrary,Omenia/robotframework-whitelibrary
23f32962e136dae32b3165f15f8ef7e2d5319530
linter.py
linter.py
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # regex below borrowed from https://github.com/roadhump/SublimeLinter-eslint # and is MIT-licensed from roadhump. # # Written by andrewdeandrade # Copyright (c) 2014 andrewdeandrade # # License: MIT # """This module exports the LintTrap plugin class.""" from SublimeLinter.lint import NodeLinter, util class LintTrap(NodeLinter): """Provides an interface to the lint-trap executable.""" syntax = ('javascript', 'javascriptnext') cmd = ('lint-trap', '--reporter=compact', '-') executable = None version_args = '--version' version_re = r'(?P<version>\d+\.\d+\.\d+)' version_requirement = '>= 0.3.0' regex = ( r'^.+?: line (?P<line>\d+), col (?P<col>\d+), ' r'(?:(?P<error>Error)|(?P<warning>Warning)) - ' r'(?P<message>.+)' ) multiline = False line_col_base = (1, 0) tempfile_suffix = None error_stream = util.STREAM_BOTH selectors = {} word_re = None defaults = {} inline_settings = None inline_overrides = None comment_re = r'\s*/[/*]'
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # regex below borrowed from https://github.com/roadhump/SublimeLinter-eslint # and is MIT-licensed from roadhump. # # Written by andrewdeandrade # Copyright (c) 2014 andrewdeandrade # # License: MIT # """This module exports the LintTrap plugin class.""" from SublimeLinter.lint import NodeLinter, util class LintTrap(NodeLinter): """Provides an interface to the lint-trap executable.""" syntax = ('javascript', 'javascriptnext') cmd = ('lint-trap', '--reporter=compact', '-') executable = None version_args = '--version' version_re = r'(?P<version>\d+\.\d+\.\d+)' version_requirement = '>= 0.4.0' regex = ( r'^.+?: line (?P<line>\d+), col (?P<col>\d+), ' r'(?:(?P<error>Error)|(?P<warning>Warning)) - ' r'(?P<message>.+)' ) multiline = False line_col_base = (1, 0) tempfile_suffix = None error_stream = util.STREAM_BOTH selectors = {} word_re = None defaults = {} inline_settings = None inline_overrides = None comment_re = r'\s*/[/*]'
Set minimum lint-trap version to 0.4.0
Set minimum lint-trap version to 0.4.0
Python
mit
Raynos/SublimeLinter-contrib-lint-trap
5cbae4953ea0bde97b0cf7cd914a3a5263fa74be
manage.py
manage.py
#!/usr/bin/env python import os import sys import re def read_env(): """Pulled from Honcho code with minor updates, reads local default environment variables from a .env file located in the project root directory. """ try: with open('.env') as f: content = f.read() except IOError: content = '' for line in content.splitlines(): m1 = re.match(r'\A([A-Za-z_0-9]+)=(.*)\Z', line) if m1: key, val = m1.group(1), m1.group(2) m2 = re.match(r"\A'(.*)'\Z", val) if m2: val = m2.group(1) m3 = re.match(r'\A"(.*)"\Z', val) if m3: val = re.sub(r'\\(.)', r'\1', m3.group(1)) os.environ.setdefault(key, val) if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "feedleap.settings") from django.core.management import execute_from_command_line read_env() execute_from_command_line(sys.argv)
#!/usr/bin/env python import os import sys import re def read_env(): """Pulled from Honcho code with minor updates, reads local default environment variables from a .env file located in the project root directory. """ try: with open('.env') as f: content = f.read() except IOError: content = '' for line in content.splitlines(): m1 = re.match(r'\A([A-Za-z_0-9]+)=(.*)\Z', line) if m1: key, val = m1.group(1), m1.group(2) m2 = re.match(r"\A'(.*)'\Z", val) if m2: val = m2.group(1) m3 = re.match(r'\A"(.*)"\Z', val) if m3: val = re.sub(r'\\(.)', r'\1', m3.group(1)) os.environ.setdefault(key, val) if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "feedleap.settings.base") from django.core.management import execute_from_command_line read_env() execute_from_command_line(sys.argv)
Make base module default DJANGO_SETTINGS_MODULE
Make base module default DJANGO_SETTINGS_MODULE
Python
mit
jpadilla/feedleap,jpadilla/feedleap
39dbbac659e9ae9c1bbad8a979cc99ef6eafaeff
models.py
models.py
#!/usr/bin/env python import os from flask import Flask from flask.ext.sqlalchemy import SQLAlchemy app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL') db = SQLAlchemy(app) class FoodMenu(db.Model): id = db.Column(db.Integer, primary_key=True) result = db.Column(db.Text) def __init__(self, result): self.result = result def __repr__(self): return self.result class FoodServices(db.Model): id = db.Column(db.Integer, primary_key=True) result = db.Column(db.Text) def __init__(self, result): self.result = result def __repr__(self): return self.result
#!/usr/bin/env python import os from flask import Flask from flask.ext.sqlalchemy import SQLAlchemy app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL') db = SQLAlchemy(app) class FoodMenu(db.Model): id = db.Column(db.Integer, primary_key=True) result = db.Column(db.Text) def __init__(self, result): self.result = result def __repr__(self): return "<FoodMenu('%s')>" % (self.result) class FoodServices(db.Model): id = db.Column(db.Integer, primary_key=True) result = db.Column(db.Text) def __init__(self, result): self.result = result def __repr__(self): return "<FoodServices('%s')>" % (self.result)
Include class name in model representations
Include class name in model representations
Python
mit
alykhank/FoodMenu,alykhank/FoodMenu,alykhank/FoodMenu
e16c65ec8c774cc27f9f7aa43e88521c3854b6b7
ella/imports/management/commands/fetchimports.py
ella/imports/management/commands/fetchimports.py
from django.core.management.base import BaseCommand from optparse import make_option class Command(BaseCommand): help = 'Fetch all registered imports' def handle(self, *test_labels, **options): from ella.imports.models import fetch_all fetch_all()
from django.core.management.base import NoArgsCommand from optparse import make_option import sys class Command(NoArgsCommand): help = 'Fetch all registered imports' def handle(self, *test_labels, **options): from ella.imports.models import fetch_all errors = fetch_all() if errors: sys.exit(errors)
Return exit code (count of errors)
Return exit code (count of errors) git-svn-id: 6ce22b13eace8fe533dbb322c2bb0986ea4cd3e6@520 2d143e24-0a30-0410-89d7-a2e95868dc81
Python
bsd-3-clause
MichalMaM/ella,MichalMaM/ella,WhiskeyMedia/ella,whalerock/ella,ella/ella,whalerock/ella,WhiskeyMedia/ella,petrlosa/ella,petrlosa/ella,whalerock/ella
45c400e02fbeb5b455e27fef81e47e45f274eaec
core/forms.py
core/forms.py
from django import forms class GameForm(forms.Form): amount = forms.IntegerField() def __init__(self, *args, **kwargs): super(GameForm, self).__init__(*args, **kwargs) for name, field in self.fields.items(): if isinstance(field, forms.IntegerField): self.fields[name].widget.input_type = "number" if field.required: self.fields[name].widget.attrs["required"] = ""
from django import forms class GameForm(forms.Form): amount = forms.IntegerField(initial=100) def __init__(self, *args, **kwargs): super(GameForm, self).__init__(*args, **kwargs) for name, field in self.fields.items(): if isinstance(field, forms.IntegerField): self.fields[name].widget.input_type = "number" if field.required: self.fields[name].widget.attrs["required"] = ""
Add a default bet amount.
Add a default bet amount.
Python
bsd-2-clause
stephenmcd/gamblor,stephenmcd/gamblor
fcc571d2f4c35ac8f0e94e51e6ac94a0c051062d
src/rinoh/__init__.py
src/rinoh/__init__.py
# This file is part of rinohtype, the Python document preparation system. # # Copyright (c) Brecht Machiels. # # Use of this source code is subject to the terms of the GNU Affero General # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. """rinohtype """ import os import sys from importlib import import_module from .version import __version__, __release_date__ if sys.version_info < (3, 3): print('rinohtype requires Python 3.3 or higher') sys.exit(1) CORE_MODULES = ['annotation', 'color', 'dimension', 'document', 'draw', 'float', 'flowable', 'highlight', 'index', 'inline', 'layout', 'number', 'paper', 'paragraph', 'reference', 'structure', 'style', 'table', 'text'] __all__ = CORE_MODULES + ['font', 'frontend', 'backend', 'styleds', 'styles'] DATA_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data') # create proxies for the core classes/constants at the top level for easy access for name in CORE_MODULES: module = import_module('.' + name, __name__) module_dict, module_all = module.__dict__, module.__all__ globals().update({name: module_dict[name] for name in module_all}) __all__ += module_all
# This file is part of rinohtype, the Python document preparation system. # # Copyright (c) Brecht Machiels. # # Use of this source code is subject to the terms of the GNU Affero General # Public License v3. See the LICENSE file or http://www.gnu.org/licenses/. """rinohtype """ import os import sys from importlib import import_module from .version import __version__, __release_date__ if sys.version_info < (3, 3): print('rinohtype requires Python 3.3 or higher') sys.exit(1) CORE_MODULES = ['annotation', 'attribute', 'color', 'dimension', 'document', 'draw', 'element', 'float', 'flowable', 'highlight', 'index', 'inline', 'layout', 'number', 'paper', 'paragraph', 'reference', 'structure', 'style', 'table', 'template', 'text'] __all__ = CORE_MODULES + ['font', 'fonts', 'frontend', 'backend', 'resource', 'styleds', 'styles', 'stylesheets', 'templates', 'strings', 'language'] DATA_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'data') # create proxies for the core classes/constants at the top level for easy access for name in CORE_MODULES: module = import_module('.' + name, __name__) module_dict, module_all = module.__dict__, module.__all__ globals().update({name: module_dict[name] for name in module_all}) __all__ += module_all
Update the top-level rinoh package
Update the top-level rinoh package Make all symbols and modules relevant to users available directly from the rinoh package.
Python
agpl-3.0
brechtm/rinohtype,brechtm/rinohtype,brechtm/rinohtype
f9293d838a21f495ea9b56cbe0f6f75533360aed
pyinfra/api/config.py
pyinfra/api/config.py
import six from pyinfra import logger class Config(object): ''' The default/base configuration options for a pyinfra deploy. ''' state = None # % of hosts which have to fail for all operations to stop FAIL_PERCENT = None # Seconds to timeout SSH connections CONNECT_TIMEOUT = 10 # Temporary directory (on the remote side) to use for caching any files/downloads TEMP_DIR = '/tmp' # Gevent pool size (defaults to #of target hosts) PARALLEL = None # Specify a minimum required pyinfra version for a deploy MIN_PYINFRA_VERSION = None # All these can be overridden inside individual operation calls: # Switch to this user (from ssh_user) using su before executing operations SU_USER = None USE_SU_LOGIN = False # Use sudo and optional user SUDO = False SUDO_USER = None PRESERVE_SUDO_ENV = False USE_SUDO_LOGIN = False USE_SUDO_PASSWORD = False # Only show errors, but don't count as failure IGNORE_ERRORS = False # Shell to use to execute commands SHELL = None def __init__(self, **kwargs): # Always apply some env env = kwargs.pop('ENV', {}) self.ENV = env # Replace TIMEOUT -> CONNECT_TIMEOUT if 'TIMEOUT' in kwargs: logger.warning(( 'Config.TIMEOUT is deprecated, ' 'please use Config.CONNECT_TIMEOUT instead' )) kwargs['CONNECT_TIMEOUT'] = kwargs.pop('TIMEOUT') # Apply kwargs for key, value in six.iteritems(kwargs): setattr(self, key, value)
import six class Config(object): ''' The default/base configuration options for a pyinfra deploy. ''' state = None # % of hosts which have to fail for all operations to stop FAIL_PERCENT = None # Seconds to timeout SSH connections CONNECT_TIMEOUT = 10 # Temporary directory (on the remote side) to use for caching any files/downloads TEMP_DIR = '/tmp' # Gevent pool size (defaults to #of target hosts) PARALLEL = None # Specify a minimum required pyinfra version for a deploy MIN_PYINFRA_VERSION = None # All these can be overridden inside individual operation calls: # Switch to this user (from ssh_user) using su before executing operations SU_USER = None USE_SU_LOGIN = False # Use sudo and optional user SUDO = False SUDO_USER = None PRESERVE_SUDO_ENV = False USE_SUDO_LOGIN = False USE_SUDO_PASSWORD = False # Only show errors, but don't count as failure IGNORE_ERRORS = False # Shell to use to execute commands SHELL = None def __init__(self, **kwargs): # Always apply some env env = kwargs.pop('ENV', {}) self.ENV = env # Apply kwargs for key, value in six.iteritems(kwargs): setattr(self, key, value)
Remove support for deprecated `Config.TIMEOUT`.
Remove support for deprecated `Config.TIMEOUT`.
Python
mit
Fizzadar/pyinfra,Fizzadar/pyinfra
94596f036270f8958afd84eb9788ce2b15f5cbd4
registration/admin.py
registration/admin.py
from django.contrib import admin from registration.models import RegistrationProfile class RegistrationAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'activation_key_expired') search_fields = ('user__username', 'user__first_name') admin.site.register(RegistrationProfile, RegistrationAdmin)
from django.contrib import admin from registration.models import RegistrationProfile class RegistrationAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'activation_key_expired') raw_id_fields = ['user'] search_fields = ('user__username', 'user__first_name') admin.site.register(RegistrationProfile, RegistrationAdmin)
Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users.
Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users.
Python
bsd-3-clause
rafaduran/django-pluggable-registration,rbarrois/django-registration,maraujop/django-registration,thedod/django-registration-hg-mirror,CoatedMoose/django-registration,AndrewLvov/django-registration,AndrewLvov/django-registration,aptivate/django-registration,fedenko/django-registration,CoatedMoose/django-registration,christang/django-registration-1.5,siddharthsarda/django-registration,QPmedia/django-registration,christang/django-registration-1.5,newvem/django-registration,rbarrois/django-registration,QPmedia/django-registration,pelletier/django-registration-81,fedenko/django-registration,newvem/django-registration,aptivate/django-registration
4765a88535262df6373e4f8d2111032fc290da85
atest/testdata/parsing/custom-lang.py
atest/testdata/parsing/custom-lang.py
from robot.conf import Language class Fi(Language): setting_headers = {'H 1'} variable_headers = {'H 2'} test_case_headers = {'H 3'} task_headers = {'H 4'} keyword_headers = {'H 5'} comment_headers = {'H 6'} library = 'L' resource = 'R' variables = 'V' documentation = 'S 1' metadata = 'S 2' suite_setup = 'S 3' suite_teardown = 'S 4' test_setup = 'S 5' test_teardown = 'S 6' test_template = 'S 7' force_tags = 'S 8' default_tags = 'S 9' test_timeout = 'S 10' setup = 'S 11' teardown = 'S 12' template = 'S 13' tags = 'S 14' timeout = 'S 15' arguments = 'S 16' return_ = 'S 17' bdd_prefixes = {}
from robot.conf import Language class Custom(Language): setting_headers = {'H 1'} variable_headers = {'H 2'} test_case_headers = {'H 3'} task_headers = {'H 4'} keyword_headers = {'H 5'} comment_headers = {'H 6'} library = 'L' resource = 'R' variables = 'V' documentation = 'S 1' metadata = 'S 2' suite_setup = 'S 3' suite_teardown = 'S 4' test_setup = 'S 5' test_teardown = 'S 6' test_template = 'S 7' force_tags = 'S 8' default_tags = 'S 9' test_timeout = 'S 10' setup = 'S 11' teardown = 'S 12' template = 'S 13' tags = 'S 14' timeout = 'S 15' arguments = 'S 16' return_ = 'S 17' bdd_prefixes = {}
Fix class name in test file
Fix class name in test file
Python
apache-2.0
robotframework/robotframework,HelioGuilherme66/robotframework,HelioGuilherme66/robotframework,HelioGuilherme66/robotframework,robotframework/robotframework,robotframework/robotframework
22f3d6d6fdc3e5f07ead782828b406c9a27d0199
UDPSender.py
UDPSender.py
from can import Listener import socket class UDPSender(Listener): dataConvert = {"0x600": {"String":"RPM:", "Slot":0, "Conversion":1}, "0x601": {"String":"OIL:", "Slot":2, "Conversion":(1/81.92)}} def __init__(self, IP="10.0.0.4", PORT=5555): self.ip = IP self.port = PORT self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) def on_message_received(self, msg): udpMessage = self.can_to_udp_message(msg) if udpMessage: self.sock.sendto(udpMessage.encode(), (self.ip, self.port)) def can_to_udp_message(self, msg): hexId = msg.arbritation_id if self.dataConvert.get(hexId): dataId = self.dataConvert[hexId]["String"] dataSlot = self.dataConvert[hexId]["Slot"] dataConversion = self.dataConvert[hexID]["Conversion"] data = ( (msg.data[dataSlot] << 8) + msg.data[dataSlot + 1] ) * dataConversion udpMessage = dataId + data return udpMessage else: return None def __del__(self): self.sock.close()
from can import Listener from socket import socket class UDPSender(Listener): dataConvert = {"0x600": {"String":"RPM:", "Slot":0, "Conversion":1}, "0x601": {"String":"OIL:", "Slot":2, "Conversion":(1/81.92)}} def __init__(self, IP="10.0.0.4", PORT=5555): self.ip = IP self.port = PORT self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) def on_message_received(self, msg): udpMessage = self.can_to_udp_message(msg) if udpMessage: self.sock.sendto(udpMessage.encode(), (self.ip, self.port)) def can_to_udp_message(self, msg): hexId = msg.arbritation_id if self.dataConvert.get(hexId): dataId = self.dataConvert[hexId]["String"] dataSlot = self.dataConvert[hexId]["Slot"] dataConversion = self.dataConvert[hexID]["Conversion"] data = ( (msg.data[dataSlot] << 8) + msg.data[dataSlot + 1] ) * dataConversion udpMessage = dataId + data return udpMessage else: return None def __del__(self): self.sock.close()
Change of import of libraries.
Change of import of libraries. Tried to fix issue displayed below. [root@alarm BeagleDash]# python3.3 CANtoUDP.py Traceback (most recent call last): File "CANtoUDP.py", line 10, in <module> listeners = [csv, UDPSender()] TypeError: 'module' object is not callable Exception AttributeError: "'super' object has no attribute '__del__'" in <bound method CSVWriter.__del__ of <can.CAN.CSVWriter object at 0xb6867730>> ignored
Python
mit
TAURacing/BeagleDash
da5db320bd96ff881be23c91f8f5d69505d67946
src/project_name/urls.py
src/project_name/urls.py
from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns from django.views.generic.base import TemplateView urlpatterns = [ url(r'^admin_tools/', include('admin_tools.urls')), url(r'^admin/', include(admin.site.urls)), # Simply show the master template. url(r'^$', TemplateView.as_view(template_name='demo.html')), ] # NOTE: The staticfiles_urlpatterns also discovers static files (ie. no need to run collectstatic). Both the static # folder and the media folder are only served via Django if DEBUG = True. urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.contrib.staticfiles.urls import staticfiles_urlpatterns from django.views.generic.base import TemplateView urlpatterns = [ url(r'^admin_tools/', include('admin_tools.urls')), url(r'^admin/', include(admin.site.urls)), # Simply show the master template. url(r'^$', TemplateView.as_view(template_name='demo.html')), ] # NOTE: The staticfiles_urlpatterns also discovers static files (ie. no need to run collectstatic). Both the static # folder and the media folder are only served via Django if DEBUG = True. urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) if settings.DEBUG: import debug_toolbar urlpatterns += [ url(r'^__debug__/', include(debug_toolbar.urls)), ]
Add missing configuration for DjDT
Add missing configuration for DjDT
Python
mit
Clarity-89/clarityv2,Clarity-89/clarityv2,Clarity-89/clarityv2,Clarity-89/clarityv2
8be2d6c735ebdca542063c34d1048cb70ba7f882
bob/macro.py
bob/macro.py
# # Copyright (c) 2008 rPath, Inc. # # All rights reserved. # ''' Mechanism for expanding macros from a trove context. ''' import logging def expand(raw, parent, trove=None): '''Transform a raw string with available configuration data.''' macros = {} # Basic info macros.update(parent.cfg.macro) for cfg_item in ('tag',): macros[cfg_item] = getattr(parent.cfg, cfg_item) # Additional info available in trove contexts if trove: if trove in parent.targets: hg = parent.targets[trove].hg if hg and parent.hg.has_key(hg): macros['hg'] = parent.hg[hg][1] elif hg: logging.warning('Trove %s references undefined Hg ' 'repository %s', trove, hg) return raw % macros
# # Copyright (c) 2008 rPath, Inc. # # All rights reserved. # ''' Mechanism for expanding macros from a trove context. ''' import logging def expand(raw, parent, trove=None): '''Transform a raw string with available configuration data.''' macros = {} # Basic info macros.update(parent.cfg.macro) for cfg_item in ('targetLabel',): macros[cfg_item] = getattr(parent.cfg, cfg_item) # Additional info available in trove contexts if trove: if trove in parent.targets: hg = parent.targets[trove].hg if hg and parent.hg.has_key(hg): macros['hg'] = parent.hg[hg][1] elif hg: logging.warning('Trove %s references undefined Hg ' 'repository %s', trove, hg) return raw % macros
Delete missed reference to tag config option
Delete missed reference to tag config option
Python
apache-2.0
sassoftware/bob,sassoftware/bob
a3b108bdb03a74be5156a6b34219758f04b75fe8
config.py
config.py
from os import getenv class Config(object): API_KEY = getenv('API_KEY') DAEMON_SLEEP_INTERVAL = 6 # hours MAIL_DEBUG = False MAIL_DEFAULT_SENDER = getenv('SENDER_EMAIL', '[email protected]') MAIL_PASSWORD = getenv('MAILGUN_SMTP_PASSWORD', None) MAIL_PORT = getenv('MAILGUN_SMTP_PORT', 25) MAIL_SERVER = getenv('MAILGUN_SMTP_SERVER', 'localhost') MAIL_USERNAME = getenv('MAILGUN_SMTP_LOGIN', None) MAIL_USE_SSL = False NOTIFY_EMAIL = getenv('NOTIFY_EMAIL', '[email protected]') SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL').replace('mysql2:', 'mysql:') class ProductionConfig(Config): DEBUG = False TESTING = False class DevelopmentConfig(Config): DEBUG = True MAIL_DEBUG = True SQLALCHEMY_DATABASE_URI = 'sqlite:///app.db' class TestingConfig(Config): TESTING = True
from os import getenv class Config(object): API_KEY = getenv('API_KEY') DAEMON_SLEEP_INTERVAL = 6 # hours MAIL_DEBUG = False MAIL_DEFAULT_SENDER = getenv('SENDER_EMAIL', '[email protected]') MAIL_PASSWORD = getenv('MAILGUN_SMTP_PASSWORD', None) MAIL_PORT = getenv('MAILGUN_SMTP_PORT', 25) MAIL_SERVER = getenv('MAILGUN_SMTP_SERVER', 'localhost') MAIL_USERNAME = getenv('MAILGUN_SMTP_LOGIN', None) MAIL_USE_SSL = False NOTIFY_EMAIL = getenv('NOTIFY_EMAIL', '[email protected]') SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL', 'sqlite:///app.db').replace('mysql2:', 'mysql:') class ProductionConfig(Config): DEBUG = False TESTING = False class DevelopmentConfig(Config): DEBUG = True MAIL_DEBUG = True class TestingConfig(Config): TESTING = True
Simplify default database setting, but allow it to be overridden
Simplify default database setting, but allow it to be overridden
Python
mit
taeram/dynamite,taeram/dynamite
1da2c0e00d43c4fb9a7039e98401d333d387a057
saleor/search/views.py
saleor/search/views.py
from __future__ import unicode_literals from django.core.paginator import Paginator, InvalidPage from django.conf import settings from django.http import Http404 from django.shortcuts import render from .forms import SearchForm from ..product.utils import products_with_details def paginate_results(results, get_data, paginate_by=25): paginator = Paginator(results, paginate_by) page_number = get_data.get('page', 1) try: page = paginator.page(page_number) except InvalidPage: raise Http404('No such page!') return page def search(request): form = SearchForm(data=request.GET or None) if form.is_valid(): visible_products = products_with_details(request.user) results = form.search(model_or_queryset=visible_products) page = paginate_results(results, request.GET, settings.PAGINATE_BY) else: page = form.no_query_found() query = form.cleaned_data['q'] ctx = { 'query': query, 'results': page, 'query_string': '?q=%s' % query} return render(request, 'search/results.html', ctx)
from __future__ import unicode_literals from django.core.paginator import Paginator, InvalidPage from django.conf import settings from django.http import Http404 from django.shortcuts import render from .forms import SearchForm from ..product.utils import products_with_details def paginate_results(results, get_data, paginate_by=25): paginator = Paginator(results, paginate_by) page_number = get_data.get('page', 1) try: page = paginator.page(page_number) except InvalidPage: raise Http404('No such page!') return page def search(request): form = SearchForm(data=request.GET or None) if form.is_valid(): visible_products = products_with_details(request.user) results = form.search(model_or_queryset=visible_products) page = paginate_results(results, request.GET, settings.PAGINATE_BY) else: page = [] query = form.cleaned_data.get('q', '') ctx = { 'query': query, 'results': page, 'query_string': '?q=%s' % query} return render(request, 'search/results.html', ctx)
Fix empty search results logic
Fix empty search results logic
Python
bsd-3-clause
mociepka/saleor,jreigel/saleor,itbabu/saleor,maferelo/saleor,KenMutemi/saleor,HyperManTT/ECommerceSaleor,HyperManTT/ECommerceSaleor,HyperManTT/ECommerceSaleor,KenMutemi/saleor,tfroehlich82/saleor,jreigel/saleor,KenMutemi/saleor,itbabu/saleor,car3oon/saleor,maferelo/saleor,car3oon/saleor,UITools/saleor,maferelo/saleor,itbabu/saleor,mociepka/saleor,car3oon/saleor,UITools/saleor,UITools/saleor,UITools/saleor,mociepka/saleor,tfroehlich82/saleor,tfroehlich82/saleor,jreigel/saleor,UITools/saleor
6c9b0b0c7e78524ea889f8a89c2eba8acb57f782
gaphor/ui/iconname.py
gaphor/ui/iconname.py
""" With `get_icon_name` you can retrieve an icon name for a UML model element. """ from gaphor import UML import re from functools import singledispatch TO_KEBAB = re.compile(r"([a-z])([A-Z]+)") def to_kebab_case(s): return TO_KEBAB.sub("\\1-\\2", s).lower() @singledispatch def get_icon_name(element): """ Get an icon name for a UML model element. """ return "gaphor-" + to_kebab_case(element.__class__.__name__) @get_icon_name.register(UML.Class) def get_name_for_class(element): if element.extension: return "gaphor-metaclass" else: return "gaphor-class" @get_icon_name.register(UML.Property) def get_name_for_property(element): if element.association: return "gaphor-association-end" else: return "gaphor-property"
""" With `get_icon_name` you can retrieve an icon name for a UML model element. """ from gaphor import UML import re from functools import singledispatch TO_KEBAB = re.compile(r"([a-z])([A-Z]+)") def to_kebab_case(s): return TO_KEBAB.sub("\\1-\\2", s).lower() @singledispatch def get_icon_name(element): """ Get an icon name for a UML model element. """ return "gaphor-" + to_kebab_case(element.__class__.__name__) @get_icon_name.register(UML.Class) def get_name_for_class(element): if isinstance(element, UML.Stereotype): return "gaphor-stereotype" elif element.extension: return "gaphor-metaclass" else: return "gaphor-class" @get_icon_name.register(UML.Property) def get_name_for_property(element): if element.association: return "gaphor-association-end" else: return "gaphor-property"
Fix stereotype icon in namespace view
Fix stereotype icon in namespace view
Python
lgpl-2.1
amolenaar/gaphor,amolenaar/gaphor
7c02e103a0af86016500625e20a4d7667e568265
script/jsonify-book.py
script/jsonify-book.py
import sys from glob import glob from os.path import basename import json book_dir, out_dir = sys.argv[1:3] files = [basename(x).rstrip(".xhtml") for x in glob(f"{book_dir}/*.xhtml")] json_data = {} for path in files: with open(f"{book_dir}/{path}.json", "r") as meta_part: json_data = json.load(meta_part) with open(f"{book_dir}/{path}.xhtml", "r") as book_part: content = book_part.read() json_data["content"] = str(content) with open(f"{out_dir}/{path}.json", 'w') as outfile: json.dump(json_data, outfile)
import sys from glob import glob from os.path import basename import json book_dir, out_dir = sys.argv[1:3] files = [basename(x).rstrip(".xhtml") for x in glob(f"{book_dir}/*.xhtml")] json_data = {} for path in files: with open(f"{book_dir}/{path}-metadata.json", "r") as meta_part: json_data = json.load(meta_part) with open(f"{book_dir}/{path}.xhtml", "r") as book_part: content = book_part.read() json_data["content"] = str(content) with open(f"{out_dir}/{path}.json", 'w') as outfile: json.dump(json_data, outfile)
Add metadata to jsonify json input filename
Add metadata to jsonify json input filename
Python
lgpl-2.1
Connexions/cte,Connexions/cnx-rulesets,Connexions/cnx-recipes,Connexions/cte,Connexions/cnx-rulesets,Connexions/cnx-recipes,Connexions/cnx-recipes,Connexions/cnx-recipes,Connexions/cnx-recipes,Connexions/cnx-rulesets,Connexions/cnx-rulesets
9a33ac3f563ad657129d64cb591f08f9fd2a00a2
tests/test_command.py
tests/test_command.py
"""Unittest of command entry point.""" # Copyright 2015 Masayuki Yamamoto # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest class TestCreateMainParser(unittest.TestCase): """yanico.command.create_main_parser() test."""
"""Unittest of command entry point.""" # Copyright 2015 Masayuki Yamamoto # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest import unittest.mock as mock import yanico import yanico.command class TestCreateMainParser(unittest.TestCase): """yanico.command.create_main_parser() test.""" def test_version(self): """Parse '--version' option.""" parser = yanico.command.create_main_parser() with mock.patch.object(parser, '_print_message') as print_message: self.assertRaises(SystemExit, parser.parse_args, ['--version']) print_message.assert_called_once_with('yanico version ' + yanico.__version__ + '\n', mock.ANY)
Add command test for '--version' option
Add command test for '--version' option Check output that is program name, "version" and version number.
Python
apache-2.0
ma8ma/yanico
9d23940c430a4f95ec11b33362141ec2ffc3f533
src/tempel/models.py
src/tempel/models.py
from datetime import datetime, timedelta from django.db import models from django.conf import settings from tempel import utils def default_edit_expires(): return datetime.now() + timedelta(seconds=60*settings.TEMPEL_EDIT_AGE) class Entry(models.Model): content = models.TextField() language = models.CharField(max_length=20, choices=utils.get_languages()) created = models.DateTimeField(default=datetime.now) active = models.BooleanField(default=True) edit_token = models.CharField(max_length=8, default=utils.create_token, null=True) edit_expires = models.DateTimeField(default=default_edit_expires, null=True) class Meta: ordering = ['-created'] verbose_name_plural = "entries" def get_language(self): return utils.get_language(self.language) def get_mimetype(self): return utils.get_mimetype(self.language) def get_filename(self): return '%s.%s' % (self.id, self.get_extension()) def get_extension(self): return utils.get_extension(self.language) def __unicode__(self): return '<Entry: id=%s lang=%s>' % (self.id, self.language)
from datetime import datetime, timedelta from django.db import models from django.conf import settings from tempel import utils def default_edit_expires(): return datetime.now() + timedelta(seconds=60*settings.TEMPEL_EDIT_AGE) class Entry(models.Model): content = models.TextField() language = models.CharField(max_length=20, choices=utils.get_languages()) created = models.DateTimeField(default=datetime.now) active = models.BooleanField(default=True) edit_token = models.CharField(max_length=8, default=utils.create_token, null=True) edit_expires = models.DateTimeField(default=default_edit_expires, null=True) class Meta: ordering = ['-created'] verbose_name_plural = "entries" def get_language(self): return utils.get_language(self.language) def get_mimetype(self): return utils.get_mimetype(self.language) def get_filename(self): return '%s.%s' % (self.id, self.get_extension()) def get_extension(self): return utils.get_extension(self.language) def done_editable(self): self.edit_token = None self.save() def is_editable(self, token): time_ok = self.edit_expires is not None and self.edit_expires >= datetime.now() token_ok = token == self.edit_token return time_ok and time_ok def __unicode__(self): return '<Entry: id=%s lang=%s>' % (self.id, self.language)
Add is_editable and done_editable functions to Entry
Add is_editable and done_editable functions to Entry
Python
agpl-3.0
fajran/tempel
02ca3946662fd996f77c30d9e61d8fc8d9243de7
trac/upgrades/db20.py
trac/upgrades/db20.py
from trac.db import Table, Column, Index, DatabaseManager from trac.core import TracError from trac.versioncontrol.cache import CACHE_YOUNGEST_REV def do_upgrade(env, ver, cursor): """Modify the repository cache scheme (if needed) Now we use the 'youngest_rev' entry in the system table to explicitly store the youngest rev in the cache. """ db = env.get_db_cnx() try: repos = env.get_repository() youngest = repos.get_youngest_rev_in_cache(db) or '' # deleting first, for the 0.11dev and 0.10.4dev users cursor.execute("DELETE FROM system WHERE name=%s", (CACHE_YOUNGEST_REV,)) cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)", (CACHE_YOUNGEST_REV, youngest)) except TracError: # no repository available pass
from trac.db import Table, Column, Index, DatabaseManager from trac.core import TracError from trac.versioncontrol.cache import CACHE_YOUNGEST_REV def do_upgrade(env, ver, cursor): """Modify the repository cache scheme (if needed) Now we use the 'youngest_rev' entry in the system table to explicitly store the youngest rev in the cache. """ db = env.get_db_cnx() try: repos = env.get_repository() youngest = repos.get_youngest_rev_in_cache(db) or '' except TracError: # no repository available youngest = '' # deleting first, for the 0.11dev and 0.10.4dev users cursor.execute("DELETE FROM system WHERE name=%s", (CACHE_YOUNGEST_REV,)) cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)", (CACHE_YOUNGEST_REV, youngest))
Make db upgrade step 20 more robust.
Make db upgrade step 20 more robust. git-svn-id: f68c6b3b1dcd5d00a2560c384475aaef3bc99487@5815 af82e41b-90c4-0310-8c96-b1721e28e2e2
Python
bsd-3-clause
exocad/exotrac,dokipen/trac,moreati/trac-gitsvn,exocad/exotrac,dokipen/trac,dafrito/trac-mirror,dafrito/trac-mirror,moreati/trac-gitsvn,dafrito/trac-mirror,dafrito/trac-mirror,exocad/exotrac,dokipen/trac,moreati/trac-gitsvn,exocad/exotrac,moreati/trac-gitsvn
ceb75d6f58ab16e3afdf3c7b00de539012d790d5
djangopeoplenet/manage.py
djangopeoplenet/manage.py
#!/usr/bin/env python import sys paths = ( '/home/simon/sites/djangopeople.net', '/home/simon/sites/djangopeople.net/djangopeoplenet', '/home/simon/sites/djangopeople.net/djangopeoplenet/djangopeople/lib', ) for path in paths: if not path in sys.path: sys.path.insert(0, path) from django.core.management import execute_manager try: import settings # Assumed to be in the same directory. except ImportError: import sys sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__) sys.exit(1) if __name__ == "__main__": execute_manager(settings)
#!/usr/bin/env python import sys, os root = os.path.dirname(__file__) paths = ( os.path.join(root), os.path.join(root, "djangopeople", "lib"), ) for path in paths: if not path in sys.path: sys.path.insert(0, path) from django.core.management import execute_manager try: import settings # Assumed to be in the same directory. except ImportError: import sys sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__) sys.exit(1) if __name__ == "__main__": execute_manager(settings)
Make the lib imports work on other computers than Simon's
Make the lib imports work on other computers than Simon's Signed-off-by: Simon Willison <[email protected]>
Python
mit
brutasse/djangopeople,django/djangopeople,polinom/djangopeople,brutasse/djangopeople,polinom/djangopeople,brutasse/djangopeople,polinom/djangopeople,django/djangopeople,polinom/djangopeople,django/djangopeople,brutasse/djangopeople
bb34b21ebd2378f944498708ac4f13d16aa61aa1
src/mist/io/tests/api/features/steps/backends.py
src/mist/io/tests/api/features/steps/backends.py
from behave import * @given(u'"{text}" backend added') def given_backend(context, text): backends = context.client.list_backends() for backend in backends: if text in backend['title']: return @when(u'I list backends') def list_backends(context): context.backends = context.client.list_backends()
from behave import * @given(u'"{text}" backend added through api') def given_backend(context, text): backends = context.client.list_backends() for backend in backends: if text in backend['title']: return @when(u'I list backends') def list_backends(context): context.backends = context.client.list_backends()
Rename Behave steps for api tests
Rename Behave steps for api tests
Python
agpl-3.0
johnnyWalnut/mist.io,DimensionDataCBUSydney/mist.io,zBMNForks/mist.io,afivos/mist.io,Lao-liu/mist.io,Lao-liu/mist.io,munkiat/mist.io,kelonye/mist.io,kelonye/mist.io,afivos/mist.io,Lao-liu/mist.io,Lao-liu/mist.io,DimensionDataCBUSydney/mist.io,johnnyWalnut/mist.io,zBMNForks/mist.io,DimensionDataCBUSydney/mist.io,DimensionDataCBUSydney/mist.io,munkiat/mist.io,zBMNForks/mist.io,munkiat/mist.io,johnnyWalnut/mist.io,afivos/mist.io,munkiat/mist.io,kelonye/mist.io
6f42f03f950e4c3967eb1efd7feb9364c9fbaf1f
google.py
google.py
import os from werkzeug.contrib.fixers import ProxyFix from flask import Flask, redirect, url_for from flask_dance.contrib.google import make_google_blueprint, google from raven.contrib.flask import Sentry app = Flask(__name__) app.wsgi_app = ProxyFix(app.wsgi_app) sentry = Sentry(app) app.secret_key = os.environ.get("FLASK_SECRET_KEY", "supersekrit") app.config["GOOGLE_OAUTH_CLIENT_ID"] = os.environ.get("GOOGLE_OAUTH_CLIENT_ID") app.config["GOOGLE_OAUTH_CLIENT_SECRET"] = os.environ.get("GOOGLE_OAUTH_CLIENT_SECRET") google_bp = make_google_blueprint(scope=["profile", "email"]) app.register_blueprint(google_bp, url_prefix="/login") @app.route("/") def index(): if not google.authorized: return redirect(url_for("google.login")) resp = google.get("/plus/v1/people/me") assert resp.ok, resp.text return "You are {email} on Google".format(email=resp.json()["emails"][0]["value"]) if __name__ == "__main__": app.run()
import os from werkzeug.contrib.fixers import ProxyFix from flask import Flask, redirect, url_for from flask_dance.contrib.google import make_google_blueprint, google from raven.contrib.flask import Sentry app = Flask(__name__) app.wsgi_app = ProxyFix(app.wsgi_app) sentry = Sentry(app) app.secret_key = os.environ.get("FLASK_SECRET_KEY", "supersekrit") app.config["GOOGLE_OAUTH_CLIENT_ID"] = os.environ.get("GOOGLE_OAUTH_CLIENT_ID") app.config["GOOGLE_OAUTH_CLIENT_SECRET"] = os.environ.get("GOOGLE_OAUTH_CLIENT_SECRET") google_bp = make_google_blueprint(scope=["profile", "email"]) app.register_blueprint(google_bp, url_prefix="/login") @app.route("/") def index(): if not google.authorized: return redirect(url_for("google.login")) resp = google.get("/oauth2/v1/userinfo") assert resp.ok, resp.text return "You are {email} on Google".format(email=resp.json()["emails"][0]["value"]) if __name__ == "__main__": app.run()
Use userinfo URI for user profile info
Use userinfo URI for user profile info
Python
mit
singingwolfboy/flask-dance-google
2bb8ee6ae30e233f28ea0ae0fb01c0e4a1f8d9f1
tests/functional/test_warning.py
tests/functional/test_warning.py
import pytest import textwrap @pytest.fixture def warnings_demo(tmpdir): demo = tmpdir.joinpath('warnings_demo.py') demo.write_text(textwrap.dedent(''' from logging import basicConfig from pip._internal.utils import deprecation deprecation.install_warning_logger() basicConfig() deprecation.deprecated("deprecated!", replacement=None, gone_in=None) ''')) return demo def test_deprecation_warnings_are_correct(script, warnings_demo): result = script.run('python', warnings_demo, expect_stderr=True) expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n' assert result.stderr == expected def test_deprecation_warnings_can_be_silenced(script, warnings_demo): script.environ['PYTHONWARNINGS'] = 'ignore' result = script.run('python', warnings_demo) assert result.stderr == ''
import textwrap import pytest @pytest.fixture def warnings_demo(tmpdir): demo = tmpdir.joinpath('warnings_demo.py') demo.write_text(textwrap.dedent(''' from logging import basicConfig from pip._internal.utils import deprecation deprecation.install_warning_logger() basicConfig() deprecation.deprecated("deprecated!", replacement=None, gone_in=None) ''')) return demo def test_deprecation_warnings_are_correct(script, warnings_demo): result = script.run('python', warnings_demo, expect_stderr=True) expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n' assert result.stderr == expected def test_deprecation_warnings_can_be_silenced(script, warnings_demo): script.environ['PYTHONWARNINGS'] = 'ignore' result = script.run('python', warnings_demo) assert result.stderr == ''
Sort imports for the greater good
Sort imports for the greater good
Python
mit
xavfernandez/pip,sbidoul/pip,pypa/pip,rouge8/pip,rouge8/pip,pfmoore/pip,xavfernandez/pip,sbidoul/pip,pypa/pip,pradyunsg/pip,xavfernandez/pip,pradyunsg/pip,pfmoore/pip,rouge8/pip
ad73789f74106a2d6014a2f737578494d2d21fbf
virtool/api/processes.py
virtool/api/processes.py
import virtool.http.routes import virtool.utils from virtool.api.utils import json_response routes = virtool.http.routes.Routes() @routes.get("/api/processes") async def find(req): db = req.app["db"] documents = [virtool.utils.base_processor(d) async for d in db.processes.find()] return json_response(documents) @routes.get("/api/processes/{process_id}") async def get(req): db = req.app["db"] process_id = req.match_info["process_id"] document = await db.processes.find_one(process_id) return json_response(virtool.utils.base_processor(document)) @routes.get("/api/processes/software_update") async def get_software_update(req): db = req.app["db"] document = await db.processes.find_one({"type": "software_update"}) return json_response(virtool.utils.base_processor(document)) @routes.get("/api/processes/hmm_install") async def get_hmm_install(req): db = req.app["db"] document = await db.processes.find_one({"type": "hmm_install"}) return json_response(virtool.utils.base_processor(document))
import virtool.http.routes import virtool.utils from virtool.api.utils import json_response routes = virtool.http.routes.Routes() @routes.get("/api/processes") async def find(req): db = req.app["db"] documents = [virtool.utils.base_processor(d) async for d in db.processes.find()] return json_response(documents) @routes.get("/api/processes/{process_id}") async def get(req): db = req.app["db"] process_id = req.match_info["process_id"] document = await db.processes.find_one(process_id) return json_response(virtool.utils.base_processor(document))
Remove specific process API GET endpoints
Remove specific process API GET endpoints
Python
mit
virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool
c18b8b5e545032ae512bad505255c0f72390b633
docs/conf.py
docs/conf.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import hgtools.managers # use hgtools to get the version hg_mgr = hgtools.managers.RepoManager.get_first_valid_manager() extensions = [ 'sphinx.ext.autodoc', ] # General information about the project. project = 'pytest-runner' copyright = '2015 Jason R. Coombs' # The short X.Y version. version = hg_mgr.get_current_version() # The full version, including alpha/beta/rc tags. release = version master_doc = 'index'
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import setuptools_scm extensions = [ 'sphinx.ext.autodoc', ] # General information about the project. project = 'pytest-runner' copyright = '2015 Jason R. Coombs' # The short X.Y version. version = setuptools_scm.get_version() # The full version, including alpha/beta/rc tags. release = version master_doc = 'index'
Use setuptools_scm in docs generation.
Use setuptools_scm in docs generation.
Python
mit
pytest-dev/pytest-runner
77ad68b04b66feb47116999cf79892f6630d9601
thefuck/rules/ln_no_hard_link.py
thefuck/rules/ln_no_hard_link.py
"""Suggest creating symbolic link if hard link is not allowed. Example: > ln barDir barLink ln: ‘barDir’: hard link not allowed for directory --> ln -s barDir barLink """ import re from thefuck.specific.sudo import sudo_support @sudo_support def match(command): return (command.stderr.endswith("hard link not allowed for directory") and command.script.startswith("ln ")) @sudo_support def get_new_command(command): return re.sub(r'^ln ', 'ln -s ', command.script)
# -*- coding: utf-8 -*- """Suggest creating symbolic link if hard link is not allowed. Example: > ln barDir barLink ln: ‘barDir’: hard link not allowed for directory --> ln -s barDir barLink """ import re from thefuck.specific.sudo import sudo_support @sudo_support def match(command): return (command.stderr.endswith("hard link not allowed for directory") and command.script.startswith("ln ")) @sudo_support def get_new_command(command): return re.sub(r'^ln ', 'ln -s ', command.script)
Fix encoding error in source file example
Fix encoding error in source file example
Python
mit
lawrencebenson/thefuck,mlk/thefuck,nvbn/thefuck,PLNech/thefuck,scorphus/thefuck,nvbn/thefuck,SimenB/thefuck,Clpsplug/thefuck,SimenB/thefuck,mlk/thefuck,scorphus/thefuck,lawrencebenson/thefuck,Clpsplug/thefuck,PLNech/thefuck
d7f3ea41bc3d252d786a339fc34337f01e1cc3eb
django_dbq/migrations/0001_initial.py
django_dbq/migrations/0001_initial.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import jsonfield.fields import uuid try: from django.db.models import UUIDField except ImportError: from django_dbq.fields import UUIDField class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='Job', fields=[ ('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)), ('created', models.DateTimeField(db_index=True, auto_now_add=True)), ('modified', models.DateTimeField(auto_now=True)), ('name', models.CharField(max_length=100)), ('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])), ('next_task', models.CharField(max_length=100, blank=True)), ('workspace', jsonfield.fields.JSONField(null=True)), ('queue_name', models.CharField(db_index=True, max_length=20, default='default')), ], options={ 'ordering': ['-created'], }, ), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import jsonfield.fields import uuid from django.db.models import UUIDField class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='Job', fields=[ ('id', UUIDField(serialize=False, editable=False, default=uuid.uuid4, primary_key=True)), ('created', models.DateTimeField(db_index=True, auto_now_add=True)), ('modified', models.DateTimeField(auto_now=True)), ('name', models.CharField(max_length=100)), ('state', models.CharField(db_index=True, max_length=20, default='NEW', choices=[('NEW', 'NEW'), ('READY', 'READY'), ('PROCESSING', 'PROCESSING'), ('FAILED', 'FAILED'), ('COMPLETE', 'COMPLETE')])), ('next_task', models.CharField(max_length=100, blank=True)), ('workspace', jsonfield.fields.JSONField(null=True)), ('queue_name', models.CharField(db_index=True, max_length=20, default='default')), ], options={ 'ordering': ['-created'], }, ), ]
Remove reference to old UUIDfield in django migration
Remove reference to old UUIDfield in django migration
Python
bsd-2-clause
dabapps/django-db-queue
a5130e32bffa1dbc4d83f349fc3653b690154d71
vumi/workers/vas2nets/workers.py
vumi/workers/vas2nets/workers.py
# -*- test-case-name: vumi.workers.vas2nets.test_vas2nets -*- # -*- encoding: utf-8 -*- from twisted.python import log from twisted.internet.defer import inlineCallbacks, Deferred from vumi.message import Message from vumi.service import Worker class EchoWorker(Worker): @inlineCallbacks def startWorker(self): """called by the Worker class when the AMQP connections been established""" self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config) self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config, self.handle_inbound_message) def handle_inbound_message(self, message): log.msg("Received: %s" % (message.payload,)) """Reply to the message with the same content""" data = message.payload reply = { 'to_msisdn': data['from_msisdn'], 'from_msisdn': data['to_msisdn'], 'message': data['message'], 'id': data['transport_message_id'], 'transport_network_id': data['transport_network_id'], } return self.publisher.publish_message(Message(**reply)) def stopWorker(self): """shutdown""" pass
# -*- test-case-name: vumi.workers.vas2nets.test_vas2nets -*- # -*- encoding: utf-8 -*- from twisted.python import log from twisted.internet.defer import inlineCallbacks, Deferred from vumi.message import Message from vumi.service import Worker class EchoWorker(Worker): @inlineCallbacks def startWorker(self): """called by the Worker class when the AMQP connections been established""" self.publisher = yield self.publish_to('sms.outbound.%(transport_name)s' % self.config) self.consumer = yield self.consume('sms.inbound.%(transport_name)s.%(shortcode)s' % self.config, self.handle_inbound_message) def handle_inbound_message(self, message): log.msg("Received: %s" % (message.payload,)) """Reply to the message with the same content""" data = message.payload reply = { 'to_msisdn': data['from_msisdn'], 'from_msisdn': data['to_msisdn'], 'message': data['message'], 'id': data['transport_message_id'], 'transport_network_id': data['transport_network_id'], 'transport_keyword': data['transport_keyword'], } return self.publisher.publish_message(Message(**reply)) def stopWorker(self): """shutdown""" pass
Add keyword to echo worker.
Add keyword to echo worker.
Python
bsd-3-clause
TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi
bbf22dc68202d81a8c7e94fbb8e61d819d808115
wisely_project/pledges/models.py
wisely_project/pledges/models.py
from django.utils import timezone from django.db import models from users.models import Course, BaseModel, User class Pledge(BaseModel): user = models.ForeignKey(User) course = models.ForeignKey(Course) money = models.DecimalField(max_digits=8, decimal_places=2) pledge_date = models.DateTimeField('date pledged', default=timezone.now()) complete_date = models.DateTimeField('date completed', null=True) is_active = models.BooleanField(default=True) is_complete = models.BooleanField(default=False) class Follower(BaseModel): pledge = models.ForeignKey(Pledge) email = models.EmailField(default='', blank=True)
from django.utils import timezone from django.db import models from users.models import Course, BaseModel, UserProfile class Pledge(BaseModel): user = models.ForeignKey(UserProfile) course = models.ForeignKey(Course) money = models.DecimalField(max_digits=8, decimal_places=2) pledge_date = models.DateTimeField('date pledged', default=timezone.now()) complete_date = models.DateTimeField('date completed', null=True) is_active = models.BooleanField(default=True) is_complete = models.BooleanField(default=False) class Follower(BaseModel): pledge = models.ForeignKey(Pledge) email = models.EmailField(default='', blank=True)
Make pledge foreignkey to userprofile
Make pledge foreignkey to userprofile
Python
mit
TejasM/wisely,TejasM/wisely,TejasM/wisely
8eca7b30865e4d02fd440f55ad3215dee6fab8a1
gee_asset_manager/batch_remover.py
gee_asset_manager/batch_remover.py
import fnmatch import logging import sys import ee def delete(asset_path): root = asset_path[:asset_path.rfind('/')] all_assets_names = [e['id'] for e in ee.data.getList({'id': root})] filtered_names = fnmatch.filter(all_assets_names, asset_path) if not filtered_names: logging.warning('Nothing to remove. Exiting.') sys.exit(1) else: for path in filtered_names: __delete_recursive(path) logging.info('Collection %s removed', path) def __delete_recursive(asset_path): info = ee.data.getInfo(asset_path) if not info: logging.warning('Nothing to delete.') sys.exit(1) elif info['type'] == 'Image': pass elif info['type'] == 'Folder': items_in_destination = ee.data.getList({'id': asset_path}) for item in items_in_destination: logging.info('Removing items in %s folder', item['id']) delete(item['id']) else: items_in_destination = ee.data.getList({'id': asset_path}) for item in items_in_destination: ee.data.deleteAsset(item['id']) ee.data.deleteAsset(asset_path)
import fnmatch import logging import sys import ee def delete(asset_path): root_idx = asset_path.rfind('/') if root_idx == -1: logging.warning('Asset not found. Make sure you pass full asset name, e.g. users/pinkiepie/rainbow') sys.exit(1) root = asset_path[:root_idx] all_assets_names = [e['id'] for e in ee.data.getList({'id': root})] filtered_names = fnmatch.filter(all_assets_names, asset_path) if not filtered_names: logging.warning('Nothing to remove. Exiting.') sys.exit(1) else: for path in filtered_names: __delete_recursive(path) logging.info('Collection %s removed', path) def __delete_recursive(asset_path): info = ee.data.getInfo(asset_path) if not info: logging.warning('Nothing to delete.') sys.exit(1) elif info['type'] == 'Image': pass elif info['type'] == 'Folder': items_in_destination = ee.data.getList({'id': asset_path}) for item in items_in_destination: logging.info('Removing items in %s folder', item['id']) delete(item['id']) else: items_in_destination = ee.data.getList({'id': asset_path}) for item in items_in_destination: ee.data.deleteAsset(item['id']) ee.data.deleteAsset(asset_path)
Add warning when removing an asset without full path
Add warning when removing an asset without full path
Python
apache-2.0
tracek/gee_asset_manager
0255a9cc22999d3111076155feab85ebe3198492
impeller/tools/build_metal_library.py
impeller/tools/build_metal_library.py
# Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import sys import argparse import errno import os import subprocess def MakeDirectories(path): try: os.makedirs(path) except OSError as exc: if exc.errno == errno.EEXIST and os.path.isdir(path): pass else: raise def Main(): parser = argparse.ArgumentParser() parser.add_argument("--output", type=str, required=True, help="The location to generate the Metal library to.") parser.add_argument("--depfile", type=str, required=True, help="The location of the depfile.") parser.add_argument("--source", type=str, action="append", required=True, help="The source file to compile. Can be specified multiple times.") args = parser.parse_args() MakeDirectories(os.path.dirname(args.depfile)) command = [ "xcrun", "metal", "-MO", "-gline-tables-only", # Both user and system header will be tracked. "-MMD", "-MF", args.depfile, "-o", args.output ] command += args.source subprocess.check_call(command) if __name__ == '__main__': if sys.platform != 'darwin': raise Exception("This script only runs on Mac") Main()
# Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import sys import argparse import errno import os import subprocess def MakeDirectories(path): try: os.makedirs(path) except OSError as exc: if exc.errno == errno.EEXIST and os.path.isdir(path): pass else: raise def Main(): parser = argparse.ArgumentParser() parser.add_argument("--output", type=str, required=True, help="The location to generate the Metal library to.") parser.add_argument("--depfile", type=str, required=True, help="The location of the depfile.") parser.add_argument("--source", type=str, action="append", required=True, help="The source file to compile. Can be specified multiple times.") args = parser.parse_args() MakeDirectories(os.path.dirname(args.depfile)) command = [ "xcrun", "metal", # TODO: Embeds both sources and driver options in the output. This aids in # debugging but should be removed from release builds. "-MO", "-gline-tables-only", # Both user and system header will be tracked. "-MMD", "-MF", args.depfile, "-o", args.output ] command += args.source subprocess.check_call(command) if __name__ == '__main__': if sys.platform != 'darwin': raise Exception("This script only runs on Mac") Main()
Add FIXME to address removing debug information from generated shaders.
Add FIXME to address removing debug information from generated shaders.
Python
bsd-3-clause
chinmaygarde/flutter_engine,rmacnak-google/engine,devoncarew/engine,devoncarew/engine,chinmaygarde/flutter_engine,devoncarew/engine,rmacnak-google/engine,rmacnak-google/engine,rmacnak-google/engine,flutter/engine,chinmaygarde/flutter_engine,chinmaygarde/flutter_engine,flutter/engine,chinmaygarde/flutter_engine,rmacnak-google/engine,flutter/engine,devoncarew/engine,rmacnak-google/engine,rmacnak-google/engine,flutter/engine,chinmaygarde/flutter_engine,flutter/engine,chinmaygarde/flutter_engine,devoncarew/engine,devoncarew/engine,flutter/engine,devoncarew/engine,flutter/engine,flutter/engine
18a874f312a57b4b9b7a5ce5cf9857585f0f0fef
truffe2/app/utils.py
truffe2/app/utils.py
def add_current_unit(request): """Template context processor to add current unit""" return {'CURRENT_UNIT': get_current_unit(request)} def get_current_unit(request): """Return the current unit""" from units.models import Unit current_unit_pk = request.session.get('current_unit_pk', 1) try: current_unit = Unit.objects.get(pk=current_unit_pk) except Unit.DoesNotExist: current_unit = Unit.objects.get(pk=1) return current_unit def update_current_unit(request, unit_pk): """Update the current unit""" request.session['current_unit_pk'] = unit_pk
from django.conf import settings def add_current_unit(request): """Template context processor to add current unit""" return {'CURRENT_UNIT': get_current_unit(request)} def get_current_unit(request): """Return the current unit""" from units.models import Unit current_unit_pk = request.session.get('current_unit_pk', 1) try: current_unit = Unit.objects.get(pk=current_unit_pk) except Unit.DoesNotExist: try: current_unit = Unit.objects.get(pk=settings.ROOT_UNIT_PK) except: current_unit = None return current_unit def update_current_unit(request, unit_pk): """Update the current unit""" request.session['current_unit_pk'] = unit_pk
Fix error if no units
Fix error if no units
Python
bsd-2-clause
agepoly/truffe2,ArcaniteSolutions/truffe2,ArcaniteSolutions/truffe2,agepoly/truffe2,agepoly/truffe2,ArcaniteSolutions/truffe2,agepoly/truffe2,ArcaniteSolutions/truffe2
0a1358f27db3abb04032fac1b8a3da09d846d23e
oauth_provider/utils.py
oauth_provider/utils.py
import oauth.oauth as oauth from django.conf import settings from django.http import HttpResponse from stores import DataStore OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME' def initialize_server_request(request): """Shortcut for initialization.""" oauth_request = oauth.OAuthRequest.from_request(request.method, request.build_absolute_uri(), headers=request.META, parameters=dict(request.REQUEST.items()), query_string=request.environ.get('QUERY_STRING', '')) if oauth_request: oauth_server = oauth.OAuthServer(DataStore(oauth_request)) oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT()) oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1()) else: oauth_server = None return oauth_server, oauth_request def send_oauth_error(err=None): """Shortcut for sending an error.""" # send a 401 error response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain") response.status_code = 401 # return the authenticate header realm = getattr(settings, OAUTH_REALM_KEY_NAME, '') header = oauth.build_authenticate_header(realm=realm) for k, v in header.iteritems(): response[k] = v return response
import oauth.oauth as oauth from django.conf import settings from django.http import HttpResponse from stores import DataStore OAUTH_REALM_KEY_NAME = 'OAUTH_REALM_KEY_NAME' def initialize_server_request(request): """Shortcut for initialization.""" # Django converts Authorization header in HTTP_AUTHORIZATION # Warning: it doesn't happen in tests but it's useful, do not remove! auth_header = {} if 'Authorization' in request.META: auth_header = {'Authorization': request.META['Authorization']} elif 'HTTP_AUTHORIZATION' in request.META: auth_header = {'Authorization': request.META['HTTP_AUTHORIZATION']} oauth_request = oauth.OAuthRequest.from_request(request.method, request.build_absolute_uri(), headers=auth_header, parameters=dict(request.REQUEST.items()), query_string=request.environ.get('QUERY_STRING', '')) if oauth_request: oauth_server = oauth.OAuthServer(DataStore(oauth_request)) oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT()) oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1()) else: oauth_server = None return oauth_server, oauth_request def send_oauth_error(err=None): """Shortcut for sending an error.""" # send a 401 error response = HttpResponse(err.message.encode('utf-8'), mimetype="text/plain") response.status_code = 401 # return the authenticate header realm = getattr(settings, OAUTH_REALM_KEY_NAME, '') header = oauth.build_authenticate_header(realm=realm) for k, v in header.iteritems(): response[k] = v return response
Fix a bug introduced in the latest revision, testing auth header in initialize_server_request now, thanks Chris McMichael for the report and patch
Fix a bug introduced in the latest revision, testing auth header in initialize_server_request now, thanks Chris McMichael for the report and patch
Python
bsd-3-clause
e-loue/django-oauth-plus
1fa0eb2c792b3cc89d27b322c80548f022b7fbb9
api/base/exceptions.py
api/base/exceptions.py
from rest_framework.exceptions import APIException from rest_framework import status def jsonapi_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array with a 'detail' member """ from rest_framework.views import exception_handler response = exception_handler(exc, context) if response is not None: if 'detail' in response.data: response.data = {'errors': [response.data]} else: response.data = {'errors': [{'detail': response.data}]} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.')
from rest_framework import status from rest_framework.exceptions import APIException def json_api_exception_handler(exc, context): """ Custom exception handler that returns errors object as an array """ from rest_framework.views import exception_handler response = exception_handler(exc, context) # Title removed to avoid clash with node "title" errors acceptable_members = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta'] errors = [] if response is not None: message = response.data if isinstance(message, dict): for key, value in message.iteritems(): if key in acceptable_members: errors.append({key: value}) else: errors.append({'detail': {key: value}}) elif isinstance(message, list): for error in message: errors.append({'detail': error}) else: errors.append({'detail': message}) response.data = {'errors': errors} return response # Custom Exceptions the Django Rest Framework does not support class Gone(APIException): status_code = status.HTTP_410_GONE default_detail = ('The requested resource is no longer available.')
Modify exception handler to cover multiple data types i.e. dict and list and handle when more than one error returned
Modify exception handler to cover multiple data types i.e. dict and list and handle when more than one error returned
Python
apache-2.0
monikagrabowska/osf.io,hmoco/osf.io,asanfilippo7/osf.io,njantrania/osf.io,sloria/osf.io,MerlinZhang/osf.io,acshi/osf.io,mluke93/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,haoyuchen1992/osf.io,ckc6cz/osf.io,GageGaskins/osf.io,chrisseto/osf.io,ticklemepierce/osf.io,chennan47/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,haoyuchen1992/osf.io,mluo613/osf.io,acshi/osf.io,samanehsan/osf.io,baylee-d/osf.io,cosenal/osf.io,samchrisinger/osf.io,rdhyee/osf.io,brandonPurvis/osf.io,arpitar/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,KAsante95/osf.io,sbt9uc/osf.io,TomHeatwole/osf.io,laurenrevere/osf.io,saradbowman/osf.io,abought/osf.io,ZobairAlijan/osf.io,kwierman/osf.io,rdhyee/osf.io,mattclark/osf.io,adlius/osf.io,alexschiller/osf.io,arpitar/osf.io,kch8qx/osf.io,caseyrollins/osf.io,petermalcolm/osf.io,cslzchen/osf.io,pattisdr/osf.io,aaxelb/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,petermalcolm/osf.io,danielneis/osf.io,arpitar/osf.io,TomHeatwole/osf.io,saradbowman/osf.io,abought/osf.io,icereval/osf.io,MerlinZhang/osf.io,sloria/osf.io,cosenal/osf.io,sbt9uc/osf.io,aaxelb/osf.io,mfraezz/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,Nesiehr/osf.io,erinspace/osf.io,GageGaskins/osf.io,caneruguz/osf.io,RomanZWang/osf.io,njantrania/osf.io,felliott/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,felliott/osf.io,laurenrevere/osf.io,erinspace/osf.io,crcresearch/osf.io,crcresearch/osf.io,wearpants/osf.io,billyhunt/osf.io,monikagrabowska/osf.io,emetsger/osf.io,billyhunt/osf.io,kch8qx/osf.io,Ghalko/osf.io,adlius/osf.io,aaxelb/osf.io,zachjanicki/osf.io,zamattiac/osf.io,caneruguz/osf.io,SSJohns/osf.io,caseyrollins/osf.io,mluo613/osf.io,samchrisinger/osf.io,cosenal/osf.io,abought/osf.io,kwierman/osf.io,rdhyee/osf.io,danielneis/osf.io,SSJohns/osf.io,wearpants/osf.io,sbt9uc/osf.io,haoyuchen1992/osf.io,emetsger/osf.io,Nesiehr/osf.io,pattisdr/osf.io,chrisseto/osf.io,samanehsan/osf.io,KAsante95/osf.io,binoculars/osf.io,brandonPurvis/osf.io,mluo613/osf.io,binoculars/osf.io,wearpants/osf.io,ckc6cz/osf.io,jnayak1/osf.io,RomanZWang/osf.io,caseyrygt/osf.io,chrisseto/osf.io,adlius/osf.io,TomHeatwole/osf.io,emetsger/osf.io,Johnetordoff/osf.io,ZobairAlijan/osf.io,HalcyonChimera/osf.io,samchrisinger/osf.io,samanehsan/osf.io,mluke93/osf.io,chennan47/osf.io,asanfilippo7/osf.io,asanfilippo7/osf.io,ticklemepierce/osf.io,acshi/osf.io,mluo613/osf.io,amyshi188/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,sbt9uc/osf.io,brianjgeiger/osf.io,jmcarp/osf.io,amyshi188/osf.io,cwisecarver/osf.io,binoculars/osf.io,samchrisinger/osf.io,jnayak1/osf.io,billyhunt/osf.io,alexschiller/osf.io,acshi/osf.io,petermalcolm/osf.io,cslzchen/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,ckc6cz/osf.io,kch8qx/osf.io,cwisecarver/osf.io,jnayak1/osf.io,abought/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,haoyuchen1992/osf.io,cslzchen/osf.io,alexschiller/osf.io,emetsger/osf.io,doublebits/osf.io,ticklemepierce/osf.io,mattclark/osf.io,danielneis/osf.io,ckc6cz/osf.io,mfraezz/osf.io,doublebits/osf.io,ZobairAlijan/osf.io,chennan47/osf.io,doublebits/osf.io,kwierman/osf.io,RomanZWang/osf.io,MerlinZhang/osf.io,cwisecarver/osf.io,jmcarp/osf.io,kch8qx/osf.io,RomanZWang/osf.io,Johnetordoff/osf.io,zamattiac/osf.io,leb2dg/osf.io,sloria/osf.io,amyshi188/osf.io,jmcarp/osf.io,RomanZWang/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,TomHeatwole/osf.io,zachjanicki/osf.io,hmoco/osf.io,petermalcolm/osf.io,TomBaxter/osf.io,erinspace/osf.io,mluke93/osf.io,h
moco/osf.io,GageGaskins/osf.io,kwierman/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,alexschiller/osf.io,MerlinZhang/osf.io,arpitar/osf.io,Ghalko/osf.io,Nesiehr/osf.io,mfraezz/osf.io,mluo613/osf.io,leb2dg/osf.io,zachjanicki/osf.io,mattclark/osf.io,GageGaskins/osf.io,KAsante95/osf.io,mfraezz/osf.io,icereval/osf.io,TomBaxter/osf.io,monikagrabowska/osf.io,wearpants/osf.io,felliott/osf.io,danielneis/osf.io,brandonPurvis/osf.io,njantrania/osf.io,KAsante95/osf.io,alexschiller/osf.io,SSJohns/osf.io,zachjanicki/osf.io,SSJohns/osf.io,baylee-d/osf.io,doublebits/osf.io,DanielSBrown/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,jmcarp/osf.io,rdhyee/osf.io,kch8qx/osf.io,billyhunt/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,jnayak1/osf.io,baylee-d/osf.io,felliott/osf.io,samanehsan/osf.io,amyshi188/osf.io,monikagrabowska/osf.io,cosenal/osf.io,ticklemepierce/osf.io,TomBaxter/osf.io,ZobairAlijan/osf.io,zamattiac/osf.io,laurenrevere/osf.io,mluke93/osf.io,chrisseto/osf.io,Ghalko/osf.io,acshi/osf.io,monikagrabowska/osf.io,njantrania/osf.io,zamattiac/osf.io,caneruguz/osf.io,billyhunt/osf.io,Ghalko/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,icereval/osf.io
4c1bf1757baa5beec50377724961c528f5985864
ptest/screencapturer.py
ptest/screencapturer.py
import threading import traceback import plogger __author__ = 'karl.gong' def take_screen_shot(): current_thread = threading.currentThread() active_browser = current_thread.get_property("browser") if active_browser is not None: while True: try: active_browser.switch_to.alert.dismiss() except Exception: break try: screen_shot = active_browser.get_screenshot_as_png() except Exception as e: plogger.warn("Failed to take the screenshot: \n%s\n%s" % (e.message, traceback.format_exc())) return current_thread.get_property("running_test_case_fixture").screen_shot = screen_shot else: pass # todo: take screen shot for desktop
import threading
import traceback
import StringIO

import plogger

try:
    from PIL import ImageGrab
except ImportError:
    PIL_installed = False
else:
    PIL_installed = True

try:
    import wx
except ImportError:
    wxpython_installed = False
else:
    wxpython_installed = True

__author__ = 'karl.gong'


def take_screen_shot():
    current_thread = threading.currentThread()
    active_browser = current_thread.get_property("browser")

    if active_browser is not None:
        while True:
            try:
                active_browser.switch_to.alert.dismiss()
            except Exception:
                break

        def capture_screen():
            return active_browser.get_screenshot_as_png()
    elif PIL_installed:
        def capture_screen():
            output = StringIO.StringIO()
            ImageGrab.grab().save(output, format="png")
            return output.getvalue()
    elif wxpython_installed:
        def capture_screen():
            app = wx.App(False)
            screen = wx.ScreenDC()
            width, height = screen.GetSize()
            bmp = wx.EmptyBitmap(width, height)
            mem = wx.MemoryDC(bmp)
            mem.Blit(0, 0, width, height, screen, 0, 0)
            output = StringIO.StringIO()
            bmp.ConvertToImage().SaveStream(output, wx.BITMAP_TYPE_PNG)
            return output.getvalue()
    else:
        return

    try:
        current_thread.get_property("running_test_case_fixture").screen_shot = capture_screen()
    except Exception as e:
        plogger.warn("Failed to take the screenshot: \n%screen\n%screen" % (e.message, traceback.format_exc()))
Support capture screenshot for no-selenium test
Support capture screenshot for no-selenium test
Python
apache-2.0
KarlGong/ptest,KarlGong/ptest
c82f0f10ea8b96377ebed8a6859ff3cd8ed4cd3f
python/turbodbc/exceptions.py
python/turbodbc/exceptions.py
from __future__ import absolute_import

from functools import wraps

from exceptions import StandardError

from turbodbc_intern import Error as InternError


class Error(StandardError):
    pass


class InterfaceError(Error):
    pass


class DatabaseError(Error):
    pass


def translate_exceptions(f):
    @wraps(f)
    def wrapper(*args, **kwds):
        try:
            return f(*args, **kwds)
        except InternError as e:
            raise DatabaseError(str(e))
    return wrapper
from __future__ import absolute_import

from functools import wraps

from turbodbc_intern import Error as InternError

# Python 2/3 compatibility
try:
    from exceptions import StandardError as _BaseError
except ImportError:
    _BaseError = Exception


class Error(_BaseError):
    pass


class InterfaceError(Error):
    pass


class DatabaseError(Error):
    pass


def translate_exceptions(f):
    @wraps(f)
    def wrapper(*args, **kwds):
        try:
            return f(*args, **kwds)
        except InternError as e:
            raise DatabaseError(str(e))
    return wrapper
Fix Python 2/3 exception base class compatibility
Fix Python 2/3 exception base class compatibility
Python
mit
blue-yonder/turbodbc,blue-yonder/turbodbc,blue-yonder/turbodbc,blue-yonder/turbodbc
80f1ee23f85aee9a54e0c6cae7a30dddbe96541b
scorecard/tests/test_views.py
scorecard/tests/test_views.py
import json

from infrastructure.models import FinancialYear

from django.test import (
    TransactionTestCase,
    Client,
    override_settings,
)

from . import (
    import_data,
)
from .resources import (
    GeographyResource,
    MunicipalityProfileResource,
    MedianGroupResource,
    RatingCountGroupResource,
)


@override_settings(
    SITE_ID=2,
    STATICFILES_STORAGE="django.contrib.staticfiles.storage.StaticFilesStorage",
)
class GeographyDetailViewTestCase(TransactionTestCase):
    serialized_rollback = True

    def test_context(self):
        # Import sample data
        import_data(
            GeographyResource,
            "views/scorecard_geography.csv",
        )
        import_data(
            MunicipalityProfileResource,
            "views/municipality_profile.csv",
        )
        import_data(
            MedianGroupResource,
            "views/median_group.csv",
        )
        import_data(
            RatingCountGroupResource,
            "views/rating_count_group.csv",
        )
        fy = FinancialYear.objects.create(budget_year="2019/2020")

        # Make request
        client = Client()
        response = client.get("/profiles/municipality-CPT-city-of-cape-town/")
        context = response.context
        page_data = json.loads(context["page_data_json"])

        # Test for amount types
        self.assertIsInstance(page_data["amount_types_v1"], dict)

        # Test for cube names
        self.assertIsInstance(page_data["cube_names"], dict)

        # Test for municipality category descriptions
        self.assertIsInstance(page_data["municipal_category_descriptions"], dict)
import json

from django.test import (
    TransactionTestCase,
    Client,
    override_settings,
)


@override_settings(
    SITE_ID=2,
    STATICFILES_STORAGE="django.contrib.staticfiles.storage.StaticFilesStorage",
)
class GeographyDetailViewTestCase(TransactionTestCase):
    serialized_rollback = True
    fixtures = ["seeddata", "demo-data", "compiled_profile"]

    def test_context(self):
        # Make request
        client = Client()
        response = client.get("/profiles/municipality-BUF-buffalo-city/")
        context = response.context
        page_data = json.loads(context["page_data_json"])

        # Test for amount types
        self.assertIsInstance(page_data["amount_types_v1"], dict)

        # Test for cube names
        self.assertIsInstance(page_data["cube_names"], dict)

        # Test for municipality category descriptions
        self.assertIsInstance(page_data["municipal_category_descriptions"], dict)
Use new fixtures for geography views test
Use new fixtures for geography views test
Python
mit
Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data
4666849791cad70ae1bb907a2dcc35ccfc0b7de4
backend/populate_dimkarakostas.py
backend/populate_dimkarakostas.py
from string import ascii_lowercase

import django
import os

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()

from breach.models import Target, Victim

endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9

target_1 = Target(
    endpoint=endpoint,
    prefix=prefix,
    alphabet=alphabet,
    secretlength=secretlength
)
target_1.save()

print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)

snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'

victim_1 = Victim(
    target=target_1,
    snifferendpoint=snifferendpoint,
    sourceip=sourceip,
    # method='serial'
)
victim_1.save()

print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
from string import ascii_lowercase

import django
import os
import string

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
django.setup()

from breach.models import Target, Victim

endpoint = 'https://dimkarakostas.com/rupture/test.php?ref=%s'
prefix = 'imper'
alphabet = ascii_lowercase
secretlength = 9

target_1 = Target(
    endpoint=endpoint,
    prefix=prefix,
    alphabet=alphabet,
    secretlength=secretlength,
    alignmentalphabet=string.ascii_uppercase
)
target_1.save()

print 'Created Target:\n\tendpoint: {}\n\tprefix: {}\n\talphabet: {}\n\tsecretlength: {}'.format(endpoint, prefix, alphabet, secretlength)

snifferendpoint = 'http://127.0.0.1:9000'
sourceip = '192.168.1.70'

victim_1 = Victim(
    target=target_1,
    snifferendpoint=snifferendpoint,
    sourceip=sourceip,
    # method='serial'
)
victim_1.save()

print 'Created Victim:\n\tvictim_id: {}\n\tsnifferendpoint: {}\n\tsourceip: {}'.format(victim_1.id, snifferendpoint, sourceip)
Update dimkarakostas population with alignmentalphabet
Update dimkarakostas population with alignmentalphabet
Python
mit
esarafianou/rupture,dionyziz/rupture,esarafianou/rupture,dionyziz/rupture,dimriou/rupture,dionyziz/rupture,dimriou/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,esarafianou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dimkarakostas/rupture,dimkarakostas/rupture,dimriou/rupture,dimriou/rupture
1b74dc0288d1f3349e5045f6791c8495435c961d
controlers/errors.py
controlers/errors.py
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.

makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.ext.webapp import template
import os.path

tplt = os.path.join(os.path.dirname(__file__), '../templates/default/errors.html')


def renderErrorPage(msg, redirect=''):
    vars = dict(msg=msg, redirect=redirect)
    return template.render(tplt, vars)


def errorPage(msg, redirect, response):
    response.out.write (renderErrorPage(msg, redirect))
    return False
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.

makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.ext.webapp import template
import os.path

tplt = os.path.join(os.path.dirname(__file__), '../templates/default/errors.html')


def renderErrorPage(msg, redirect=''):
    vars = dict(message=msg, redirect=redirect)
    return template.render(tplt, vars)


def errorPage(msg, redirect, response):
    response.out.write (renderErrorPage(msg, redirect))
    return False
Fix bug - error page could display message now
Fix bug - error page could display message now
Python
agpl-3.0
cardmaster/makeclub,cardmaster/makeclub,cardmaster/makeclub
8abdce9c60c9d2ead839e0065d35128ec16a82a1
chatterbot/__main__.py
chatterbot/__main__.py
import sys


if __name__ == '__main__':
    import chatterbot

    if '--version' in sys.argv:
        print(chatterbot.__version__)

    if 'list_nltk_data' in sys.argv:
        import nltk.data

        print('\n'.join(nltk.data.path))
import sys


if __name__ == '__main__':
    import chatterbot

    if '--version' in sys.argv:
        print(chatterbot.__version__)

    if 'list_nltk_data' in sys.argv:
        import os
        import nltk.data

        data_directories = []

        # Find each data directory in the NLTK path that has content
        for path in nltk.data.path:
            if os.path.exists(path):
                if os.listdir(path):
                    data_directories.append(path)

        print(os.linesep.join(data_directories))
Add command line utility to find NLTK data
Add command line utility to find NLTK data
Python
bsd-3-clause
gunthercox/ChatterBot,vkosuri/ChatterBot
210c7b7fb421a7c083b9d292370b15c0ece17fa7
source/bark/__init__.py
source/bark/__init__.py
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.

from .handler.distribute import Distribute

#: Top level handler responsible for relaying all logs to other handlers.
handle = Distribute()
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.

from .handler.distribute import Distribute

#: Top level handler responsible for relaying all logs to other handlers.
handler = Distribute()

handlers = handler.handlers
handle = handler.handle
Correct handler reference variable name and add convenient accessors.
Correct handler reference variable name and add convenient accessors.
Python
apache-2.0
4degrees/mill,4degrees/sawmill
c1f7544138990dc6dbb05090711d57e6fea36fb4
pava/implementation/tests/__init__.py
pava/implementation/tests/__init__.py
import unittest

import arrays

ArrayTest = arrays.ArrayTest


class PavaTest(unittest.TestCase):
    def test_general(self):
        pass


if __name__ == "__main__":
    unittest.main()
Create all.py to run all tests
Create all.py to run all tests
Python
mit
laffra/pava,laffra/pava
696a79069ad1db1caee4d6da0c3c48dbd79f9157
sqliteschema/_logger.py
sqliteschema/_logger.py
# encoding: utf-8

"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""

from __future__ import absolute_import
from __future__ import unicode_literals

import logbook
import pytablewriter
import simplesqlite


logger = logbook.Logger("sqliteschema")
logger.disable()


def set_logger(is_enable):
    pytablewriter.set_logger(is_enable=is_enable)
    simplesqlite.set_logger(is_enable=is_enable)

    if is_enable:
        logger.enable()
    else:
        logger.disable()


def set_log_level(log_level):
    """
    Set logging level of this module. Using
    `logbook <http://logbook.readthedocs.io/en/stable/>`__ module for logging.

    :param int log_level:
        One of the log level of
        `logbook <http://logbook.readthedocs.io/en/stable/api/base.html>`__.
        Disabled logging if ``log_level`` is ``logbook.NOTSET``.
    """

    pytablewriter.set_log_level(log_level)
    simplesqlite.set_log_level(log_level)

    if log_level == logbook.NOTSET:
        set_logger(is_enable=False)
    else:
        set_logger(is_enable=True)

    logger.level = log_level
# encoding: utf-8

"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""

from __future__ import absolute_import
from __future__ import unicode_literals

import logbook
import pytablewriter
import simplesqlite


logger = logbook.Logger("sqliteschema")
logger.disable()


def set_logger(is_enable):
    if is_enable != logger.disabled:
        return

    if is_enable:
        logger.enable()
    else:
        logger.disable()

    pytablewriter.set_logger(is_enable=is_enable)
    simplesqlite.set_logger(is_enable=is_enable)


def set_log_level(log_level):
    """
    Set logging level of this module. Using
    `logbook <http://logbook.readthedocs.io/en/stable/>`__ module for logging.

    :param int log_level:
        One of the log level of
        `logbook <http://logbook.readthedocs.io/en/stable/api/base.html>`__.
        Disabled logging if ``log_level`` is ``logbook.NOTSET``.
    """

    if log_level == logger.level:
        return

    if log_level == logbook.NOTSET:
        set_logger(is_enable=False)
    else:
        set_logger(is_enable=True)

    logger.level = log_level

    pytablewriter.set_log_level(log_level)
    simplesqlite.set_log_level(log_level)
Modify to avoid excessive logger initialization
Modify to avoid excessive logger initialization
Python
mit
thombashi/sqliteschema