Dataset columns (name, type, value range):

commit         stringlengths   40 – 40
old_file       stringlengths   4 – 150
new_file       stringlengths   4 – 150
old_contents   stringlengths   0 – 3.26k
new_contents   stringlengths   1 – 4.43k
subject        stringlengths   15 – 501
message        stringlengths   15 – 4.06k
lang           stringclasses   4 values
license        stringclasses   13 values
repos          stringlengths   5 – 91.5k
diff           stringlengths   0 – 4.35k
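The schema above describes one record of a commit-diff dataset: a Git commit hash, the file path before and after the change, the file contents before and after, the commit subject and full message, the language, the license, the repositories containing the commit, and the unified diff. As a minimal sketch of how a dataset with this schema could be loaded and inspected with the Hugging Face `datasets` library (the dataset identifier used here is a placeholder, not the real one):

```python
# Minimal sketch: load a dataset with the schema above and inspect one record.
# "user/commit-diffs" is a placeholder identifier, not the actual dataset name.
from datasets import load_dataset

ds = load_dataset("user/commit-diffs", split="train")

row = ds[0]
print(row["commit"])                            # 40-character commit hash
print(row["old_file"], "->", row["new_file"])   # file path before/after the change
print(row["subject"])                           # one-line commit subject
print(row["lang"], row["license"])              # e.g. "Python", "mit"
print(row["diff"])                              # unified diff between old_contents and new_contents
```

The records below are reproduced verbatim; note that each field value, including multi-line source files and diffs, is flattened onto a single line in this dump.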
2b58318ad7134a8c894b70918520a89b51a2d6dd
cla_backend/apps/reports/tests/test_utils.py
cla_backend/apps/reports/tests/test_utils.py
import mock import os from boto.s3.connection import S3Connection from django.test import TestCase, override_settings from reports.utils import get_s3_connection class UtilsTestCase(TestCase): @override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002") def test_get_s3_connection(self): envs = {"AWS_S3_HOST": "s3.eu-west-2.amazonaws.com", "S3_USE_SIGV4": "True"} with mock.patch.dict(os.environ, envs): conn = get_s3_connection() self.assertIsInstance(conn, S3Connection)
import mock import os from boto.s3.connection import S3Connection from django.test import TestCase, override_settings from reports.utils import get_s3_connection class UtilsTestCase(TestCase): @override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002", AWS_S3_HOST="s3.eu-west-2.amazonaws.com") def test_get_s3_connection(self): envs = {"S3_USE_SIGV4": "True"} with mock.patch.dict(os.environ, envs): conn = get_s3_connection() self.assertIsInstance(conn, S3Connection)
Modify s3 connection test for new AWS_S3_HOST setting
Modify s3 connection test for new AWS_S3_HOST setting The value is now calculated from the env var at load time, so mocking the env var value is not effective (cherry picked from commit 044219df7123e3a03a38cc06c9e8e8e9e80b0cbe)
Python
mit
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
--- +++ @@ -7,9 +7,9 @@ class UtilsTestCase(TestCase): - @override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002") + @override_settings(AWS_ACCESS_KEY_ID="000000000001", AWS_SECRET_ACCESS_KEY="000000000002", AWS_S3_HOST="s3.eu-west-2.amazonaws.com") def test_get_s3_connection(self): - envs = {"AWS_S3_HOST": "s3.eu-west-2.amazonaws.com", "S3_USE_SIGV4": "True"} + envs = {"S3_USE_SIGV4": "True"} with mock.patch.dict(os.environ, envs): conn = get_s3_connection() self.assertIsInstance(conn, S3Connection)
dfd9d6c010083893814cfbc9cc4953ac8f785ecf
forge/setup.py
forge/setup.py
from setuptools import setup setup( name='mdf_forge', version='0.5.0', packages=['mdf_forge'], description='Materials Data Facility python package', long_description="Forge is the Materials Data Facility Python package to interface and leverage the MDF Data Discovery service. Forge allows users to perform simple queries and facilitiates moving and synthesizing results.", install_requires=[ "globus-sdk>=1.1.1", "requests>=2.18.1", "tqdm>=4.14.0", "six>=1.10.0" ], python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*", classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Science/Research", "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 2", "Topic :: Scientific/Engineering" ], keywords=[ "MDF", "Materials Data Facility", "materials science", "dft", "data discovery", "supercomputing", "light sources" ], license="Apache License, Version 2.0", url="https://github.com/materials-data-facility/forge" )
from setuptools import setup setup( name='mdf_forge', version='0.4.1', packages=['mdf_forge'], description='Materials Data Facility python package', long_description="Forge is the Materials Data Facility Python package to interface and leverage the MDF Data Discovery service. Forge allows users to perform simple queries and facilitiates moving and synthesizing results.", install_requires=[ "globus-sdk>=1.1.1", "requests>=2.18.1", "tqdm>=4.14.0", "six>=1.10.0" ], python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*", classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Science/Research", "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 2", "Topic :: Scientific/Engineering" ], keywords=[ "MDF", "Materials Data Facility", "materials science", "dft", "data discovery", "supercomputing", "light sources" ], license="Apache License, Version 2.0", url="https://github.com/materials-data-facility/forge" )
Change version number for release
Change version number for release
Python
apache-2.0
materials-data-facility/forge
--- +++ @@ -2,7 +2,7 @@ setup( name='mdf_forge', - version='0.5.0', + version='0.4.1', packages=['mdf_forge'], description='Materials Data Facility python package', long_description="Forge is the Materials Data Facility Python package to interface and leverage the MDF Data Discovery service. Forge allows users to perform simple queries and facilitiates moving and synthesizing results.",
9477478f81315edcc0e5859b2325ea70694ea2be
lemon/sitemaps/views.py
lemon/sitemaps/views.py
from django.shortcuts import render from django.utils.translation import get_language from lemon.sitemaps.models import Item def sitemap_xml(request): qs = Item.objects.filter(sites=request.site, enabled=True, language=get_language()) return render(request, 'sitemaps/sitemap.xml', {'object_list': qs}, content_type='application/xml')
from django.shortcuts import render from lemon.sitemaps.models import Item def sitemap_xml(request): qs = Item.objects.filter(sites=request.site, enabled=True) return render(request, 'sitemaps/sitemap.xml', {'object_list': qs}, content_type='application/xml')
Remove language filtration in sitemap.xml
Remove language filtration in sitemap.xml
Python
bsd-3-clause
trilan/lemon,trilan/lemon,trilan/lemon
--- +++ @@ -1,10 +1,9 @@ from django.shortcuts import render -from django.utils.translation import get_language from lemon.sitemaps.models import Item def sitemap_xml(request): - qs = Item.objects.filter(sites=request.site, enabled=True, language=get_language()) + qs = Item.objects.filter(sites=request.site, enabled=True) return render(request, 'sitemaps/sitemap.xml', {'object_list': qs}, content_type='application/xml')
6bf26f15855ee6e13e11a2b026ee90b9302a68a7
PyFVCOM/__init__.py
PyFVCOM/__init__.py
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '1.6.1' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = '[email protected]' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import current_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results from PyFVCOM import plot
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '1.6.1' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = '[email protected]' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import current_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ll2utm as coordinate_tools from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results from PyFVCOM import plot
Add a better name for the coordinate functions. Eventually, ll2utm will be deprecated.
Add a better name for the coordinate functions. Eventually, ll2utm will be deprecated.
Python
mit
pwcazenave/PyFVCOM
--- +++ @@ -20,6 +20,7 @@ from PyFVCOM import current_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm +from PyFVCOM import ll2utm as coordinate_tools from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools
751c38ebe052a689b7962491ffd5f54b593da397
harvesting/datahub.io/fix-urls.py
harvesting/datahub.io/fix-urls.py
import sys fix_url = sys.argv[1] for line in sys.stdin: e = line.strip().split(" ") if e[0].startswith("_:"): e[0] = "<%s>" % e[0].replace("_:",fix_url) if e[2].startswith("_:"): e[2] = "<%s>" % e[2].replace("_:",fix_url) print(" ".join(e))
import sys fix_url = sys.argv[1] dct = "<http://purl.org/dc/terms/" dcelems = ["contributor", "coverage>", "creator>", "date>", "description>", "format>", "identifier>", "language>", "publisher>", "relation>", "rights>", "source>", "subject>", "title>", "type>"] for line in sys.stdin: e = line.strip().split(" ") if e[0].startswith("_:"): e[0] = "<%s>" % e[0].replace("_:",fix_url) if e[1].startswith(dct) and e[1][len(dct):] in dcelems: e[1] = "<http://purl.org/dc/elements/1.1/" + e[1][len(dct):] if e[2].startswith("_:"): e[2] = "<%s>" % e[2].replace("_:",fix_url) print(" ".join(e))
Fix datathub DCT uris to DC
Fix datathub DCT uris to DC
Python
apache-2.0
liderproject/linghub,liderproject/linghub,liderproject/linghub,liderproject/linghub
--- +++ @@ -1,12 +1,18 @@ import sys fix_url = sys.argv[1] +dct = "<http://purl.org/dc/terms/" +dcelems = ["contributor", "coverage>", "creator>", "date>", "description>", + "format>", "identifier>", "language>", "publisher>", "relation>", + "rights>", "source>", "subject>", "title>", "type>"] for line in sys.stdin: e = line.strip().split(" ") if e[0].startswith("_:"): e[0] = "<%s>" % e[0].replace("_:",fix_url) + if e[1].startswith(dct) and e[1][len(dct):] in dcelems: + e[1] = "<http://purl.org/dc/elements/1.1/" + e[1][len(dct):] if e[2].startswith("_:"): e[2] = "<%s>" % e[2].replace("_:",fix_url) print(" ".join(e))
e8d321c35d6e0a8294e0766c3836efe192ae2df0
print_items_needing_requeue.py
print_items_needing_requeue.py
""" Walks through your greader-logs directory (or directory containing them) and prints every item_name that has been finished but has no valid .warc.gz (as determined by greader-warc-checker's .verification logs) """ import os import sys try: import simplejson as json except ImportError: import json basename = os.path.basename def main(): basedirs = sys.argv[1:] valids = set() invalids = set() for basedir in basedirs: for directory, dirnames, filenames in os.walk(basedir): if basename(directory).startswith("."): print "Skipping dotdir %r" % (directory,) continue for f in filenames: if f.startswith("."): print "Skipping dotfile %r" % (f,) continue fname = os.path.join(directory, f) if fname.endswith(".verification"): with open(fname, "rb") as fh: for line in fh: data = json.loads(line) if data["valid"]: valids.add(data["item_name"]) else: invalids.add(data["item_name"]) needs_requeue = sorted(invalids - valids) for item_name in needs_requeue: print item_name if __name__ == '__main__': main()
""" Walks through your greader-logs directory (or directory containing them) and prints every item_name that has been finished but has no valid .warc.gz (as determined by greader-warc-checker's .verification logs) """ import os import sys try: import simplejson as json except ImportError: import json basename = os.path.basename def main(): greader_items = sys.argv[1] basedirs = sys.argv[2:] assert basedirs, "Give me some basedirs containing .verification files" valids = set() invalids = set() largest = 0 for basedir in basedirs: for directory, dirnames, filenames in os.walk(basedir): if basename(directory).startswith("."): continue for f in filenames: if f.startswith("."): continue fname = os.path.join(directory, f) if fname.endswith(".verification"): with open(fname, "rb") as fh: for line in fh: data = json.loads(line) if data["valid"]: valids.add(data["item_name"]) else: invalids.add(data["item_name"]) largest = max(largest, int(data["item_name"], 10)) for n in xrange(largest): item_name = str(n).zfill(10) if not item_name in valids and os.path.exists(greader_items + '/' + item_name[:6] + '/' + item_name + '.gz'): print item_name if __name__ == '__main__': main()
Print items that are bad *or* missing
Print items that are bad *or* missing
Python
mit
ludios/greader-warc-checker
--- +++ @@ -14,18 +14,19 @@ basename = os.path.basename def main(): - basedirs = sys.argv[1:] + greader_items = sys.argv[1] + basedirs = sys.argv[2:] + assert basedirs, "Give me some basedirs containing .verification files" valids = set() invalids = set() + largest = 0 for basedir in basedirs: for directory, dirnames, filenames in os.walk(basedir): if basename(directory).startswith("."): - print "Skipping dotdir %r" % (directory,) continue for f in filenames: if f.startswith("."): - print "Skipping dotfile %r" % (f,) continue fname = os.path.join(directory, f) @@ -38,10 +39,12 @@ valids.add(data["item_name"]) else: invalids.add(data["item_name"]) + largest = max(largest, int(data["item_name"], 10)) - needs_requeue = sorted(invalids - valids) - for item_name in needs_requeue: - print item_name + for n in xrange(largest): + item_name = str(n).zfill(10) + if not item_name in valids and os.path.exists(greader_items + '/' + item_name[:6] + '/' + item_name + '.gz'): + print item_name if __name__ == '__main__':
400c8de8a3a714da21c0e2b175c6e4adad3677b9
syft/__init__.py
syft/__init__.py
import importlib import pkgutil ignore_packages = set(['test']) def import_submodules(package, recursive=True): """ Import all submodules of a module, recursively, including subpackages :param package: package (name or actual module) :type package: str | module :rtype: dict[str, types.ModuleType] """ if isinstance(package, str): package = importlib.import_module(package) results = {} for loader, name, is_pkg in pkgutil.walk_packages(package.__path__): if(name not in ignore_packages): full_name = package.__name__ + '.' + name results[full_name] = importlib.import_module(full_name) if recursive and is_pkg: results.update(import_submodules(full_name)) return results # import submodules recursively import_submodules(__name__)
import importlib import pkgutil ignore_packages = set(['test']) def import_submodules(package, recursive=True): """ Import all submodules of a module, recursively, including subpackages :param package: package (name or actual module) :type package: str | module :rtype: dict[str, types.ModuleType] """ if isinstance(package, str): package = importlib.import_module(package) results = {} for loader, name, is_pkg in pkgutil.walk_packages(package.__path__): # test submodule names are 'syft.test.*', so this matches the 'ignore_packages' above if name.split('.')[1] not in ignore_packages: full_name = package.__name__ + '.' + name results[full_name] = importlib.import_module(full_name) if recursive and is_pkg: results.update(import_submodules(full_name)) return results # import submodules recursively import_submodules(__name__)
Check for the name of the submodule we'd like to ignore in a more general way.
Check for the name of the submodule we'd like to ignore in a more general way.
Python
apache-2.0
aradhyamathur/PySyft,sajalsubodh22/PySyft,OpenMined/PySyft,dipanshunagar/PySyft,sajalsubodh22/PySyft,dipanshunagar/PySyft,joewie/PySyft,cypherai/PySyft,cypherai/PySyft,joewie/PySyft,aradhyamathur/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
--- +++ @@ -14,7 +14,8 @@ package = importlib.import_module(package) results = {} for loader, name, is_pkg in pkgutil.walk_packages(package.__path__): - if(name not in ignore_packages): + # test submodule names are 'syft.test.*', so this matches the 'ignore_packages' above + if name.split('.')[1] not in ignore_packages: full_name = package.__name__ + '.' + name results[full_name] = importlib.import_module(full_name) if recursive and is_pkg:
108fa65760ed6334181d7ed5b129a2e8e24c38d2
dsub/_dsub_version.py
dsub/_dsub_version.py
# Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Single source of truth for dsub's version. This must remain small and dependency-free so that any dsub module may import it without creating circular dependencies. Note that this module is parsed as a text file by setup.py and changes to the format of this file could break setup.py. The version should follow formatting requirements specified in PEP-440. - https://www.python.org/dev/peps/pep-0440 A typical release sequence will be versioned as: 0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ... """ DSUB_VERSION = '0.3.3.dev0'
# Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Single source of truth for dsub's version. This must remain small and dependency-free so that any dsub module may import it without creating circular dependencies. Note that this module is parsed as a text file by setup.py and changes to the format of this file could break setup.py. The version should follow formatting requirements specified in PEP-440. - https://www.python.org/dev/peps/pep-0440 A typical release sequence will be versioned as: 0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ... """ DSUB_VERSION = '0.3.3'
Update dsub version to 0.3.3
Update dsub version to 0.3.3 PiperOrigin-RevId: 266979768
Python
apache-2.0
DataBiosphere/dsub,DataBiosphere/dsub
--- +++ @@ -26,4 +26,4 @@ 0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ... """ -DSUB_VERSION = '0.3.3.dev0' +DSUB_VERSION = '0.3.3'
849b9eb93220af324343facb5f83d112de952fa0
mpltools/util.py
mpltools/util.py
import matplotlib.pyplot as plt __all__ = ['figure', 'figsize'] def figure(aspect_ratio=1.3, scale=1, width=None, *args, **kwargs): """Return matplotlib figure window. Parameters ---------- aspect_ratio : float Aspect ratio, width / height, of figure. scale : float Scale default size of the figure. width : float Figure width in inches. If None, default to rc parameters. See Also -------- figsize """ assert 'figsize' not in kwargs size = figsize(aspect_ratio=aspect_ratio, scale=scale, width=width) return plt.figure(figsize=size, *args, **kwargs) def figsize(aspect_ratio=1.3, scale=1, width=None): """Return figure size (width, height) in inches. Parameters ---------- aspect_ratio : float Aspect ratio, width / height, of figure. scale : float Scale default size of the figure. width : float Figure width in inches. If None, default to rc parameters. """ if width is None: width, h = plt.rcParams['figure.figsize'] height = width / aspect_ratio size = (width * scale, height * scale) return size
import matplotlib.pyplot as plt __all__ = ['figure', 'figsize'] def figure(aspect_ratio=1.3, scale=1, width=None, *args, **kwargs): """Return matplotlib figure window. Calculate figure height using `aspect_ratio` and *default* figure width. Parameters ---------- aspect_ratio : float Aspect ratio, width / height, of figure. scale : float Scale default size of the figure. width : float Figure width in inches. If None, default to rc parameters. See Also -------- figsize """ assert 'figsize' not in kwargs size = figsize(aspect_ratio=aspect_ratio, scale=scale, width=width) return plt.figure(figsize=size, *args, **kwargs) def figsize(aspect_ratio=1.3, scale=1, width=None): """Return figure size (width, height) in inches. Calculate figure height using `aspect_ratio` and *default* figure width. Parameters ---------- aspect_ratio : float Aspect ratio, width / height, of figure. scale : float Scale default size of the figure. width : float Figure width in inches. If None, default to rc parameters. """ if width is None: width, h = plt.rcParams['figure.figsize'] height = width / aspect_ratio size = (width * scale, height * scale) return size
Add note to docstring of `figure` and `figsize`.
ENH: Add note to docstring of `figure` and `figsize`.
Python
bsd-3-clause
tonysyu/mpltools,matteoicardi/mpltools
--- +++ @@ -6,6 +6,8 @@ def figure(aspect_ratio=1.3, scale=1, width=None, *args, **kwargs): """Return matplotlib figure window. + + Calculate figure height using `aspect_ratio` and *default* figure width. Parameters ---------- @@ -29,6 +31,8 @@ def figsize(aspect_ratio=1.3, scale=1, width=None): """Return figure size (width, height) in inches. + Calculate figure height using `aspect_ratio` and *default* figure width. + Parameters ---------- aspect_ratio : float
fef12d2a5cce5c1db488a4bb11b9c21b83a66cab
avocado/export/_json.py
avocado/export/_json.py
import json import inspect from _base import BaseExporter class JSONGeneratorEncoder(json.JSONEncoder): "Handle generator objects and expressions." def default(self, obj): if inspect.isgenerator(obj): return list(obj) return super(JSONGeneratorEncoder, self).default(obj) class JSONExporter(BaseExporter): file_extension = 'json' content_type = 'application/json' preferred_formats = ('number', 'string') def write(self, iterable, buff=None): buff = self.get_file_obj(buff) encoder = JSONGeneratorEncoder() for chunk in encoder.iterencode(self.read(iterable)): buff.write(chunk) return buff
import inspect from django.core.serializers.json import DjangoJSONEncoder from _base import BaseExporter class JSONGeneratorEncoder(DjangoJSONEncoder): "Handle generator objects and expressions." def default(self, obj): if inspect.isgenerator(obj): return list(obj) return super(JSONGeneratorEncoder, self).default(obj) class JSONExporter(BaseExporter): file_extension = 'json' content_type = 'application/json' preferred_formats = ('number', 'string') def write(self, iterable, buff=None): buff = self.get_file_obj(buff) encoder = JSONGeneratorEncoder() for chunk in encoder.iterencode(self.read(iterable)): buff.write(chunk) return buff
Update JSONGeneratorEncoder to subclass DjangoJSONEncoder This handles Decimals and datetimes
Update JSONGeneratorEncoder to subclass DjangoJSONEncoder This handles Decimals and datetimes
Python
bsd-2-clause
murphyke/avocado,murphyke/avocado,murphyke/avocado,murphyke/avocado
--- +++ @@ -1,9 +1,9 @@ -import json import inspect +from django.core.serializers.json import DjangoJSONEncoder from _base import BaseExporter -class JSONGeneratorEncoder(json.JSONEncoder): +class JSONGeneratorEncoder(DjangoJSONEncoder): "Handle generator objects and expressions." def default(self, obj): if inspect.isgenerator(obj):
f7fac123bf72af01272bc27a1dfabb788f611908
bandit/backends/smtp.py
bandit/backends/smtp.py
from __future__ import unicode_literals from django.core.mail.backends.smtp import EmailBackend as SMTPBackend from bandit.backends.base import HijackBackendMixin, LogOnlyBackendMixin class HijackSMTPBackend(HijackBackendMixin, SMTPBackend): """ This backend intercepts outgoing messages drops them to a single email address. """ pass class LogOnlySMTPBackend(LogOnlyBackendMixin, SMTPBackend): """ This backend intercepts outgoing messages and logs them, allowing only messages destined for ADMINS to be sent via SMTP. """ pass
from __future__ import unicode_literals from django.core.mail.backends.smtp import EmailBackend as SMTPBackend from bandit.backends.base import HijackBackendMixin, LogOnlyBackendMixin class HijackSMTPBackend(HijackBackendMixin, SMTPBackend): """ This backend intercepts outgoing messages drops them to a single email address. """ pass class LogOnlySMTPBackend(LogOnlyBackendMixin, SMTPBackend): """ This backend intercepts outgoing messages and logs them, allowing only messages destined for ADMINS, BANDIT_EMAIL, SERVER_EMAIL, or BANDIT_WHITELIST to be sent via SMTP. """ pass
Update LogOnlySMTPBackend docstring. Not only admin emails are allowed, all approved emails are still sent.
Update LogOnlySMTPBackend docstring. Not only admin emails are allowed, all approved emails are still sent.
Python
bsd-3-clause
caktus/django-email-bandit,caktus/django-email-bandit
--- +++ @@ -16,6 +16,7 @@ class LogOnlySMTPBackend(LogOnlyBackendMixin, SMTPBackend): """ This backend intercepts outgoing messages and logs them, allowing - only messages destined for ADMINS to be sent via SMTP. + only messages destined for ADMINS, BANDIT_EMAIL, SERVER_EMAIL, or + BANDIT_WHITELIST to be sent via SMTP. """ pass
527593c5f183054e330894e6b7161e24cca265a5
lily/notes/factories.py
lily/notes/factories.py
import random import factory from factory.declarations import SubFactory, SelfAttribute, LazyAttribute from factory.django import DjangoModelFactory from faker.factory import Factory from lily.accounts.factories import AccountFactory from lily.contacts.factories import ContactFactory from lily.users.factories import LilyUserFactory from .models import Note faker = Factory.create('nl_NL') class NoteFactory(DjangoModelFactory): content = LazyAttribute(lambda o: faker.text()) author = SubFactory(LilyUserFactory, tenant=SelfAttribute('..tenant')) @factory.lazy_attribute def subject(self): SubjectFactory = random.choice([AccountFactory, ContactFactory]) return SubjectFactory(tenant=self.tenant) class Meta: model = Note
import random from datetime import datetime import pytz import factory from factory.declarations import SubFactory, SelfAttribute, LazyAttribute from factory.django import DjangoModelFactory from faker.factory import Factory from lily.accounts.factories import AccountFactory from lily.contacts.factories import ContactFactory from lily.users.factories import LilyUserFactory from .models import Note faker = Factory.create('nl_NL') class NoteFactory(DjangoModelFactory): content = LazyAttribute(lambda o: faker.text()) author = SubFactory(LilyUserFactory, tenant=SelfAttribute('..tenant')) sort_by_date = LazyAttribute(lambda o: datetime.now(tz=pytz.utc)) @factory.lazy_attribute def subject(self): SubjectFactory = random.choice([AccountFactory, ContactFactory]) return SubjectFactory(tenant=self.tenant) class Meta: model = Note
Fix so testdata can be loaded when setting up local environment
Fix so testdata can be loaded when setting up local environment
Python
agpl-3.0
HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily,HelloLily/hellolily
--- +++ @@ -1,5 +1,7 @@ import random +from datetime import datetime +import pytz import factory from factory.declarations import SubFactory, SelfAttribute, LazyAttribute from factory.django import DjangoModelFactory @@ -18,6 +20,7 @@ class NoteFactory(DjangoModelFactory): content = LazyAttribute(lambda o: faker.text()) author = SubFactory(LilyUserFactory, tenant=SelfAttribute('..tenant')) + sort_by_date = LazyAttribute(lambda o: datetime.now(tz=pytz.utc)) @factory.lazy_attribute def subject(self):
245dd2ef403cd88aebf5dd8923585a9e0489dd97
mongoalchemy/util.py
mongoalchemy/util.py
# The MIT License # # Copyright (c) 2010 Jeffrey Jenkins # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. def classproperty(fun): class Descriptor(property): def __get__(self, instance, owner): return fun(owner) return Descriptor() class UNSET(object): def __repr__(self): return 'UNSET' def __eq__(self, other): return other.__class__ == self.__class__ UNSET = UNSET()
# The MIT License # # Copyright (c) 2010 Jeffrey Jenkins # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. def classproperty(fun): class Descriptor(property): def __get__(self, instance, owner): return fun(owner) return Descriptor() class UNSET(object): def __repr__(self): return 'UNSET' def __eq__(self, other): return other.__class__ == self.__class__ def __nonzero__(self): return False UNSET = UNSET()
Change UNSET to so bool(UNSET) is False.
Change UNSET to so bool(UNSET) is False.
Python
mit
shakefu/MongoAlchemy,shakefu/MongoAlchemy,shakefu/MongoAlchemy
--- +++ @@ -31,5 +31,7 @@ return 'UNSET' def __eq__(self, other): return other.__class__ == self.__class__ + def __nonzero__(self): + return False UNSET = UNSET()
3b50b38ff71c2a35376eccfffbba700815868e68
massa/default_config.py
massa/default_config.py
# -*- coding: utf-8 -*- DEBUG = True SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False
# -*- coding: utf-8 -*- DEBUG = False SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False
Disable debug mode in default configuration.
Disable debug mode in default configuration.
Python
mit
jaapverloop/massa
--- +++ @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -DEBUG = True +DEBUG = False SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False
ba98874be9370ec49c2c04e89d456f723b5d083c
monitoring/test/test_data/exceptions.py
monitoring/test/test_data/exceptions.py
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from monascaclient.openstack.common.apiclient import exceptions as monascacli from openstack_dashboard.test.test_data import exceptions def data(TEST): TEST.exceptions = exceptions.data monitoring_exception = monascacli.ClientException TEST.exceptions.monitoring = exceptions.create_stubbed_exception( monitoring_exception)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # NOTE(dmllr): Remove me when we require monascaclient >= 1.3.0 try: from monascaclient.apiclient import exceptions as monascacli except ImportError: from monascaclient.openstack.common.apiclient import exceptions as monascacli from openstack_dashboard.test.test_data import exceptions def data(TEST): TEST.exceptions = exceptions.data monitoring_exception = monascacli.ClientException TEST.exceptions.monitoring = exceptions.create_stubbed_exception( monitoring_exception)
Adjust tests for python-monascaclient >= 1.3.0
Adjust tests for python-monascaclient >= 1.3.0 the exceptions module was moved out of the openstack.common namespace, so try to import the new location first and fall back to the old one if it doesn't exist. Change-Id: I3305775baaab15dca8d5e7e5cfc0932f94d4d153
Python
apache-2.0
openstack/monasca-ui,openstack/monasca-ui,openstack/monasca-ui,stackforge/monasca-ui,stackforge/monasca-ui,stackforge/monasca-ui,stackforge/monasca-ui,openstack/monasca-ui
--- +++ @@ -11,7 +11,12 @@ # License for the specific language governing permissions and limitations # under the License. -from monascaclient.openstack.common.apiclient import exceptions as monascacli +# NOTE(dmllr): Remove me when we require monascaclient >= 1.3.0 +try: + from monascaclient.apiclient import exceptions as monascacli +except ImportError: + from monascaclient.openstack.common.apiclient import exceptions as monascacli + from openstack_dashboard.test.test_data import exceptions
8ef41f9ac8ec8a7b7fc9e63b2ff6453782c41d62
demo/__init__.py
demo/__init__.py
"""Package for PythonTemplateDemo.""" __project__ = 'PythonTemplateDemo' __version__ = '0.0.0' VERSION = __project__ + '-' + __version__
"""Package for PythonTemplateDemo.""" __project__ = 'PythonTemplateDemo' __version__ = '0.0.0' VERSION = __project__ + '-' + __version__ PYTHON_VERSION = 3, 4 import sys if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test) exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
Deploy Travis CI build 381 to GitHub
Deploy Travis CI build 381 to GitHub
Python
mit
jacebrowning/template-python-demo
--- +++ @@ -4,3 +4,9 @@ __version__ = '0.0.0' VERSION = __project__ + '-' + __version__ + +PYTHON_VERSION = 3, 4 + +import sys +if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test) + exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
db1aaa7f7e28c901b2b427236f9942aa78d5ae34
taemin/plugins/cafe/plugin.py
taemin/plugins/cafe/plugin.py
#!/usr/bin/env python2 # -*- coding: utf8 -*- from taemin import plugin class TaeminCafe(plugin.TaeminPlugin): helper = {"all": "Envoie un message à tout le monde", "cafe": "Appelle tout le monde pour prendre un café ;)"} def on_pubmsg(self, msg): if msg.key not in ("all", "cafe"): return chan = msg.chan.name message = " ".join([user.name for user in self.taemin.list_users(msg.chan)]) if msg.key == "cafe": message = "<<< CAFE !!! \\o/ %s \\o/ !!! CAFE >>>" % message else: message = "%s %s" % (message, msg.value) self.privmsg(chan, message)
#!/usr/bin/env python2 # -*- coding: utf8 -*- from taemin import plugin class TaeminCafe(plugin.TaeminPlugin): helper = {"all": "Envoie un message à tout le monde", "cafe": "Appelle tout le monde pour prendre un café ;)"} def on_pubmsg(self, msg): if msg.key not in ("all", 'tous', "cafe"): return chan = msg.chan.name message = " ".join([user.name for user in self.taemin.list_users(msg.chan)]) if msg.key == "cafe": message = "<<< CAFE !!! \\o/ %s \\o/ !!! CAFE >>>" % message else: message = "%s %s" % (message, msg.value) self.privmsg(chan, message)
Add an alias !tous for !all
Add an alias !tous for !all
Python
mit
ningirsu/taemin,ningirsu/taemin
--- +++ @@ -9,7 +9,7 @@ def on_pubmsg(self, msg): - if msg.key not in ("all", "cafe"): + if msg.key not in ("all", 'tous', "cafe"): return chan = msg.chan.name
edf151feea948ebf4a9f00a0248ab1f363cacfac
scaffolder/commands/install.py
scaffolder/commands/install.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from optparse import make_option from optparse import OptionParser from scaffolder import get_minion_path from scaffolder.core.template import TemplateManager from scaffolder.core.commands import BaseCommand class InstallCommand(BaseCommand): option_list = BaseCommand.option_list + ( make_option( "-t", "--target", dest="target_dir", default=get_minion_path('weaver'), help='Project Templates directory.', metavar="TEMPLATES_DIR" ), ) def __init__(self, name, help='', aliases=(), stdout=None, stderr=None): help = 'install: Installs a Project Template.' parser = OptionParser( version=self.get_version(), option_list=self.get_option_list(), usage='\n %prog {0} ACTION [OPTIONS]'.format(name) ) aliases = ('tmp',) BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases) def run(self, *args, **options): src = args[0] tgt = options.get('target_dir') manager = TemplateManager() manager.install(src=src, dest=tgt)
#!/usr/bin/env python # -*- coding: utf-8 -*- from optparse import make_option from optparse import OptionParser from scaffolder import get_minion_path from scaffolder.core.template import TemplateManager from scaffolder.core.commands import BaseCommand class InstallCommand(BaseCommand): option_list = BaseCommand.option_list + ( make_option( "-t", "--target", dest="target_dir", default=get_minion_path('weaver'), help='Project Templates directory.', metavar="TEMPLATES_DIR" ), ) help = 'Installs a Project Template.' def run(self, *args, **options): src = args[0] tgt = options.get('target_dir') manager = TemplateManager() manager.install(src=src, dest=tgt)
Remove __init__ method, not needed.
InstallCommand: Remove __init__ method, not needed.
Python
mit
goliatone/minions
--- +++ @@ -19,15 +19,8 @@ ), ) - def __init__(self, name, help='', aliases=(), stdout=None, stderr=None): - help = 'install: Installs a Project Template.' - parser = OptionParser( - version=self.get_version(), - option_list=self.get_option_list(), - usage='\n %prog {0} ACTION [OPTIONS]'.format(name) - ) - aliases = ('tmp',) - BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases) + help = 'Installs a Project Template.' + def run(self, *args, **options): src = args[0]
378b820bd2a474640974ccefc7b224caebc9400f
saleor/graphql/order/resolvers.py
saleor/graphql/order/resolvers.py
from django.core.exceptions import PermissionDenied from ...order import models from ..utils import get_node from .types import Order def resolve_orders(info): user = info.context.user if user.is_anonymous: raise PermissionDenied('You have no permission to see this') if user.get_all_permissions() & {'order.view_order', 'order.edit_order'}: return models.Order.objects.all().distinct().prefetch_related('lines') return user.orders.confirmed().distinct().prefetch_related('lines') def resolve_order(info, id): """Return order only for user assigned to it or proper staff user.""" order = get_node(info, id, only_type=Order) user = info.context.user if (order.user == user or user.get_all_permissions() & { 'order.view_order', 'order.edit_order'}): return order
from django.core.exceptions import PermissionDenied from ...order import models from ..utils import get_node from .types import Order def resolve_orders(info): user = info.context.user if user.is_anonymous: raise PermissionDenied('You have no permission to see this order.') if user.get_all_permissions() & {'order.view_order', 'order.edit_order'}: return models.Order.objects.all().distinct().prefetch_related('lines') return user.orders.confirmed().distinct().prefetch_related('lines') def resolve_order(info, id): """Return order only for user assigned to it or proper staff user.""" order = get_node(info, id, only_type=Order) user = info.context.user if (order.user == user or user.get_all_permissions() & { 'order.view_order', 'order.edit_order'}): return order
Add more info to the permission denied exception
Add more info to the permission denied exception
Python
bsd-3-clause
mociepka/saleor,UITools/saleor,mociepka/saleor,UITools/saleor,UITools/saleor,maferelo/saleor,UITools/saleor,UITools/saleor,maferelo/saleor,maferelo/saleor,mociepka/saleor
--- +++ @@ -8,7 +8,7 @@ def resolve_orders(info): user = info.context.user if user.is_anonymous: - raise PermissionDenied('You have no permission to see this') + raise PermissionDenied('You have no permission to see this order.') if user.get_all_permissions() & {'order.view_order', 'order.edit_order'}: return models.Order.objects.all().distinct().prefetch_related('lines') return user.orders.confirmed().distinct().prefetch_related('lines')
bc47862e89f73ec152a57bf43126653a981cd411
suggestions/tests.py
suggestions/tests.py
from django.test import TestCase from django.contrib.auth.models import User from mks.models import Member from .models import Suggestion class SuggestionsTests(TestCase): def setUp(self): self.member = Member.objects.create(name='mk_1') self.regular_user = User.objects.create_user('reg_user') def test_simple_text_suggestion(self): MK_SITE = 'http://mk1.example.com' suggestion = Suggestion.objects.create_suggestion( suggested_by=self.regular_user, content_object=self.member, suggestion_action=Suggestion.UPDATE, suggested_field='website', suggested_text=MK_SITE ) self.assertIsNone(self.member.website) suggestion.auto_apply() mk = Member.objects.get(pk=self.member.pk) self.assertEqual(mk.website, MK_SITE)
from django.test import TestCase from django.contrib.auth.models import User from mks.models import Member from .models import Suggestion class SuggestionsTests(TestCase): def setUp(self): self.member = Member.objects.create(name='mk_1') self.regular_user = User.objects.create_user('reg_user') def test_simple_text_suggestion(self): MK_SITE = 'http://mk1.example.com' suggestion = Suggestion.objects.create_suggestion( suggested_by=self.regular_user, content_object=self.member, suggestion_action=Suggestion.UPDATE, suggested_field='website', suggested_text=MK_SITE ) self.assertIsNone(self.member.website) suggestion.auto_apply() mk = Member.objects.get(pk=self.member.pk) self.assertEqual(mk.website, MK_SITE) # cleanup mk.website = None mk.save() self.member = mk
Undo member changes in test
Undo member changes in test
Python
bsd-3-clause
MeirKriheli/Open-Knesset,jspan/Open-Knesset,navotsil/Open-Knesset,navotsil/Open-Knesset,DanaOshri/Open-Knesset,alonisser/Open-Knesset,jspan/Open-Knesset,noamelf/Open-Knesset,daonb/Open-Knesset,habeanf/Open-Knesset,noamelf/Open-Knesset,DanaOshri/Open-Knesset,OriHoch/Open-Knesset,otadmor/Open-Knesset,alonisser/Open-Knesset,MeirKriheli/Open-Knesset,ofri/Open-Knesset,Shrulik/Open-Knesset,navotsil/Open-Knesset,Shrulik/Open-Knesset,ofri/Open-Knesset,ofri/Open-Knesset,otadmor/Open-Knesset,otadmor/Open-Knesset,Shrulik/Open-Knesset,daonb/Open-Knesset,daonb/Open-Knesset,MeirKriheli/Open-Knesset,navotsil/Open-Knesset,noamelf/Open-Knesset,Shrulik/Open-Knesset,jspan/Open-Knesset,OriHoch/Open-Knesset,OriHoch/Open-Knesset,alonisser/Open-Knesset,habeanf/Open-Knesset,DanaOshri/Open-Knesset,OriHoch/Open-Knesset,otadmor/Open-Knesset,daonb/Open-Knesset,habeanf/Open-Knesset,ofri/Open-Knesset,noamelf/Open-Knesset,alonisser/Open-Knesset,MeirKriheli/Open-Knesset,habeanf/Open-Knesset,jspan/Open-Knesset,DanaOshri/Open-Knesset
--- +++ @@ -28,3 +28,9 @@ mk = Member.objects.get(pk=self.member.pk) self.assertEqual(mk.website, MK_SITE) + + # cleanup + mk.website = None + mk.save() + + self.member = mk
f69bc50985a644f90c3f59d06cb7b99a6aeb3b53
migrations/versions/0209_email_branding_update.py
migrations/versions/0209_email_branding_update.py
""" Revision ID: 0209_email_branding_update Revises: 84c3b6eb16b3 Create Date: 2018-07-25 16:08:15.713656 """ from alembic import op import sqlalchemy as sa revision = '0209_email_branding_update' down_revision = '84c3b6eb16b3' def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('email_branding', sa.Column('text', sa.String(length=255), nullable=True)) op.execute('UPDATE email_branding SET text = name') op.execute('UPDATE email_branding SET name = NULL') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('email_branding', 'text') # ### end Alembic commands ###
""" Revision ID: 0209_email_branding_update Revises: 84c3b6eb16b3 Create Date: 2018-07-25 16:08:15.713656 """ from alembic import op import sqlalchemy as sa revision = '0209_email_branding_update' down_revision = '84c3b6eb16b3' def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('email_branding', sa.Column('text', sa.String(length=255), nullable=True)) op.execute('UPDATE email_branding SET text = name') op.execute('UPDATE email_branding SET name = NULL') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.execute('UPDATE email_branding SET name = text') op.drop_column('email_branding', 'text') # ### end Alembic commands ###
Move data back before dropping the column for downgrade
Move data back before dropping the column for downgrade
Python
mit
alphagov/notifications-api,alphagov/notifications-api
--- +++ @@ -23,5 +23,6 @@ def downgrade(): # ### commands auto generated by Alembic - please adjust! ### + op.execute('UPDATE email_branding SET name = text') op.drop_column('email_branding', 'text') # ### end Alembic commands ###
a97c6ebda62762501fdf5f18326c8c518d73635f
securedrop/source_app/forms.py
securedrop/source_app/forms.py
from flask_babel import gettext from flask_wtf import FlaskForm from wtforms import PasswordField from wtforms.validators import InputRequired, Regexp, Length from db import Source class LoginForm(FlaskForm): codename = PasswordField('codename', validators=[ InputRequired(message=gettext('This field is required.')), Length(1, Source.MAX_CODENAME_LEN, message=gettext('Field must be between 1 and ' '{max_codename_len} characters long. '.format( max_codename_len=Source.MAX_CODENAME_LEN))), # The regex here allows either whitespace (\s) or # alphanumeric characters (\W) except underscore (_) Regexp(r'(\s|[^\W_])+$', message=gettext('Invalid input.')) ])
from flask_babel import gettext from flask_wtf import FlaskForm from wtforms import PasswordField from wtforms.validators import InputRequired, Regexp, Length from db import Source class LoginForm(FlaskForm): codename = PasswordField('codename', validators=[ InputRequired(message=gettext('This field is required.')), Length(1, Source.MAX_CODENAME_LEN, message=gettext('Field must be between 1 and ' '{max_codename_len} characters long. '.format( max_codename_len=Source.MAX_CODENAME_LEN))), # Make sure to allow dashes since some words in the wordlist have them Regexp(r'[\sA-Za-z0-9-]+$', message=gettext('Invalid input.')) ])
Use @dachary's much clearer regex to validate codenames
Use @dachary's much clearer regex to validate codenames
Python
agpl-3.0
micahflee/securedrop,conorsch/securedrop,garrettr/securedrop,heartsucker/securedrop,conorsch/securedrop,heartsucker/securedrop,conorsch/securedrop,ehartsuyker/securedrop,micahflee/securedrop,garrettr/securedrop,ehartsuyker/securedrop,garrettr/securedrop,garrettr/securedrop,micahflee/securedrop,ehartsuyker/securedrop,conorsch/securedrop,micahflee/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,conorsch/securedrop,ehartsuyker/securedrop,heartsucker/securedrop
--- +++ @@ -13,7 +13,6 @@ message=gettext('Field must be between 1 and ' '{max_codename_len} characters long. '.format( max_codename_len=Source.MAX_CODENAME_LEN))), - # The regex here allows either whitespace (\s) or - # alphanumeric characters (\W) except underscore (_) - Regexp(r'(\s|[^\W_])+$', message=gettext('Invalid input.')) + # Make sure to allow dashes since some words in the wordlist have them + Regexp(r'[\sA-Za-z0-9-]+$', message=gettext('Invalid input.')) ])
52a3ab97f888734db3c602ac69a33660e6026bb6
linked-list/remove-k-from-list.py
linked-list/remove-k-from-list.py
# Given a singly linked list of integers l and an integer k, remove all elements from list l that have a value equal to k class Node(object): # define constructor def __init__(self, value): self.value = value self.next = None class LinkedList(object): def __init__(self, head=None): self.head = head def add(self, new_node): current_node = self.head if self.head: while current_node.next: current_node = current_node.next current_node.next = new_node else: self.head = new_node def __repr__(self): current_node = self.head output_arr = [] while current_node: output_arr.append(str(current_node.data)) current_node = current_node.next return "->".join(output_arr)
# Given a singly linked list of integers l and an integer k, remove all elements from list l that have a value equal to k class Node(object): def __init__(self, value): self.value = value self.next = None def remove_k_from_list(l, k): fake_head = Node(None) fake_head.next = l current_node = fake_head while current_node: while current_node.next and current_node.next.value == k: current_node.next = current_node.next.next current_node = current_node.next return fake_head.next
Remove linked list class and implement algorithm just using single method
Remove linked list class and implement algorithm just using single method
Python
mit
derekmpham/interview-prep,derekmpham/interview-prep
--- +++ @@ -1,27 +1,17 @@ # Given a singly linked list of integers l and an integer k, remove all elements from list l that have a value equal to k -class Node(object): # define constructor +class Node(object): def __init__(self, value): self.value = value self.next = None -class LinkedList(object): - def __init__(self, head=None): - self.head = head +def remove_k_from_list(l, k): + fake_head = Node(None) + fake_head.next = l + current_node = fake_head - def add(self, new_node): - current_node = self.head - if self.head: - while current_node.next: - current_node = current_node.next - current_node.next = new_node - else: - self.head = new_node - - def __repr__(self): - current_node = self.head - output_arr = [] - while current_node: - output_arr.append(str(current_node.data)) - current_node = current_node.next - return "->".join(output_arr) + while current_node: + while current_node.next and current_node.next.value == k: + current_node.next = current_node.next.next + current_node = current_node.next + return fake_head.next
bdfa468b4d60f326d6744b9a4766c228e8b1d692
python/tak/alphazero/config.py
python/tak/alphazero/config.py
from attrs import define, field from tak import mcts import torch from typing import Optional @define(slots=False) class Config: device: str = "cuda" server_port: int = 5001 lr: float = 1e-3 size: int = 3 rollout_config: mcts.Config = field( factory=lambda: mcts.Config( simulation_limit=25, root_noise_alpha=1.0, root_noise_mix=0.25, ) ) rollout_resignation_threshold: float = 0.95 rollout_ply_limit: int = 100 rollout_workers: int = 50 rollouts_per_step: int = 100 replay_buffer_steps: int = 4 train_batch: int = 64 train_positions: int = 1024 train_dtype: torch.dtype = torch.float32 serve_dtype: torch.dtype = torch.float16 save_path: Optional[str] = None save_freq: int = 10 train_steps: int = 10 wandb: bool = False job_name: Optional[str] = None project: str = "taktician-alphazero" def __attrs_post_init__(self): if self.device == "cpu": self.serve_dtype = torch.float32
from attrs import define, field from tak import mcts import torch from typing import Optional @define(slots=False) class Config: device: str = "cuda" server_port: int = 5432 lr: float = 1e-3 size: int = 3 rollout_config: mcts.Config = field( factory=lambda: mcts.Config( simulation_limit=25, root_noise_alpha=1.0, root_noise_mix=0.25, ) ) rollout_resignation_threshold: float = 0.95 rollout_ply_limit: int = 100 rollout_workers: int = 50 rollouts_per_step: int = 100 replay_buffer_steps: int = 4 train_batch: int = 64 train_positions: int = 1024 train_dtype: torch.dtype = torch.float32 serve_dtype: torch.dtype = torch.float16 save_path: Optional[str] = None save_freq: int = 10 train_steps: int = 10 wandb: bool = False job_name: Optional[str] = None project: str = "taktician-alphazero" def __attrs_post_init__(self): if self.device == "cpu": self.serve_dtype = torch.float32
Use a different default port
Use a different default port
Python
mit
nelhage/taktician,nelhage/taktician,nelhage/taktician,nelhage/taktician
--- +++ @@ -7,7 +7,7 @@ @define(slots=False) class Config: device: str = "cuda" - server_port: int = 5001 + server_port: int = 5432 lr: float = 1e-3
03d07a20928997ecc136884110311453217443c3
reportlab/platypus/__init__.py
reportlab/platypus/__init__.py
#copyright ReportLab Inc. 2000 #see license.txt for license details #history http://cvs.sourceforge.net/cgi-bin/cvsweb.cgi/reportlab/platypus/__init__.py?cvsroot=reportlab #$Header: /tmp/reportlab/reportlab/platypus/__init__.py,v 1.12 2000/11/29 17:28:50 rgbecker Exp $ __version__=''' $Id: __init__.py,v 1.12 2000/11/29 17:28:50 rgbecker Exp $ ''' __doc__='' from reportlab.platypus.flowables import Flowable, Image, Macro, PageBreak, Preformatted, Spacer, XBox, \ CondPageBreak, KeepTogether from reportlab.platypus.paragraph import Paragraph, cleanBlockQuotedText, ParaLines from reportlab.platypus.paraparser import ParaFrag from reportlab.platypus.tables import Table, TableStyle, CellStyle from reportlab.platypus.frames import Frame from reportlab.platypus.doctemplate import BaseDocTemplate, NextPageTemplate, PageTemplate, ActionFlowable, \ SimpleDocTemplate, FrameBreak from xpreformatted import XPreformatted
#copyright ReportLab Inc. 2000 #see license.txt for license details #history http://cvs.sourceforge.net/cgi-bin/cvsweb.cgi/reportlab/platypus/__init__.py?cvsroot=reportlab #$Header: /tmp/reportlab/reportlab/platypus/__init__.py,v 1.13 2002/03/15 09:03:37 rgbecker Exp $ __version__=''' $Id: __init__.py,v 1.13 2002/03/15 09:03:37 rgbecker Exp $ ''' __doc__='' from reportlab.platypus.flowables import Flowable, Image, Macro, PageBreak, Preformatted, Spacer, XBox, \ CondPageBreak, KeepTogether from reportlab.platypus.paragraph import Paragraph, cleanBlockQuotedText, ParaLines from reportlab.platypus.paraparser import ParaFrag from reportlab.platypus.tables import Table, TableStyle, CellStyle from reportlab.platypus.frames import Frame from reportlab.platypus.doctemplate import BaseDocTemplate, NextPageTemplate, PageTemplate, ActionFlowable, \ SimpleDocTemplate, FrameBreak, PageBegin from xpreformatted import XPreformatted
Add PageBegin to pkg exports
Add PageBegin to pkg exports
Python
bsd-3-clause
makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile,makinacorpus/reportlab-ecomobile
--- +++ @@ -1,8 +1,8 @@ #copyright ReportLab Inc. 2000 #see license.txt for license details #history http://cvs.sourceforge.net/cgi-bin/cvsweb.cgi/reportlab/platypus/__init__.py?cvsroot=reportlab -#$Header: /tmp/reportlab/reportlab/platypus/__init__.py,v 1.12 2000/11/29 17:28:50 rgbecker Exp $ -__version__=''' $Id: __init__.py,v 1.12 2000/11/29 17:28:50 rgbecker Exp $ ''' +#$Header: /tmp/reportlab/reportlab/platypus/__init__.py,v 1.13 2002/03/15 09:03:37 rgbecker Exp $ +__version__=''' $Id: __init__.py,v 1.13 2002/03/15 09:03:37 rgbecker Exp $ ''' __doc__='' from reportlab.platypus.flowables import Flowable, Image, Macro, PageBreak, Preformatted, Spacer, XBox, \ CondPageBreak, KeepTogether @@ -11,5 +11,5 @@ from reportlab.platypus.tables import Table, TableStyle, CellStyle from reportlab.platypus.frames import Frame from reportlab.platypus.doctemplate import BaseDocTemplate, NextPageTemplate, PageTemplate, ActionFlowable, \ - SimpleDocTemplate, FrameBreak + SimpleDocTemplate, FrameBreak, PageBegin from xpreformatted import XPreformatted
f678f5c1a197c504ae6703f3b4e5658f9e2db1f6
setuptools/tests/py26compat.py
setuptools/tests/py26compat.py
import sys import unittest import tarfile try: # provide skipIf for Python 2.4-2.6 skipIf = unittest.skipIf except AttributeError: def skipIf(condition, reason): def skipper(func): def skip(*args, **kwargs): return if condition: return skip return func return skipper def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ res = tarfile.open(*args, **kwargs) res.__exit__ = lambda exc_type, exc_value, traceback: self.close() res.__enter__ = lambda: res return res tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open tarfile_open = _tarfile_open_ex
import sys import unittest import tarfile try: # provide skipIf for Python 2.4-2.6 skipIf = unittest.skipIf except AttributeError: def skipIf(condition, reason): def skipper(func): def skip(*args, **kwargs): return if condition: return skip return func return skipper def _tarfile_open_ex(*args, **kwargs): """ Extend result as a context manager. """ res = tarfile.open(*args, **kwargs) res.__exit__ = lambda exc_type, exc_value, traceback: res.close() res.__enter__ = lambda: res return res tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
Remove spurious reference to self. Remove debugging code.
Remove spurious reference to self. Remove debugging code.
Python
mit
pypa/setuptools,pypa/setuptools,pypa/setuptools
--- +++ @@ -20,9 +20,8 @@ Extend result as a context manager. """ res = tarfile.open(*args, **kwargs) - res.__exit__ = lambda exc_type, exc_value, traceback: self.close() + res.__exit__ = lambda exc_type, exc_value, traceback: res.close() res.__enter__ = lambda: res return res tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open -tarfile_open = _tarfile_open_ex
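The record above makes tarfile.open usable in a `with` block on Python 2.6 by bolting __enter__/__exit__ onto the returned TarFile, and the fix swaps a stray self.close() for res.close(). A minimal sketch of the same idea built only from the standard library follows; the helper name open_tar is mine, not the project's.

import tarfile
from contextlib import closing

def open_tar(*args, **kwargs):
    # On Pythons where TarFile is already a context manager, return it as-is;
    # otherwise contextlib.closing supplies __enter__/__exit__ and calls
    # close() on exit -- the same behaviour the patched lambdas provide.
    res = tarfile.open(*args, **kwargs)
    if hasattr(res, "__exit__"):
        return res
    return closing(res)

# usage:
#     with open_tar("archive.tar.gz") as tf:
#         tf.extractall("dest")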
427dab842e2d8aea1610c3e23d792119dc60c94b
moksha/widgets/jquery_ui_theme.py
moksha/widgets/jquery_ui_theme.py
from tw.api import Widget, CSSLink class JQueryUITheme(Widget): css = [CSSLink(link='/css/jquery-ui/ui.theme.css', modname=__name__)] template = ''
""" :mod:`moksha.widgets.jquery_ui_theme` - jQuery UI Theme ======================================================= .. moduleauthor:: Luke Macken <[email protected]> """ from tw.api import Widget, CSSLink, CSSLink ui_theme_css = CSSLink(link='/css/jquery-ui/ui.theme.css', modname=__name__) ui_base_css = CSSLink(link='/css/jquery-ui/ui.base.css', css=[ui_theme_css], modname=__name__)
Update our customized jQuery ui widgets
Update our customized jQuery ui widgets
Python
apache-2.0
pombredanne/moksha,lmacken/moksha,mokshaproject/moksha,mokshaproject/moksha,lmacken/moksha,pombredanne/moksha,lmacken/moksha,mokshaproject/moksha,ralphbean/moksha,ralphbean/moksha,mokshaproject/moksha,ralphbean/moksha,pombredanne/moksha,pombredanne/moksha
--- +++ @@ -1,5 +1,13 @@ -from tw.api import Widget, CSSLink +""" +:mod:`moksha.widgets.jquery_ui_theme` - jQuery UI Theme +======================================================= -class JQueryUITheme(Widget): - css = [CSSLink(link='/css/jquery-ui/ui.theme.css', modname=__name__)] - template = '' +.. moduleauthor:: Luke Macken <[email protected]> +""" + +from tw.api import Widget, CSSLink, CSSLink + +ui_theme_css = CSSLink(link='/css/jquery-ui/ui.theme.css', modname=__name__) +ui_base_css = CSSLink(link='/css/jquery-ui/ui.base.css', + css=[ui_theme_css], + modname=__name__)
79d8c1e95f3c876e600e1637253c7afcf3f36763
nn_patterns/explainer/__init__.py
nn_patterns/explainer/__init__.py
from .base import * from .gradient_based import * from .misc import * from .pattern_based import * from .relevance_based import * def create_explainer(name, output_layer, patterns=None, to_layer=None, **kwargs): return { # Gradient based "gradient": GradientExplainer, "deconvnet": DeConvNetExplainer, "guided": GuidedBackpropExplainer, "gradient.alt": AlternativeGradientExplainer, # Relevance based "lrp.z": LRPZExplainer, "lrp.eps": LRPEpsExplainer, # Pattern based "patternnet": PatternNetExplainer, "patternnet.guided": GuidedPatternNetExplainer, "patternlrp": PatternLRPExplainer, }[name](output_layer, patterns=patterns, to_layer=to_layer, **kwargs)
from .base import * from .gradient_based import * from .misc import * from .pattern_based import * from .relevance_based import * def create_explainer(name, output_layer, patterns=None, to_layer=None, **kwargs): return { # Utility. "input": InputExplainer, "random": RandomExplainer, # Gradient based "gradient": GradientExplainer, "deconvnet": DeConvNetExplainer, "guided": GuidedBackpropExplainer, "gradient.alt": AlternativeGradientExplainer, # Relevance based "lrp.z": LRPZExplainer, "lrp.eps": LRPEpsExplainer, # Pattern based "patternnet": PatternNetExplainer, "patternnet.guided": GuidedPatternNetExplainer, "patternlrp": PatternLRPExplainer, }[name](output_layer, patterns=patterns, to_layer=to_layer, **kwargs)
Add input and random explainer to utility function.
Add input and random explainer to utility function.
Python
mit
pikinder/nn-patterns
--- +++ @@ -10,6 +10,10 @@ def create_explainer(name, output_layer, patterns=None, to_layer=None, **kwargs): return { + # Utility. + "input": InputExplainer, + "random": RandomExplainer, + # Gradient based "gradient": GradientExplainer, "deconvnet": DeConvNetExplainer,
689417cef23297e54b5f082e31539bd2381798bf
Persistence/RedisPersist.py
Persistence/RedisPersist.py
import redis class RedisPersist: _redis_connection = None def __init__(self, host="localhost", port=6379, db=0): self._redis_connection = redis.StrictRedis( host=host, port=port, db=db ) self._redis_connection.set('tmp_validate', 'tmp_validate') def save(self, key=None, jsonstr=None): if key is None: raise ValueError("Key must be present to persist game.") if jsonstr is None: raise ValueError("JSON is badly formed or not present") self._redis_connection.set(key, str(jsonstr)) def load(self, key=None): if key is None: raise ValueError("Key must be present to load game") return_result = self._redis_connection.get(key) if return_result is not None: return_result = str(return_result) return return_result
import redis class RedisPersist: _redis_connection = None def __init__(self, host="localhost", port=6379, db=0): self._redis_connection = redis.StrictRedis( host=host, port=port, db=db ) self._redis_connection.set('tmp_validate', 'tmp_validate') def save(self, key=None, jsonstr=None): if key is None: raise ValueError("Key must be present to persist game.") if jsonstr is None: raise ValueError("JSON is badly formed or not present") self._redis_connection.set(key, str(jsonstr), ex=(60*60)) def load(self, key=None): if key is None: raise ValueError("Key must be present to load game") return_result = self._redis_connection.get(key) if return_result is not None: return_result = str(return_result) return return_result
Remove debugging statements and provide support for Python 2.7
Remove debugging statements and provide support for Python 2.7
Python
apache-2.0
dsandersAzure/python_cowbull_server,dsandersAzure/python_cowbull_server
--- +++ @@ -17,7 +17,7 @@ raise ValueError("Key must be present to persist game.") if jsonstr is None: raise ValueError("JSON is badly formed or not present") - self._redis_connection.set(key, str(jsonstr)) + self._redis_connection.set(key, str(jsonstr), ex=(60*60)) def load(self, key=None): if key is None:
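The change above passes ex=(60*60) so saved games expire after an hour instead of accumulating in Redis indefinitely. A small redis-py sketch of the same pattern, assuming a Redis server on localhost; the key and payload here are made up for illustration.

import redis

conn = redis.StrictRedis(host="localhost", port=6379, db=0)

# store a value with a one-hour time-to-live (EX is in seconds)
conn.set("game:1234", '{"state": "in-progress"}', ex=60 * 60)

# TTL reports the remaining lifetime in seconds; -1 means "no expiry",
# which is what the pre-patch set() call produced
remaining = conn.ttl("game:1234")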
5f430b076ad70c23c430017a6aa7a7893530e995
deflect/management/commands/checkurls.py
deflect/management/commands/checkurls.py
from django.contrib.sites.models import Site from django.core.mail import mail_managers from django.core.management.base import NoArgsCommand from django.core.urlresolvers import reverse import requests from deflect.models import ShortURL class Command(NoArgsCommand): help = "Validate short URL redirect targets" def handle_noargs(self, *args, **options): message = '' for url in ShortURL.objects.all(): try: url.check_status() except requests.exceptions.RequestException as e: message += self.bad_redirect_text(url, e) mail_managers('go.corban.edu URL report', message) def bad_redirect_text(self, url, exception): """ Return informational text for a URL that raised an exception. """ base = 'http://%s' % Site.objects.get_current().domain return """ Redirect {key} with target {target} returns {error} Edit this short URL: {edit} """.format(key=url.key, target=url.long_url, error=exception, edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
from django.contrib.sites.models import Site from django.core.mail import mail_managers from django.core.management.base import NoArgsCommand from django.core.urlresolvers import reverse import requests from deflect.models import ShortURL class Command(NoArgsCommand): help = "Validate short URL redirect targets" domain = Site.objects.get_current().domain def handle_noargs(self, *args, **options): message = '' for url in ShortURL.objects.all(): try: url.check_status() except requests.exceptions.RequestException as e: message += self.url_exception_text(url, e) mail_managers('URL report for %s' % self.domain, message) def url_exception_text(self, url, exception): """Return text block for a URL exception.""" base = 'http://%s' % self.domain return """ Redirect {key} with target {target} returned {error} Edit this short URL: {edit} """.format(key=url.key, target=url.long_url, error=exception, edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
Improve subject and text of URL report email
Improve subject and text of URL report email
Python
bsd-3-clause
jbittel/django-deflect
--- +++ @@ -10,6 +10,7 @@ class Command(NoArgsCommand): help = "Validate short URL redirect targets" + domain = Site.objects.get_current().domain def handle_noargs(self, *args, **options): message = '' @@ -17,17 +18,15 @@ try: url.check_status() except requests.exceptions.RequestException as e: - message += self.bad_redirect_text(url, e) - mail_managers('go.corban.edu URL report', message) + message += self.url_exception_text(url, e) + mail_managers('URL report for %s' % self.domain, message) - def bad_redirect_text(self, url, exception): - """ - Return informational text for a URL that raised an - exception. - """ - base = 'http://%s' % Site.objects.get_current().domain + def url_exception_text(self, url, exception): + """Return text block for a URL exception.""" + base = 'http://%s' % self.domain return """ -Redirect {key} with target {target} returns {error} + +Redirect {key} with target {target} returned {error} Edit this short URL: {edit} """.format(key=url.key, target=url.long_url, error=exception,
a28b2bc45b69503a8133b0df98ffa96d9aa4e229
helusers/migrations/0002_add_oidcbackchannellogoutevent.py
helusers/migrations/0002_add_oidcbackchannellogoutevent.py
# Generated by Django 3.2.4 on 2021-06-21 05:46 from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ("helusers", "0001_add_ad_groups"), ] operations = [ migrations.CreateModel( name="OIDCBackChannelLogoutEvent", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("created_at", models.DateTimeField(default=django.utils.timezone.now)), ("iss", models.CharField(db_index=True, max_length=4096)), ("sub", models.CharField(blank=True, db_index=True, max_length=4096)), ("sid", models.CharField(blank=True, db_index=True, max_length=4096)), ], options={ "unique_together": {("iss", "sub", "sid")}, }, ), ]
from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ("helusers", "0001_add_ad_groups"), ] operations = [ migrations.CreateModel( name="OIDCBackChannelLogoutEvent", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("created_at", models.DateTimeField(default=django.utils.timezone.now)), ("iss", models.CharField(db_index=True, max_length=4096)), ("sub", models.CharField(blank=True, db_index=True, max_length=4096)), ("sid", models.CharField(blank=True, db_index=True, max_length=4096)), ], options={ "verbose_name": "OIDC back channel logout event", "verbose_name_plural": "OIDC back channel logout events", "unique_together": {("iss", "sub", "sid")}, }, ), ]
Modify migration file to include meta data changes
Modify migration file to include meta data changes The OIDCBackChannelLogoutEvent model's meta data was changed in commit f62a72b29f. Although this has no effect on the database, Django still wants to include the meta data in migrations. Since this migration file isn't yet included in any release, it can be modified, instead of creating a new migration file only for the meta data change.
Python
bsd-2-clause
City-of-Helsinki/django-helusers,City-of-Helsinki/django-helusers
--- +++ @@ -1,5 +1,3 @@ -# Generated by Django 3.2.4 on 2021-06-21 05:46 - from django.db import migrations, models import django.utils.timezone @@ -29,6 +27,8 @@ ("sid", models.CharField(blank=True, db_index=True, max_length=4096)), ], options={ + "verbose_name": "OIDC back channel logout event", + "verbose_name_plural": "OIDC back channel logout events", "unique_together": {("iss", "sub", "sid")}, }, ),
83cd6a78a61a81bb2e431ee493dbe9b443e05927
fireplace/cards/wog/neutral_legendary.py
fireplace/cards/wog/neutral_legendary.py
from ..utils import * ## # Minions class OG_151: "Tentacle of N'Zoth" deathrattle = Hit(ALL_MINIONS, 1)
from ..utils import * ## # Minions
Fix copypaste error in card definitions
Fix copypaste error in card definitions
Python
agpl-3.0
NightKev/fireplace,beheh/fireplace,jleclanche/fireplace
--- +++ @@ -3,7 +3,3 @@ ## # Minions - -class OG_151: - "Tentacle of N'Zoth" - deathrattle = Hit(ALL_MINIONS, 1)
e8f2f1c9db328dd8116a44d9d934ecef3bc7fb5e
enum34_custom.py
enum34_custom.py
from enum import Enum, EnumMeta from functools import total_ordering class _MultiValueMeta(EnumMeta): def __init__(self, cls, bases, classdict): # make sure we only have tuple values, not single values for member in self.__members__.values(): if not isinstance(member.value, tuple): raise TypeError('{} = {!r}, should be tuple!' .format(member.name, member.value)) def __call__(cls, value): """Return the appropriate instance with any of the values listed.""" for member in cls: if value in member.value: return member # raise ValueError otherwise return super().__call__(value) class MultiValueEnum(Enum, metaclass=_MultiMeta): """Enum subclass where members are declared as tuples.""" @total_ordering class OrderableMixin: """Mixin for comparable Enums. The order is the definition order from smaller to bigger. """ def __eq__(self, other): if self.__class__ is other.__class__: return self.value == other.value return NotImplemented def __lt__(self, other): if self.__class__ is other.__class__: names = self.__class__._member_names_ return names.index(self.name) < names.index(other.name) return NotImplemented
from enum import Enum, EnumMeta from functools import total_ordering class _MultiValueMeta(EnumMeta): def __init__(self, cls, bases, classdict): # make sure we only have tuple values, not single values for member in self.__members__.values(): if not isinstance(member.value, tuple): raise TypeError('{} = {!r}, should be tuple!' .format(member.name, member.value)) def __call__(cls, value): """Return the appropriate instance with any of the values listed.""" for member in cls: if value in member.value: return member else: raise ValueError("%s is not a valid %s" % (value, cls.__name__)) class MultiValueEnum(Enum, metaclass=_MultiMeta): """Enum subclass where members are declared as tuples.""" @total_ordering class OrderableMixin: """Mixin for comparable Enums. The order is the definition order from smaller to bigger. """ def __eq__(self, other): if self.__class__ is other.__class__: return self.value == other.value return NotImplemented def __lt__(self, other): if self.__class__ is other.__class__: names = self.__class__._member_names_ return names.index(self.name) < names.index(other.name) return NotImplemented
Raise ValueError explicitly from __call__ rather than with super()
Raise ValueError explicitly from __call__ rather than with super() because super() would make another lookup, but we already know the value isn't there.
Python
mit
kissgyorgy/enum34-custom
--- +++ @@ -18,9 +18,9 @@ for member in cls: if value in member.value: return member + else: + raise ValueError("%s is not a valid %s" % (value, cls.__name__)) - # raise ValueError otherwise - return super().__call__(value) class MultiValueEnum(Enum, metaclass=_MultiMeta):
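The message above argues for raising ValueError as soon as the member scan fails, since falling back to super().__call__ would only repeat a lookup that is already known to miss. A self-contained sketch of the pattern; the class names here are hypothetical, and the record's own module wires its metaclass up slightly differently.

from enum import Enum, EnumMeta

class _ByAnyValue(EnumMeta):
    # note: this simplified __call__ drops support for the functional Enum(...) API
    def __call__(cls, value):
        # return the member whose tuple of values contains `value`
        for member in cls:
            if value in member.value:
                return member
        raise ValueError("%s is not a valid %s" % (value, cls.__name__))

class Weekday(Enum, metaclass=_ByAnyValue):
    WORKDAY = (1, 2, 3, 4, 5)
    WEEKEND = (6, 7)

assert Weekday(3) is Weekday.WORKDAY
assert Weekday(7) is Weekday.WEEKEND
# Weekday(8) raises ValueError directly, with no second lookup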
739a5a85a455105f01013b20762b1b493c4d5027
deflect/views.py
deflect/views.py
from __future__ import unicode_literals import base32_crockford import logging from django.db.models import F from django.http import Http404 from django.http import HttpResponsePermanentRedirect from django.shortcuts import get_object_or_404 from django.utils.timezone import now from .models import ShortURL from .models import VanityURL from .utils import add_query_params logger = logging.getLogger(__name__) def redirect(request, key): """ Given the short URL key, update the statistics and redirect the user to the destination URL, including available Google Analytics parameters. """ try: alias = VanityURL.objects.select_related().get(alias=key.upper()) key_id = alias.redirect.id except VanityURL.DoesNotExist: try: key_id = base32_crockford.decode(key) except ValueError as e: logger.warning("Error decoding redirect '%s': %s" % (key, e)) raise Http404 redirect = get_object_or_404(ShortURL, pk=key_id) ShortURL.objects.filter(pk=key_id).update(hits=F('hits') + 1, last_used=now()) # Inject Google campaign parameters utm_params = {'utm_source': redirect.key, 'utm_campaign': redirect.campaign, 'utm_content': redirect.content, 'utm_medium': redirect.medium} url = add_query_params(redirect.long_url, utm_params) return HttpResponsePermanentRedirect(url)
from __future__ import unicode_literals import base32_crockford import logging from django.db.models import F from django.http import Http404 from django.http import HttpResponsePermanentRedirect from django.shortcuts import get_object_or_404 from django.utils.timezone import now from .models import ShortURL from .models import VanityURL from .utils import add_query_params logger = logging.getLogger(__name__) def redirect(request, key): """ Given the short URL key, update the statistics and redirect the user to the destination URL, including available Google Analytics parameters. """ try: alias = VanityURL.objects.select_related().get(alias=key.upper()) key_id = alias.redirect.id except VanityURL.DoesNotExist: try: key_id = base32_crockford.decode(key) except ValueError as e: logger.warning("Error decoding redirect: %s" % e) raise Http404 redirect = get_object_or_404(ShortURL, pk=key_id) ShortURL.objects.filter(pk=key_id).update(hits=F('hits') + 1, last_used=now()) # Inject Google campaign parameters utm_params = {'utm_source': redirect.key, 'utm_campaign': redirect.campaign, 'utm_content': redirect.content, 'utm_medium': redirect.medium} url = add_query_params(redirect.long_url, utm_params) return HttpResponsePermanentRedirect(url)
Simplify invalid decode warning text
Simplify invalid decode warning text The string is already displayed in the error text, so there's no reason to duplicate it.
Python
bsd-3-clause
jbittel/django-deflect
--- +++ @@ -30,7 +30,7 @@ try: key_id = base32_crockford.decode(key) except ValueError as e: - logger.warning("Error decoding redirect '%s': %s" % (key, e)) + logger.warning("Error decoding redirect: %s" % e) raise Http404 redirect = get_object_or_404(ShortURL, pk=key_id)
53ba55615fbd02e83212aecaa0c37d1887adfc73
tests/test_tracer.py
tests/test_tracer.py
import unittest import sys from tests.utils import requires_python_version class TestTreeTrace(unittest.TestCase): maxDiff = None @requires_python_version(3.5) def test_async_forbidden(self): def check(body): with self.assertRaises(ValueError): exec(""" from birdseye.tracer import TreeTracerBase @TreeTracerBase() async def f(): """ + body) check('pass') if sys.version_info >= (3, 6): check('yield 1')
import sys import unittest from tests.utils import requires_python_version class TestTreeTrace(unittest.TestCase): maxDiff = None @requires_python_version(3.5) def test_async_forbidden(self): from birdseye.tracer import TreeTracerBase tracer = TreeTracerBase() with self.assertRaises(ValueError): exec(""" @tracer async def f(): pass""") if sys.version_info >= (3, 6): with self.assertRaises(ValueError): exec(""" @tracer async def f(): yield 1""")
Fix inner exec syntax error in python 2.7
Fix inner exec syntax error in python 2.7
Python
mit
alexmojaki/birdseye,alexmojaki/birdseye,alexmojaki/birdseye,alexmojaki/birdseye
--- +++ @@ -1,6 +1,5 @@ +import sys import unittest - -import sys from tests.utils import requires_python_version @@ -10,14 +9,15 @@ @requires_python_version(3.5) def test_async_forbidden(self): - def check(body): + from birdseye.tracer import TreeTracerBase + tracer = TreeTracerBase() + with self.assertRaises(ValueError): + exec(""" +@tracer +async def f(): pass""") + + if sys.version_info >= (3, 6): with self.assertRaises(ValueError): exec(""" -from birdseye.tracer import TreeTracerBase -@TreeTracerBase() -async def f(): """ + body) - - check('pass') - - if sys.version_info >= (3, 6): - check('yield 1') +@tracer +async def f(): yield 1""")
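The test above keeps `async def` inside strings passed to exec(), presumably so the test module itself still parses on Python 2.7, where that syntax is a compile-time SyntaxError; the commit reworks how those strings are assembled. A small illustration of the general gating pattern, with names of my own choosing:

import sys

ASYNC_SRC = '''
async def double(x):
    return 2 * x
'''

if sys.version_info >= (3, 5):
    namespace = {}
    # the async syntax is only compiled here, at exec() time, so merely
    # importing this file never trips an older parser
    exec(ASYNC_SRC, namespace)
    double = namespace["double"]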
31d1e9a991923dcd748f26b3533f2736f04f6454
tests/test_typing.py
tests/test_typing.py
import typing from trafaretrecord import TrafaretRecord def test_initialization(): class A(TrafaretRecord): a: int b: str c: typing.List[int] tmp = A(a=1, b='1', c=[1, 2, 3]) assert repr(tmp) == "A(a=1, b='1', c=[1, 2, 3])" assert tmp._field_types == {'a': int, 'b': str, 'c': typing.List[int]} def test_class_properties(): class A(TrafaretRecord): a: int b: str c: typing.List[int] @property def B(self): return self.b.upper() @staticmethod def static(): return 'static method result' @classmethod def initialize(cls, *args, **kwargs): return cls(*args, **kwargs) tmp = A(a=1, b='b', c=[1, 2, 3]) assert tmp.b == 'b' assert tmp.B == 'B' assert tmp.static() == 'static method result' assert A.initialize(a=3, b='B', c=[4, 5, 6]) == A(a=3, b='B', c=[4, 5, 6])
import typing from trafaretrecord import TrafaretRecord def test_initialization(): class A(TrafaretRecord): a: int b: str c: typing.List[int] tmp = A(a=1, b='1', c=[1, 2, 3]) assert repr(tmp) == "A(a=1, b='1', c=[1, 2, 3])" assert tmp._field_types == {'a': int, 'b': str, 'c': typing.List[int]} def test_class_properties(): class A(TrafaretRecord): a: int b: str c: typing.List[int] @property def B(self): return self.b.upper() @B.setter def B(self, value): self._replace(b=value.lower()) @staticmethod def static(): return 'static method result' @classmethod def initialize(cls, *args, **kwargs): return cls(*args, **kwargs) tmp = A(a=1, b='b', c=[1, 2, 3]) assert tmp.b == 'b' assert tmp.B == 'B' tmp.B = 'BB' assert tmp.b == 'bb' assert tmp.B == 'BB' assert tmp.static() == 'static method result' assert A.initialize(a=3, b='B', c=[4, 5, 6]) == A(a=3, b='B', c=[4, 5, 6])
Add test for property setter
Add test for property setter
Python
mit
vovanbo/trafaretrecord,vovanbo/trafaretrecord
--- +++ @@ -24,6 +24,10 @@ def B(self): return self.b.upper() + @B.setter + def B(self, value): + self._replace(b=value.lower()) + @staticmethod def static(): return 'static method result' @@ -35,6 +39,9 @@ tmp = A(a=1, b='b', c=[1, 2, 3]) assert tmp.b == 'b' assert tmp.B == 'B' + tmp.B = 'BB' + assert tmp.b == 'bb' + assert tmp.B == 'BB' assert tmp.static() == 'static method result' assert A.initialize(a=3, b='B', c=[4, 5, 6]) == A(a=3, b='B', c=[4, 5, 6])
57ead9af05c95cee2354c55bb73f5fe26be3a256
rasterio/rio/main.py
rasterio/rio/main.py
# main: loader of all the command entry points. from pkg_resources import iter_entry_points from rasterio.rio.cli import cli # Find and load all entry points in the rasterio.rio_commands group. # This includes the standard commands included with Rasterio as well # as commands provided by other packages. # # At a mimimum, commands must use the rasterio.rio.cli.cli command # group decorator like so: # # from rasterio.rio.cli import cli # # @cli.command() # def foo(...): # ... for entry_point in iter_entry_points('rasterio.rio_commands'): entry_point.load()
# main: loader of all the command entry points. import sys import traceback from pkg_resources import iter_entry_points from rasterio.rio.cli import cli # Find and load all entry points in the rasterio.rio_commands group. # This includes the standard commands included with Rasterio as well # as commands provided by other packages. # # At a mimimum, commands must use the rasterio.rio.cli.cli command # group decorator like so: # # from rasterio.rio.cli import cli # # @cli.command() # def foo(...): # ... for entry_point in iter_entry_points('rasterio.rio_commands'): try: entry_point.load() except ImportError: # Catch this so a busted plugin doesn't take down the CLI. # Handled by registering a stub that does nothing other than # explain the error. msg = ( "Warning: plugin module could not be loaded. Contact " "its author for help.\n\n\b\n" + traceback.format_exc()) short_msg = ( "Warning: plugin module could not be loaded. See " "`rio %s --help` for details." % entry_point.name) @cli.command(entry_point.name, help=msg, short_help=short_msg) def cmd_stub(): sys.exit(0)
Handle plugin load errors in a helpful way.
Handle plugin load errors in a helpful way. On catching an ImportError, we make a dummy/stub subcommand that sits in the list of subcommands as expected and reports the error there.
Python
bsd-3-clause
youngpm/rasterio,johanvdw/rasterio,clembou/rasterio,youngpm/rasterio,njwilson23/rasterio,perrygeo/rasterio,johanvdw/rasterio,njwilson23/rasterio,kapadia/rasterio,perrygeo/rasterio,youngpm/rasterio,kapadia/rasterio,brendan-ward/rasterio,njwilson23/rasterio,kapadia/rasterio,brendan-ward/rasterio,johanvdw/rasterio,clembou/rasterio,clembou/rasterio,perrygeo/rasterio,brendan-ward/rasterio
--- +++ @@ -1,4 +1,7 @@ # main: loader of all the command entry points. + +import sys +import traceback from pkg_resources import iter_entry_points @@ -19,4 +22,19 @@ # ... for entry_point in iter_entry_points('rasterio.rio_commands'): - entry_point.load() + try: + entry_point.load() + except ImportError: + # Catch this so a busted plugin doesn't take down the CLI. + # Handled by registering a stub that does nothing other than + # explain the error. + msg = ( + "Warning: plugin module could not be loaded. Contact " + "its author for help.\n\n\b\n" + + traceback.format_exc()) + short_msg = ( + "Warning: plugin module could not be loaded. See " + "`rio %s --help` for details." % entry_point.name) + @cli.command(entry_point.name, help=msg, short_help=short_msg) + def cmd_stub(): + sys.exit(0)
69a339c792e2545cbd12c126a5b0865e4cf1e7e5
paystackapi/tests/test_product.py
paystackapi/tests/test_product.py
import httpretty from paystackapi.tests.base_test_case import BaseTestCase from paystackapi.product import Product # class TestProduct(BaseTestCase): # @httpretty.activate # def test_valid_create(self): # pass
import httpretty from paystackapi.tests.base_test_case import BaseTestCase from paystackapi.product import Product class TestProduct(BaseTestCase): @httpretty.activate def test_product_create(self): """Method defined to test product creation.""" httpretty.register_uri( httpretty.POST, self.endpoint_url("/product"), content_type='text/json', body='{"status": true, "message": "Product successfully created"}', status=201, ) response = Product.create( name="Product pypaystack test", description="my test description", price=500000, currency="NGN" ) self.assertTrue(response['status']) @httpretty.activate def test_product_list(self): """Function defined to test Product list method.""" httpretty.register_uri( httpretty.GET, self.endpoint_url("/product"), content_type='text/json', body='{"status": true, "message": "Products retrieved", "data":[{}], "meta":{}}', status=201, ) response = Product.list() self.assertEqual(response['status'], True) @httpretty.activate def test_product_fetch(self): """Function defined to test Product list method.""" httpretty.register_uri( httpretty.GET, self.endpoint_url("/product/5499"), content_type='text/json', body='{"status": true, "message": "Products retrieved", "data":[{}]}', status=201, ) response = Product.fetch(5499) self.assertEqual(response['status'], True) @httpretty.activate def test_product_fetch(self): """Function defined to test Product list method.""" httpretty.register_uri( httpretty.PUT, self.endpoint_url("/product/5499"), content_type='text/json', body='{"status": true, "message": "Products retrieved", "data":[{}]}', status=201, ) response = Product.update(product_id=5499, name="Product pypaystack test", description="my test description", price=500000000, currency="USD" ) self.assertEqual(response['status'], True)
Add test cases for product.
Add test cases for product.
Python
mit
andela-sjames/paystack-python
--- +++ @@ -4,8 +4,66 @@ from paystackapi.product import Product -# class TestProduct(BaseTestCase): +class TestProduct(BaseTestCase): -# @httpretty.activate -# def test_valid_create(self): -# pass + @httpretty.activate + def test_product_create(self): + """Method defined to test product creation.""" + httpretty.register_uri( + httpretty.POST, + self.endpoint_url("/product"), + content_type='text/json', + body='{"status": true, "message": "Product successfully created"}', + status=201, + ) + + response = Product.create( + name="Product pypaystack test", description="my test description", + price=500000, currency="NGN" + ) + self.assertTrue(response['status']) + + @httpretty.activate + def test_product_list(self): + """Function defined to test Product list method.""" + httpretty.register_uri( + httpretty.GET, + self.endpoint_url("/product"), + content_type='text/json', + body='{"status": true, "message": "Products retrieved", "data":[{}], "meta":{}}', + status=201, + ) + + response = Product.list() + self.assertEqual(response['status'], True) + + @httpretty.activate + def test_product_fetch(self): + """Function defined to test Product list method.""" + httpretty.register_uri( + httpretty.GET, + self.endpoint_url("/product/5499"), + content_type='text/json', + body='{"status": true, "message": "Products retrieved", "data":[{}]}', + status=201, + ) + + response = Product.fetch(5499) + self.assertEqual(response['status'], True) + + @httpretty.activate + def test_product_fetch(self): + """Function defined to test Product list method.""" + httpretty.register_uri( + httpretty.PUT, + self.endpoint_url("/product/5499"), + content_type='text/json', + body='{"status": true, "message": "Products retrieved", "data":[{}]}', + status=201, + ) + + response = Product.update(product_id=5499, name="Product pypaystack test", + description="my test description", price=500000000, + currency="USD" + ) + self.assertEqual(response['status'], True)
71e64dea686a57e358f87c926bf8c22313e99266
django_project/localities/tests/test_model_AttributeArchive.py
django_project/localities/tests/test_model_AttributeArchive.py
# -*- coding: utf-8 -*- from django.test import TestCase from .model_factories import AttributeF from ..models import AttributeArchive class TestModelAttributeArchive(TestCase): def test_archiving_attrbute(self): attribute = AttributeF.create(key='A key') attribute.description = 'a new descritpion' attribute.save() attribute.key = 'a new key' attribute.save() attribute.key = 'A key' attribute.save() # test save with no changes, should not trigger model archival attribute.key = 'A key' attribute.save() self.assertEqual(AttributeArchive.objects.count(), 4) self.assertListEqual( [attr.key for attr in AttributeArchive.objects.all()], ['a_key', 'a_key', 'a_new_key', 'a_key'] ) self.assertListEqual( [attr.version for attr in AttributeArchive.objects.all()], [1, 2, 3, 4] )
# -*- coding: utf-8 -*- from django.test import TestCase from .model_factories import AttributeF from ..models import AttributeArchive class TestModelAttributeArchive(TestCase): def test_archiving_attribute(self): attribute = AttributeF.create(key='A key') attribute.description = 'a new descritpion' attribute.save() attribute.key = 'a new key' attribute.save() attribute.key = 'A key' attribute.save() # test save with no changes, should not trigger model archival attribute.key = 'A key' attribute.save() self.assertEqual(AttributeArchive.objects.count(), 4) self.assertListEqual( [attr.key for attr in AttributeArchive.objects.all()], ['a_key', 'a_key', 'a_new_key', 'a_key'] ) self.assertListEqual( [attr.version for attr in AttributeArchive.objects.all()], [1, 2, 3, 4] )
Fix test name spelling error
Fix test name spelling error
Python
bsd-2-clause
ismailsunni/healthsites,ismailsunni/healthsites,ismailsunni/healthsites,ismailsunni/healthsites
--- +++ @@ -7,7 +7,7 @@ class TestModelAttributeArchive(TestCase): - def test_archiving_attrbute(self): + def test_archiving_attribute(self): attribute = AttributeF.create(key='A key') attribute.description = 'a new descritpion'
096564c95371510769a7dec31cd5d90bf2c56955
scripts/migration/migrate_confirmed_user_emails.py
scripts/migration/migrate_confirmed_user_emails.py
"""Ensure that users with User.emails == [] have User.username inserted. """ import logging import sys from modularodm import Q from nose.tools import * from website import models from website.app import init_app from scripts import utils as scripts_utils logger = logging.getLogger(__name__) def main(): # Set up storage backends init_app(routes=False) dry_run = 'dry' in sys.argv if not dry_run: scripts_utils.add_file_logger(logger, __file__) logger.info("Iterating users with username not in confirmed emails") for user in get_users_with_username_not_in_emails(): add_username_to_emails(user) logger.info(repr(user)) if not dry_run: user.save() def get_users_with_username_not_in_emails(): return models.User.find( Q('date_confirmed', 'ne', None) & Q('emails', 'eq', []) ) def add_username_to_emails(user): user.emails.append(user.username) if __name__ == '__main__': main()
"""Ensure that confirmed users' usernames are included in their emails field. """ import logging import sys from modularodm import Q from website import models from website.app import init_app from scripts import utils as scripts_utils logger = logging.getLogger(__name__) def main(): # Set up storage backends init_app(routes=False) dry_run = 'dry' in sys.argv count = 0 if not dry_run: scripts_utils.add_file_logger(logger, __file__) logger.info("Finding users with username not in confirmed emails") for user in get_users_with_username_not_in_emails(): user.emails.append(user.username) logger.info(repr(user)) if not dry_run: user.save() count += 1 logger.info('Migrated {} users'.format(count)) def get_users_with_username_not_in_emails(): return ( user for user in models.User.find(Q('date_confirmed', 'ne', None)) if user.is_active and user.username.lower() not in [email.lower() for email in user.emails] and user.username is not None ) if __name__ == '__main__': main()
Update migration script for users whose usernames aren't in emails field
Update migration script for users whose usernames aren't in emails field OSF-5462 Previously the script only migrated users who had an empty emails field. This updates the script to also handle users whose username isn't in the emails field, even when the emails field isn't empty
Python
apache-2.0
wearpants/osf.io,chrisseto/osf.io,felliott/osf.io,samchrisinger/osf.io,saradbowman/osf.io,jnayak1/osf.io,alexschiller/osf.io,Nesiehr/osf.io,asanfilippo7/osf.io,abought/osf.io,icereval/osf.io,caneruguz/osf.io,RomanZWang/osf.io,abought/osf.io,TomBaxter/osf.io,hmoco/osf.io,Nesiehr/osf.io,brandonPurvis/osf.io,cslzchen/osf.io,aaxelb/osf.io,amyshi188/osf.io,chennan47/osf.io,mattclark/osf.io,abought/osf.io,doublebits/osf.io,sloria/osf.io,crcresearch/osf.io,doublebits/osf.io,TomHeatwole/osf.io,wearpants/osf.io,Nesiehr/osf.io,leb2dg/osf.io,alexschiller/osf.io,RomanZWang/osf.io,erinspace/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,mfraezz/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,acshi/osf.io,sloria/osf.io,mluke93/osf.io,samchrisinger/osf.io,Johnetordoff/osf.io,mluke93/osf.io,icereval/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,mfraezz/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,binoculars/osf.io,zachjanicki/osf.io,caneruguz/osf.io,cslzchen/osf.io,chrisseto/osf.io,jnayak1/osf.io,zachjanicki/osf.io,mluo613/osf.io,KAsante95/osf.io,baylee-d/osf.io,GageGaskins/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,chrisseto/osf.io,cwisecarver/osf.io,brandonPurvis/osf.io,emetsger/osf.io,mluo613/osf.io,cwisecarver/osf.io,RomanZWang/osf.io,Nesiehr/osf.io,samchrisinger/osf.io,felliott/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,brandonPurvis/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,hmoco/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,billyhunt/osf.io,adlius/osf.io,emetsger/osf.io,TomHeatwole/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,hmoco/osf.io,RomanZWang/osf.io,GageGaskins/osf.io,Johnetordoff/osf.io,erinspace/osf.io,leb2dg/osf.io,rdhyee/osf.io,TomBaxter/osf.io,asanfilippo7/osf.io,icereval/osf.io,RomanZWang/osf.io,amyshi188/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,brandonPurvis/osf.io,laurenrevere/osf.io,alexschiller/osf.io,kch8qx/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,zachjanicki/osf.io,mattclark/osf.io,chennan47/osf.io,DanielSBrown/osf.io,amyshi188/osf.io,adlius/osf.io,cslzchen/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,DanielSBrown/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,pattisdr/osf.io,caseyrollins/osf.io,kwierman/osf.io,binoculars/osf.io,zamattiac/osf.io,aaxelb/osf.io,caneruguz/osf.io,kch8qx/osf.io,billyhunt/osf.io,monikagrabowska/osf.io,mluke93/osf.io,HalcyonChimera/osf.io,cwisecarver/osf.io,mluo613/osf.io,baylee-d/osf.io,jnayak1/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,billyhunt/osf.io,cslzchen/osf.io,asanfilippo7/osf.io,DanielSBrown/osf.io,acshi/osf.io,acshi/osf.io,SSJohns/osf.io,jnayak1/osf.io,adlius/osf.io,kch8qx/osf.io,chrisseto/osf.io,mattclark/osf.io,brianjgeiger/osf.io,doublebits/osf.io,GageGaskins/osf.io,caneruguz/osf.io,baylee-d/osf.io,mluo613/osf.io,KAsante95/osf.io,kwierman/osf.io,SSJohns/osf.io,mluo613/osf.io,brandonPurvis/osf.io,amyshi188/osf.io,zamattiac/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,KAsante95/osf.io,TomBaxter/osf.io,emetsger/osf.io,mfraezz/osf.io,wearpants/osf.io,laurenrevere/osf.io,alexschiller/osf.io,acshi/osf.io,leb2dg/osf.io,saradbowman/osf.io,kwierman/osf.io,adlius/osf.io,GageGaskins/osf.io,crcresearch/osf.io,crcresearch/osf.io,erinspace/osf.io,leb2dg/osf.io,mluke93/osf.io,mfraezz/osf.io,TomHeatwole/osf.io,hmoco/osf.io,kch8qx/osf.io,KAsante95/osf.io,pattisdr/osf.io,abought/osf.io,asanfilippo7/osf.io,zamattiac/osf.io,emetsger/osf.io,rdhyee/osf.io,billyhunt/osf.io,felliott/osf.io,rdhyee/osf.io,felliott/osf.io,alexschiller/os
f.io,GageGaskins/osf.io,sloria/osf.io,wearpants/osf.io,doublebits/osf.io,caseyrollins/osf.io,acshi/osf.io,DanielSBrown/osf.io,KAsante95/osf.io
--- +++ @@ -1,12 +1,10 @@ -"""Ensure that users with User.emails == [] have User.username inserted. - +"""Ensure that confirmed users' usernames are included in their emails field. """ import logging import sys from modularodm import Q -from nose.tools import * from website import models from website.app import init_app @@ -20,26 +18,27 @@ # Set up storage backends init_app(routes=False) dry_run = 'dry' in sys.argv + count = 0 if not dry_run: scripts_utils.add_file_logger(logger, __file__) - logger.info("Iterating users with username not in confirmed emails") + logger.info("Finding users with username not in confirmed emails") for user in get_users_with_username_not_in_emails(): - add_username_to_emails(user) + user.emails.append(user.username) logger.info(repr(user)) if not dry_run: user.save() + count += 1 + logger.info('Migrated {} users'.format(count)) def get_users_with_username_not_in_emails(): - return models.User.find( - Q('date_confirmed', 'ne', None) - & Q('emails', 'eq', []) + return ( + user for user in + models.User.find(Q('date_confirmed', 'ne', None)) + if user.is_active and + user.username.lower() not in [email.lower() for email in user.emails] and + user.username is not None ) - - -def add_username_to_emails(user): - user.emails.append(user.username) - if __name__ == '__main__': main()
4053e98a8d337628760233c40915fde43f22d1e2
events/models.py
events/models.py
from django.db import models from django.conf import settings # Create your models here. #Events : # Des users peuvent participer à un event # Les gens peuvnet être "intéressés" # Utiliser https://github.com/thoas/django-sequere ? # API hackeragenda class Event(models.Model): STATUS_CHOICES = ( ("i", "in preparation"), ("r", "ready"), ("p", "planned"), ("j", "just an idea"), ) place = models.CharField(max_length=300) start = models.DateTimeField() stop = models.DateTimeField() title = models.CharField(max_length=300) status = models.CharField(max_length=1, choices=STATUS_CHOICES) organizer = models.OneToOneField(settings.AUTH_USER_MODEL)
from django.db import models from django.conf import settings # Create your models here. #Events : # Des users peuvent participer à un event # Les gens peuvnet être "intéressés" # Utiliser https://github.com/thoas/django-sequere ? # API hackeragenda class Event(models.Model): STATUS_CHOICES = ( ("i", "in preparation"), ("r", "ready"), ("p", "planned"), ("j", "just an idea"), ) place = models.CharField(max_length=300) start = models.DateTimeField() stop = models.DateTimeField() title = models.CharField(max_length=300) status = models.CharField(max_length=1, choices=STATUS_CHOICES) organizer = models.ForeignKey(settings.AUTH_USER_MODEL)
Use ForeignKey instead of OneToOneField for event organizer
[fix] Use ForeignKey instead of OneToOneField for event organizer
Python
agpl-3.0
UrLab/incubator,UrLab/incubator,UrLab/incubator,UrLab/incubator
--- +++ @@ -20,4 +20,4 @@ stop = models.DateTimeField() title = models.CharField(max_length=300) status = models.CharField(max_length=1, choices=STATUS_CHOICES) - organizer = models.OneToOneField(settings.AUTH_USER_MODEL) + organizer = models.ForeignKey(settings.AUTH_USER_MODEL)
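The fix above matters because OneToOneField is essentially a ForeignKey with a UNIQUE constraint, so each user could organize at most one event; ForeignKey lifts that limit. A minimal sketch follows (model name and fields are illustrative, not from the project; on_delete is written out as newer Django versions require):

from django.conf import settings
from django.db import models

class Meetup(models.Model):
    title = models.CharField(max_length=300)
    # ForeignKey: one user may organize many meetups. With OneToOneField the
    # organizer_id column would be UNIQUE, and creating a second Meetup for
    # the same user would fail with an IntegrityError.
    organizer = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)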
352aeadf68c102b03dc7fcc243e46c3442132c1d
pychecker/test_input/test70.py
pychecker/test_input/test70.py
'test checking constant conditions' # __pychecker__ = '' def func1(x): 'should not produce a warning' if 1: pass while 1: print x break assert x, 'test' return 0 def func2(x): 'should produce a warning' __pychecker__ = 'constant1' if 1: pass while 1: print x break return 0 def func3(x): 'should produce a warning' if 21: return 1 if 31: return 2 assert(x, 'test') assert(5, 'test') assert 5, 'test' if 'str': return 3 return 4 def func4(x): 'should not produce a warning' if x == 204 or x == 201 or 200 <= x < 300: x = 0 if x == 1: pass while x == 'str': print x break return 0
'test checking constant conditions' # __pychecker__ = '' def func1(x): 'should not produce a warning' if 1: pass while 1: print x break assert x, 'test' return 0 def func2(x): 'should produce a warning' __pychecker__ = 'constant1' if 1: pass while 1: print x break return 0 def func3(x): 'should produce a warning' if 21: return 1 if 31: return 2 assert(x, 'test') assert(5, 'test') assert 5, 'test' if 'str': return 3 return 4 def func4(x): 'should not produce a warning' if x == 204 or x == 201 or 200 <= x < 300: x = 0 if x == 1: pass while x == 'str': print x break return 0 def func5(need_quotes, text): 'should not produce a warning' return (need_quotes) and ('"%s"' % text) or (text)
Fix a problem reported by Greg Ward and pointed out by John Machin when doing:
Fix a problem reported by Greg Ward and pointed out by John Machin when doing: return (need_quotes) and ('"%s"' % text) or (text) The following warning was generated: Using a conditional statement with a constant value ("%s") This was because even the stack wasn't modified after a BINARY_MODULO to say the value on the stack was no longer const.
Python
bsd-3-clause
smspillaz/pychecker,smspillaz/pychecker,smspillaz/pychecker
--- +++ @@ -47,3 +47,8 @@ print x break return 0 + +def func5(need_quotes, text): + 'should not produce a warning' + return (need_quotes) and ('"%s"' % text) or (text) +
69c2921f308ef1bd102ba95152ebdeccf72b8f6e
source/seedsource/tasks/cleanup_tifs.py
source/seedsource/tasks/cleanup_tifs.py
from django.conf import settings import os import os.path import time import re from celery.task import task @task def cleanup_temp_tif_files(age=7200): temp_dir = settings.DATASET_DOWNLOAD_DIR cutoff = time.time() - age t_files = os.listdir(temp_dir) for t_file in t_files: if re.search('.tif$', t_file): path = os.path.join(temp_dir, t_file) if os.path.getctime(path) < cutoff: try: os.remove(path) except OSError: pass
from django.conf import settings import os import os.path import time import re from celery.task import task @task def cleanup_temp_tif_files(age=7200): temp_dir = settings.DATASET_DOWNLOAD_DIR cutoff = time.time() - age t_files = os.listdir(temp_dir) for t_file in t_files: if re.search('.zip$', t_file): path = os.path.join(temp_dir, t_file) if os.path.getctime(path) < cutoff: try: os.remove(path) except OSError: pass
Update GeoTIFF cleanup to cleanup .zip
Update GeoTIFF cleanup to cleanup .zip
Python
bsd-3-clause
consbio/seedsource,consbio/seedsource,consbio/seedsource
--- +++ @@ -11,7 +11,7 @@ cutoff = time.time() - age t_files = os.listdir(temp_dir) for t_file in t_files: - if re.search('.tif$', t_file): + if re.search('.zip$', t_file): path = os.path.join(temp_dir, t_file) if os.path.getctime(path) < cutoff: try:
55646644c18fe5e10669743025cc00b8225f9908
south/introspection_plugins/__init__.py
south/introspection_plugins/__init__.py
# This module contains built-in introspector plugins for various common # Django apps. # These imports trigger the lower-down files import south.introspection_plugins.geodjango import south.introspection_plugins.django_tagging import south.introspection_plugins.django_taggit import south.introspection_plugins.django_objectpermissions
# This module contains built-in introspector plugins for various common # Django apps. # These imports trigger the lower-down files import south.introspection_plugins.geodjango import south.introspection_plugins.django_tagging import south.introspection_plugins.django_taggit import south.introspection_plugins.django_objectpermissions import south.introspection_plugins.annoying_autoonetoone
Add import of django-annoying patch
Add import of django-annoying patch
Python
apache-2.0
philipn/django-south,philipn/django-south,nimnull/django-south,RaD/django-south,RaD/django-south,RaD/django-south,nimnull/django-south
--- +++ @@ -6,4 +6,5 @@ import south.introspection_plugins.django_tagging import south.introspection_plugins.django_taggit import south.introspection_plugins.django_objectpermissions +import south.introspection_plugins.annoying_autoonetoone
65a78d5aafdbba03812995f38e31fba0621e350e
setup_utils.py
setup_utils.py
import os import re def update_pins(setup_args): # Use requirements and constraints to set version pins packages = set() with open('./requirements.txt') as requirements: for r in requirements: if r.lower().strip() == 'dallinger': continue if not r.startswith('-') or r.startswith('#'): packages.add(r.strip().lower()) requirements = [] REQUIREMENT_RE = re.compile(r'^(([^=]+)==[^#]+)(#.*)?$') with open('./constraints.txt') as constraints: for c in constraints: matches = REQUIREMENT_RE.match(c.strip()) if not matches: continue match = matches.group(2).lower().strip() req = matches.group(1).strip() if match in packages: requirements.append(req) # pin extra requirements for extra in setup_args['extras_require']: extra_packages = setup_args['extras_require'][extra] for i, package in enumerate(extra_packages[:]): if package.lower() == match: extra_packages[i] = req if requirements: setup_args['install_requires'] = requirements # If not on Heroku, install setuptools-markdown. try: os.environ["DYNO"] except KeyError: setup_args.update({ "setup_requires": ['setuptools-markdown==0.2'], "long_description_markdown_filename": 'README.md', })
import os import re REQUIREMENT_RE = re.compile(r'^(([^=]+)[=<>]+[^#]+)(#.*)?$') def update_pins(setup_args): # Use requirements and constraints to set version pins packages = set() install_dir = os.path.dirname(__file__) with open(os.path.join(install_dir, 'requirements.txt')) as requirements: for r in requirements: if r.lower().strip() == 'dallinger': continue if not r.startswith('-') or r.startswith('#'): packages.add(r.strip().lower()) requirements = [] with open(os.path.join(install_dir, 'constraints.txt')) as constraints: for c in constraints: matches = REQUIREMENT_RE.match(c.strip()) if not matches: continue match = matches.group(2).lower().strip() req = matches.group(1).strip() if match in packages: requirements.append(req) # pin extra requirements for extra in setup_args['extras_require']: extra_packages = setup_args['extras_require'][extra] for i, package in enumerate(extra_packages[:]): if package.lower() == match: extra_packages[i] = req if requirements: setup_args['install_requires'] = requirements # If not on Heroku, install setuptools-markdown. try: os.environ["DYNO"] except KeyError: setup_args.update({ "setup_requires": ['setuptools-markdown==0.2'], "long_description_markdown_filename": 'README.md', })
Address review concerns: allow range requirements, specify requirements file path explicitly, ...
Address review concerns: allow range requirements, specify requirements file path explicitly, ...
Python
mit
Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger
--- +++ @@ -1,11 +1,14 @@ import os import re + +REQUIREMENT_RE = re.compile(r'^(([^=]+)[=<>]+[^#]+)(#.*)?$') def update_pins(setup_args): # Use requirements and constraints to set version pins packages = set() - with open('./requirements.txt') as requirements: + install_dir = os.path.dirname(__file__) + with open(os.path.join(install_dir, 'requirements.txt')) as requirements: for r in requirements: if r.lower().strip() == 'dallinger': continue @@ -13,8 +16,7 @@ packages.add(r.strip().lower()) requirements = [] - REQUIREMENT_RE = re.compile(r'^(([^=]+)==[^#]+)(#.*)?$') - with open('./constraints.txt') as constraints: + with open(os.path.join(install_dir, 'constraints.txt')) as constraints: for c in constraints: matches = REQUIREMENT_RE.match(c.strip()) if not matches:
14edc2e547f3dbad0777c8fccc23a0d0b6a0019f
plugins/star.py
plugins/star.py
import urllib.request import urllib.error import json import plugin import command import message import os def onInit(plugin): star_command = command.command(plugin, 'star', shortdesc='Post a random picture of Star Butterfly to the channel') return plugin.plugin.plugin(plugin, 'star', [star_command]) def onCommand(message_in): if message_in.command == 'star': try: f = urllib.request.urlopen("https://sydneyerickson.me/starapi/rand.php").read().decode("utf-8") except urllib.error.URLError as e: return message.message(body='There was an issue connecting to XKCD'.format(message_in.body)) imageName = f.split('/') if os.path.isfile('cache/star_' + imageName[-1]): pass else: urllib.request.urlretrieve(f, 'cache/star_' + imageName[-1]) return message.message(file='cache/star_' + imageName[-1])
import urllib.request import urllib.error import json import plugin import command import message import caching import os def onInit(plugin): star_command = command.command(plugin, 'star', shortdesc='Post a random picture of Star Butterfly to the channel') return plugin.plugin.plugin(plugin, 'star', [star_command]) def onCommand(message_in): if message_in.command == 'star': try: f = urllib.request.urlopen("https://sydneyerickson.me/starapi/rand.php").read().decode("utf-8") except urllib.error.URLError as e: return message.message(body='There was an issue connecting to Starapi'.format(message_in.body)) imageName = f.split('/') caching.downloadToCache(f, imageName[-1]) return message.message(file='cache/star_' + imageName[-1])
Update Star plugin to use new caching API
Update Star plugin to use new caching API
Python
apache-2.0
dhinakg/BitSTAR,dhinakg/BitSTAR,StarbotDiscord/Starbot,StarbotDiscord/Starbot
--- +++ @@ -4,6 +4,7 @@ import plugin import command import message +import caching import os def onInit(plugin): @@ -15,12 +16,9 @@ try: f = urllib.request.urlopen("https://sydneyerickson.me/starapi/rand.php").read().decode("utf-8") except urllib.error.URLError as e: - return message.message(body='There was an issue connecting to XKCD'.format(message_in.body)) + return message.message(body='There was an issue connecting to Starapi'.format(message_in.body)) imageName = f.split('/') - if os.path.isfile('cache/star_' + imageName[-1]): - pass - else: - urllib.request.urlretrieve(f, 'cache/star_' + imageName[-1]) + caching.downloadToCache(f, imageName[-1]) return message.message(file='cache/star_' + imageName[-1])
1afa686eafbaa4392e81cad881db55e1fafb112f
src/armet/connectors/bottle/__init__.py
src/armet/connectors/bottle/__init__.py
# -*- coding: utf-8 -*- from __future__ import print_function, unicode_literals, division def is_available(*capacities): """ Detects if the environment is available for use in the (optionally) specified capacities. """ try: # Attempted import import bottle # flake8: noqa # TODO: Add additional checks to assert that flask is actually # in use and available. # Detected connector. return True except ImportError: # Failed to import. return False
Add detection support for bottle.
Add detection support for bottle.
Python
mit
armet/python-armet
--- +++ @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +from __future__ import print_function, unicode_literals, division + + +def is_available(*capacities): + """ + Detects if the environment is available for use in + the (optionally) specified capacities. + """ + try: + # Attempted import + import bottle # flake8: noqa + + # TODO: Add additional checks to assert that flask is actually + # in use and available. + + # Detected connector. + return True + + except ImportError: + # Failed to import. + return False
0433623b8e15559fe304e6406e58b1cd2639493f
apps/polls/tests.py
apps/polls/tests.py
""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase class SimpleTest(TestCase): def test_basic_addition(self): """ Tests that 1 + 1 always equals 2. """ self.assertEqual(1 + 1, 2)
import datetime from django.utils import timezone from django.test import TestCase from apps.polls.models import Poll class PollMethodTests(TestCase): def test_was_published_recently_with_future_poll(self): """ was_published_recently() should return False for polls whose pub_date is in the future """ future_poll = Poll(pub_date=timezone.now() + datetime.timedelta(days=30)) self.assertEqual(future_poll.was_published_recently(), False)
Create a test to expose the bug
Create a test to expose the bug
Python
bsd-3-clause
datphan/teracy-tutorial
--- +++ @@ -1,16 +1,16 @@ -""" -This file demonstrates writing tests using the unittest module. These will pass -when you run "manage.py test". +import datetime -Replace this with more appropriate tests for your application. -""" - +from django.utils import timezone from django.test import TestCase +from apps.polls.models import Poll -class SimpleTest(TestCase): - def test_basic_addition(self): +class PollMethodTests(TestCase): + + def test_was_published_recently_with_future_poll(self): """ - Tests that 1 + 1 always equals 2. + was_published_recently() should return False for polls whose + pub_date is in the future """ - self.assertEqual(1 + 1, 2) + future_poll = Poll(pub_date=timezone.now() + datetime.timedelta(days=30)) + self.assertEqual(future_poll.was_published_recently(), False)
da952803636a0701331008a025b6789de89ce152
modloader/modclass.py
modloader/modclass.py
import modinfo class Mod(): """The Mod class This is supposed to act like a superclass for mods. Execution order is as follows: mod_load -> mod_complete """ def mod_info(self): """Get the mod info Returns: A tuple with the name, version, and author """ raise Exception("Mod info isn't overriden") def mod_load(self): """Executes when a mod is loaded This is where you put patcher code Other mods may not be fully loaded yet. If you want this functionality, see mod_complete """ pass def mod_complete(self): """Executes when all mods are loaded""" pass def loadable_mod(modclass): """Annotation to add a Mod subclass to the mod list Args: modclass (Mod): The Mod class Raises: Exception: If the given class is not a subclass of Mod """ if not issubclass(modclass, Mod): raise Exception("Class must be a subclass of Mod") mod = modclass() # Create a new instance of the class mod_name, _, _ = mod.mod_info() # Get just the mod name mod.mod_load() # Load the mod modinfo.add_mod(mod_name, mod)
import modinfo import sys class Mod(): """The Mod class This is supposed to act like a superclass for mods. Execution order is as follows: mod_load -> mod_complete """ def mod_info(self): """Get the mod info Returns: A tuple with the name, version, and author """ raise Exception("Mod info isn't overriden") def mod_load(self): """Executes when a mod is loaded This is where you put patcher code Other mods may not be fully loaded yet. If you want this functionality, see mod_complete """ pass def mod_complete(self): """Executes when all mods are loaded""" pass def loadable_mod(modclass): """Annotation to add a Mod subclass to the mod list Args: modclass (Mod): The Mod class Raises: Exception: If the given class is not a subclass of Mod """ if not issubclass(modclass, Mod): raise Exception("Class must be a subclass of Mod") mod = modclass() # Create a new instance of the class mod_name, version, author = mod.mod_info() mod.mod_load() # Load the mod modinfo.add_mod(modclass.__module__, (mod, mod_name, version, author, sys.modules[modclass.__module__]))
Store more data about a mod in the registry
Store more data about a mod in the registry
Python
mit
AWSW-Modding/AWSW-Modtools
--- +++ @@ -1,4 +1,5 @@ import modinfo +import sys class Mod(): """The Mod class @@ -40,6 +41,6 @@ raise Exception("Class must be a subclass of Mod") mod = modclass() # Create a new instance of the class - mod_name, _, _ = mod.mod_info() # Get just the mod name + mod_name, version, author = mod.mod_info() mod.mod_load() # Load the mod - modinfo.add_mod(mod_name, mod) + modinfo.add_mod(modclass.__module__, (mod, mod_name, version, author, sys.modules[modclass.__module__]))
17655f4b099ac840712dd95ad989f7b41301b83c
case_conversion/__init__.py
case_conversion/__init__.py
from case_conversion import ( camelcase, pascalcase, snakecase, dashcase, kebabcase, spinalcase, constcase, dotcase, separate_words, slashcase, backslashcase)
from __future__ import absolute_import from .case_conversion import ( camelcase, pascalcase, snakecase, dashcase, kebabcase, spinalcase, constcase, dotcase, separate_words, slashcase, backslashcase)
Fix import errors from implicit-relative imports.
Fix import errors from implicit-relative imports.
Python
mit
AlejandroFrias/case-conversion
--- +++ @@ -1,3 +1,5 @@ -from case_conversion import ( +from __future__ import absolute_import + +from .case_conversion import ( camelcase, pascalcase, snakecase, dashcase, kebabcase, spinalcase, constcase, dotcase, separate_words, slashcase, backslashcase)
2945ae3bb8dd85bd96546cef4ff1e297774d7190
checker/checker/__init__.py
checker/checker/__init__.py
#!/usr/bin/python3 from checker.local import LocalChecker as BaseChecker #from checker.contest import ContestChecker as BaseChecker OK = 0 TIMEOUT = 1 NOTWORKING = 2 NOTFOUND = 3
#!/usr/bin/python3 from checker.local import LocalChecker as BaseChecker #from checker.contest import ContestChecker as BaseChecker OK = 0 TIMEOUT = 1 NOTWORKING = 2 NOTFOUND = 3 _mapping = ["OK", "TIMEOUT", "NOTWORKING", "NOTFOUND"] def string_to_result(strresult): return _mapping.index(strresult) def result_to_string(result): return _mapping[result]
Add forward/reverse mapping of checkerstati
Add forward/reverse mapping of checkerstati
Python
isc
fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver
--- +++ @@ -7,3 +7,11 @@ TIMEOUT = 1 NOTWORKING = 2 NOTFOUND = 3 + +_mapping = ["OK", "TIMEOUT", "NOTWORKING", "NOTFOUND"] + +def string_to_result(strresult): + return _mapping.index(strresult) + +def result_to_string(result): + return _mapping[result]
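The commit above adds a cheap two-way lookup between integer result codes and their names by indexing into a list; the standalone sketch below replays that round-trip (the constants are copied from the record, the assertions are only illustrative).

# Two-way lookup between integer result codes and their string names,
# mirroring the list-based mapping added in the commit above.
OK, TIMEOUT, NOTWORKING, NOTFOUND = range(4)
_mapping = ["OK", "TIMEOUT", "NOTWORKING", "NOTFOUND"]

def string_to_result(strresult):
    # list.index raises ValueError for an unknown name
    return _mapping.index(strresult)

def result_to_string(result):
    return _mapping[result]

assert string_to_result("TIMEOUT") == TIMEOUT == 1
assert result_to_string(NOTFOUND) == "NOTFOUND"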
1a837e84a129e99f7734fe0ffdc6ff3a239ecc4a
ci/generate_pipeline_yml.py
ci/generate_pipeline_yml.py
#!/usr/bin/env python import os from jinja2 import Template clusters = ['2_3', '2_4', '2_5', '2_6'] # Commenting out this as we only have one example and it breaks tiles = [] # [d for d in os.listdir('../examples') if os.path.isdir(os.path.join('../examples', d))] with open('pipeline.yml.jinja2', 'r') as f: t = Template(f.read()); with open('pipeline.yml', 'w') as f: f.write(t.render(clusters=clusters, tiles=tiles)) print("Successfully generated pipeline.yml")
#!/usr/bin/env python import os from jinja2 import Template clusters = ['2_5', '2_6', '2_7'] # Commenting out this as we only have one example and it breaks tiles = [] # [d for d in os.listdir('../examples') if os.path.isdir(os.path.join('../examples', d))] with open('pipeline.yml.jinja2', 'r') as f: t = Template(f.read()); with open('pipeline.yml', 'w') as f: f.write(t.render(clusters=clusters, tiles=tiles)) print("Successfully generated pipeline.yml")
Remove 2.3 and 2.4 from CI pipeline
Remove 2.3 and 2.4 from CI pipeline
Python
apache-2.0
cf-platform-eng/tile-generator,cf-platform-eng/tile-generator,cf-platform-eng/tile-generator,cf-platform-eng/tile-generator
--- +++ @@ -3,7 +3,7 @@ import os from jinja2 import Template -clusters = ['2_3', '2_4', '2_5', '2_6'] +clusters = ['2_5', '2_6', '2_7'] # Commenting out this as we only have one example and it breaks tiles = [] # [d for d in os.listdir('../examples') if os.path.isdir(os.path.join('../examples', d))]
60317dda9795391dd6468b573f5e1038ae1fe384
src/apps/utils/db.py
src/apps/utils/db.py
# -*- coding: utf-8 -*- from __future__ import absolute_import def retrieve_in_order_from_db(model, ids): """ Retrieve entities of the given model from the RDBMS in order given their ids. :param model: model of the entities :param ids: ids of the entities :return: a list of entities """ # Retrieve from RDBMS entities = model.objects.in_bulk(ids) #TODO: prefetch_related # Order by search order ordered_entities = [entities.get(id, None) for id in ids] # Filter not found entities filtered_entities = filter(None, ordered_entities) return filtered_entities
# -*- coding: utf-8 -*- from __future__ import absolute_import def retrieve_in_order_from_db(model, ids, prefetch=True): """ Retrieve entities of the given model from the RDBMS in order given their ids. :param model: model of the entities :param ids: ids of the entities :param prefetch: prefetch many-to-many relationships :return: a list of entities """ # Prefetch related if prefetch: relationships = [m2m.attname for m2m in model._meta._many_to_many()] entities = model.objects.all().prefetch_related(*relationships).in_bulk(ids) else: entities = model.objects.in_bulk(ids) # Order by search order ordered_entities = [entities.get(id, None) for id in ids] # Filter not found entities filtered_entities = filter(None, ordered_entities) return filtered_entities
Optimize DB access: use of prefetch_related()
Optimize DB access: use of prefetch_related()
Python
apache-2.0
dvalcarce/filmyou-web,dvalcarce/filmyou-web,dvalcarce/filmyou-web
--- +++ @@ -3,17 +3,20 @@ from __future__ import absolute_import -def retrieve_in_order_from_db(model, ids): +def retrieve_in_order_from_db(model, ids, prefetch=True): """ Retrieve entities of the given model from the RDBMS in order given their ids. :param model: model of the entities :param ids: ids of the entities + :param prefetch: prefetch many-to-many relationships :return: a list of entities """ - # Retrieve from RDBMS - entities = model.objects.in_bulk(ids) - - #TODO: prefetch_related + # Prefetch related + if prefetch: + relationships = [m2m.attname for m2m in model._meta._many_to_many()] + entities = model.objects.all().prefetch_related(*relationships).in_bulk(ids) + else: + entities = model.objects.in_bulk(ids) # Order by search order ordered_entities = [entities.get(id, None) for id in ids]
b76b3cbe0d86bd5037ccfd21086ab50803606ec2
autobuilder/webhooks.py
autobuilder/webhooks.py
from buildbot.status.web.hooks.github import GitHubEventHandler from twisted.python import log import abconfig class AutobuilderGithubEventHandler(GitHubEventHandler): def handle_push(self, payload): # This field is unused: user = None # user = payload['pusher']['name'] repo = payload['repository']['name'] repo_url = payload['repository']['url'] # NOTE: what would be a reasonable value for project? # project = request.args.get('project', [''])[0] project = abconfig.get_project_for_url(repo_url, default_if_not_found=payload['repository']['full_name']) changes = self._process_change(payload, user, repo, repo_url, project) log.msg("Received %d changes from github" % len(changes)) return changes, 'git'
from buildbot.status.web.hooks.github import GitHubEventHandler from twisted.python import log import abconfig def codebasemap(payload): return abconfig.get_project_for_url(payload['repository']['url']) class AutobuilderGithubEventHandler(GitHubEventHandler): def __init__(self, secret, strict codebase=None): if codebase is None: codebase = codebasemap GitHubEventHandler.__init__(self, secret, strict, codebase) def handle_push(self, payload): # This field is unused: user = None # user = payload['pusher']['name'] repo = payload['repository']['name'] repo_url = payload['repository']['url'] # NOTE: what would be a reasonable value for project? # project = request.args.get('project', [''])[0] project = abconfig.get_project_for_url(repo_url, default_if_not_found=payload['repository']['full_name']) changes = self._process_change(payload, user, repo, repo_url, project) log.msg("Received %d changes from github" % len(changes)) return changes, 'git'
Add a codebase generator to the GitHub web hook handler, to map the URL to the repo name for use as the codebase.
Add a codebase generator to the GitHub web hook handler, to map the URL to the repo name for use as the codebase.
Python
mit
madisongh/autobuilder
--- +++ @@ -2,7 +2,15 @@ from twisted.python import log import abconfig +def codebasemap(payload): + return abconfig.get_project_for_url(payload['repository']['url']) + class AutobuilderGithubEventHandler(GitHubEventHandler): + + def __init__(self, secret, strict codebase=None): + if codebase is None: + codebase = codebasemap + GitHubEventHandler.__init__(self, secret, strict, codebase) def handle_push(self, payload): # This field is unused:
fa1a383aa194f028e9aa6eb4ff474281dd7c5bfe
team2/python/rasacalculator.py
team2/python/rasacalculator.py
import sys;s='%s: lines %d, RaSa: %d' def u(z): r=I=0;b=1 for m in open(z): r+=1 for k in m: if '{'==k:b+=1 if ';'==k:I+=b if '}'==k:b-=1 return(r,I) c=D=0 for z in sys.argv[1:]: r,I=u(z);c+=r;D+=I;print s%(z,r,I) print s%('total',c,D)
#!/usr/bin/env python import argparse def calculate_file_rasa(file_path): row_count = 0 multiplier = 1 rasa = 0 for line in open(file_path): row_count += 1 for char in line: if char == '{': multiplier += 1 if char == ';': rasa += multiplier if char == '}': multiplier -= 1 return (row_count, rasa) def main(args): total_rows = 0 total_rasa = 0 for file_path in args.argument: row_count, rasa = calculate_file_rasa(file_path) total_rows += row_count total_rasa += rasa print '%s: lines %d, RaSa: %d' % (file_path, row_count, rasa) print 'total: lines %d, RaSa: %d' % (total_rows, total_rasa) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('argument', nargs='*') main(parser.parse_args())
Revert to try cleanest solution
Revert to try cleanest solution
Python
mit
jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015,jketo/arcusysdevday2015
--- +++ @@ -1,14 +1,36 @@ -import sys;s='%s: lines %d, RaSa: %d' -def u(z): - r=I=0;b=1 - for m in open(z): - r+=1 - for k in m: - if '{'==k:b+=1 - if ';'==k:I+=b - if '}'==k:b-=1 - return(r,I) -c=D=0 -for z in sys.argv[1:]: - r,I=u(z);c+=r;D+=I;print s%(z,r,I) -print s%('total',c,D) +#!/usr/bin/env python +import argparse + + +def calculate_file_rasa(file_path): + row_count = 0 + multiplier = 1 + rasa = 0 + + for line in open(file_path): + row_count += 1 + for char in line: + if char == '{': + multiplier += 1 + if char == ';': + rasa += multiplier + if char == '}': + multiplier -= 1 + return (row_count, rasa) + + +def main(args): + total_rows = 0 + total_rasa = 0 + for file_path in args.argument: + row_count, rasa = calculate_file_rasa(file_path) + total_rows += row_count + total_rasa += rasa + print '%s: lines %d, RaSa: %d' % (file_path, row_count, rasa) + print 'total: lines %d, RaSa: %d' % (total_rows, total_rasa) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('argument', nargs='*') + main(parser.parse_args())
c03241320138fe7b545b43514e93615473270b0d
netbox/dcim/fields.py
netbox/dcim/fields.py
from django.core.exceptions import ValidationError from django.core.validators import MinValueValidator, MaxValueValidator from django.db import models from netaddr import AddrFormatError, EUI, mac_unix_expanded import pprint class ASNField(models.BigIntegerField): description = "32-bit ASN field" default_validators = [ MinValueValidator(1), MaxValueValidator(4294967295), ] class mac_unix_expanded_uppercase(mac_unix_expanded): word_fmt = '%.2X' class MACAddressField(models.Field): description = "PostgreSQL MAC Address field" def python_type(self): return EUI def from_db_value(self, value, expression, connection, context): return self.to_python(value) def to_python(self, value): if value is None: return value try: return EUI(value, version=48, dialect=mac_unix_expanded_uppercase) except AddrFormatError as e: raise ValidationError("Invalid MAC address format: {}".format(value)) def db_type(self, connection): return 'macaddr' def get_prep_value(self, value): if not value: return None return str(self.to_python(value))
from django.core.exceptions import ValidationError from django.core.validators import MinValueValidator, MaxValueValidator from django.db import models from netaddr import AddrFormatError, EUI, mac_unix_expanded class ASNField(models.BigIntegerField): description = "32-bit ASN field" default_validators = [ MinValueValidator(1), MaxValueValidator(4294967295), ] class mac_unix_expanded_uppercase(mac_unix_expanded): word_fmt = '%.2X' class MACAddressField(models.Field): description = "PostgreSQL MAC Address field" def python_type(self): return EUI def from_db_value(self, value, expression, connection, context): return self.to_python(value) def to_python(self, value): if value is None: return value try: return EUI(value, version=48, dialect=mac_unix_expanded_uppercase) except AddrFormatError as e: raise ValidationError("Invalid MAC address format: {}".format(value)) def db_type(self, connection): return 'macaddr' def get_prep_value(self, value): if not value: return None return str(self.to_python(value))
Remove unneeded import from testing.
Remove unneeded import from testing.
Python
apache-2.0
lampwins/netbox,digitalocean/netbox,lampwins/netbox,digitalocean/netbox,lampwins/netbox,digitalocean/netbox,lampwins/netbox,digitalocean/netbox
--- +++ @@ -2,7 +2,6 @@ from django.core.validators import MinValueValidator, MaxValueValidator from django.db import models from netaddr import AddrFormatError, EUI, mac_unix_expanded -import pprint class ASNField(models.BigIntegerField):
4a8b1a7633279e3276fceb3e12eca852dc583764
baro.py
baro.py
from datetime import datetime import utils class Baro: """This class contains info about the Void Trader and is initialized with data in JSON format """ def __init__(self, data): self.config = data['Config'] self.start = datetime.fromtimestamp(data['Activation']['sec']) self.end = datetime.fromtimestamp(data['Expiry']['sec']) self.location = data['Node'] self.manifest = data['Manifest'] def __str__(self): """Returns a string with all the information about Baro's offers """ baroItemString = "" if datetime.now() < self.start: return "None" else: for item in self.manifest: baroItemString += ('== '+ str(item["ItemType"]) +' ==\n' '- price: '+ str(item["PrimePrice"]) +' ducats + '+ str(item["RegularPrice"]) +'cr -\n\n' ) return baroItemString def get_end_string(self): """Returns a string containing Baro's departure time """ return timedelta_to_string(self.end - datetime.now()) def get_start_string(self): """Returns a string containing Baro's arrival time """ return timedelta_to_string(self.start - datetime.now())
from datetime import datetime import utils class Baro: """This class contains info about the Void Trader and is initialized with data in JSON format """ def __init__(self, data): self.config = data['Config'] self.start = datetime.fromtimestamp(data['Activation']['sec']) self.end = datetime.fromtimestamp(data['Expiry']['sec']) self.location = data['Node'] self.manifest = data['Manifest'] def __str__(self): """Returns a string with all the information about Baro's offers """ baroItemString = "" if datetime.now() < self.start: return "None" else: for item in self.manifest: baroItemString += ('== '+ str(item["ItemType"]) +' ==\n' '- price: '+ str(item["PrimePrice"]) +' ducats + '+ str(item["RegularPrice"]) +'cr -\n\n' ) return baroItemString def is_active(self): """Returns True if the Void Trader is currently active, False otherwise """ return (self.start < datetime.now() and self.end > datetime.now()) def get_end_string(self): """Returns a string containing Baro's departure time """ return timedelta_to_string(self.end - datetime.now()) def get_start_string(self): """Returns a string containing Baro's arrival time """ return timedelta_to_string(self.start - datetime.now())
Add is_active() method to the Baro class
Add is_active() method to the Baro class
Python
mit
pabletos/Hubot-Warframe,pabletos/Hubot-Warframe
--- +++ @@ -31,6 +31,12 @@ return baroItemString + def is_active(self): + """Returns True if the Void Trader is currently active, False otherwise + + """ + return (self.start < datetime.now() and self.end > datetime.now()) + def get_end_string(self): """Returns a string containing Baro's departure time
3e8921b2edcf8a675b6ed496cf5e282c76cc2070
retrieveData.py
retrieveData.py
#!/usr/bin/env python import json, os, requests from models import db, FoodMenu, FoodServices key = os.environ.get('UWOPENDATA_APIKEY') def getData(service): payload = {'key': key, 'service': service} r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload) return r foodMenu = getData('FoodMenu').text foodMenuData = FoodMenu(foodMenu) serviceInfo = getData('FoodServices').text serviceInfoData = FoodServices(serviceInfo) db.session.add(foodMenuData) db.session.add(serviceInfoData) db.session.commit()
#!/usr/bin/env python import json, os, requests from models import db, FoodMenu, FoodServices key = os.environ.get('UWOPENDATA_APIKEY') def getData(service): payload = {'key': key, 'service': service} r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload) return r def retrieve(): payload = {'key': key} url = os.environ.get('API_URL') r = requests.get(url, params=payload) return r foodMenu = retrieve().text foodMenuData = FoodMenu(foodMenu) serviceInfo = getData('FoodServices').text serviceInfoData = FoodServices(serviceInfo) db.session.add(foodMenuData) db.session.add(serviceInfoData) db.session.commit()
Update retrieve() for FoodMenu data
Update retrieve() for FoodMenu data
Python
mit
alykhank/FoodMenu,alykhank/FoodMenu,alykhank/FoodMenu
--- +++ @@ -9,7 +9,13 @@ r = requests.get('http://api.uwaterloo.ca/public/v1/', params=payload) return r -foodMenu = getData('FoodMenu').text +def retrieve(): + payload = {'key': key} + url = os.environ.get('API_URL') + r = requests.get(url, params=payload) + return r + +foodMenu = retrieve().text foodMenuData = FoodMenu(foodMenu) serviceInfo = getData('FoodServices').text serviceInfoData = FoodServices(serviceInfo)
d86e475e0d87399ba7487f7b41b12657de997665
opentreemap/registration_backend/urls.py
opentreemap/registration_backend/urls.py
from django.conf.urls import patterns from django.conf.urls import include from django.conf.urls import url from django.views.generic.base import TemplateView from views import RegistrationView, ActivationView urlpatterns = patterns('', url(r'^activate/complete/$', TemplateView.as_view(template_name='registration/activation_complete.html'), # NOQA name='registration_activation_complete'), # Activation keys get matched by \w+ instead of the more specific # [a-fA-F0-9]{40} because a bad activation key should still get # to the view; that way it can return a sensible "invalid key" # message instead of a confusing 404. url(r'^activate/(?P<activation_key>\w+)/$', ActivationView.as_view(), name='registration_activate'), url(r'^register/$', RegistrationView.as_view(), name='registration_register'), url(r'^register/complete/$', TemplateView.as_view(template_name='registration/registration_complete.html'), # NOQA name='registration_complete'), url(r'^register/closed/$', TemplateView.as_view(template_name='registration/registration_closed.html'), # NOQA name='registration_disallowed'), (r'', include('registration.auth_urls')), ) # NOQA
from django.conf.urls import patterns from django.conf.urls import include from django.conf.urls import url from django.views.generic.base import TemplateView from views import RegistrationView, ActivationView urlpatterns = patterns('', url(r'^activation-complete/$', TemplateView.as_view(template_name='registration/activation_complete.html'), # NOQA name='registration_activation_complete'), # Activation keys get matched by \w+ instead of the more specific # [a-fA-F0-9]{40} because a bad activation key should still get # to the view; that way it can return a sensible "invalid key" # message instead of a confusing 404. url(r'^activate/(?P<activation_key>\w+)/$', ActivationView.as_view(), name='registration_activate'), url(r'^register/$', RegistrationView.as_view(), name='registration_register'), url(r'^register/complete/$', TemplateView.as_view(template_name='registration/registration_complete.html'), # NOQA name='registration_complete'), url(r'^register/closed/$', TemplateView.as_view(template_name='registration/registration_closed.html'), # NOQA name='registration_disallowed'), (r'', include('registration.auth_urls')), ) # NOQA
Change user activation complete URL to avoid conflicts
Change user activation complete URL to avoid conflicts Because a separate django app is overriding 'accounts/activate/w+' 'accounts/activate/complete' was never being hit. Fixes #1133
Python
agpl-3.0
recklessromeo/otm-core,clever-crow-consulting/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,recklessromeo/otm-core,maurizi/otm-core,maurizi/otm-core,RickMohr/otm-core,recklessromeo/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,RickMohr/otm-core,RickMohr/otm-core,maurizi/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core
--- +++ @@ -8,7 +8,7 @@ urlpatterns = patterns('', - url(r'^activate/complete/$', + url(r'^activation-complete/$', TemplateView.as_view(template_name='registration/activation_complete.html'), # NOQA name='registration_activation_complete'), # Activation keys get matched by \w+ instead of the more specific
553735a857875abf54bc71b7b73569d223b4ccf7
tests/basic/test_union_find.py
tests/basic/test_union_find.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_union_find ---------------------------------- Tests for `python_algorithms.union_find` module. """ import unittest from python_algorithms.basic.union_find import UF class TestUnionFind(unittest.TestCase): def setUp(self): self.N = 10 self.uf = UF(self.N) self.pairs = ((0, 1), (1, 2), (4, 5), (7, 8), (8, 9)) def test_count(self): self.assertEqual(self.uf.count(), self.N) self.assertEqual(self.count_sets(), self.N) for x, y in self.pairs: self.uf.union(x, y) n = self.N - len(self.pairs) self.assertEqual(self.uf.count(), n) self.assertEqual(self.count_sets(), n) def test_str_empty_uf(self): self.assertEqual(str(UF(0)), "") def test_str_stack(self): self.assertEqual(str(self.uf), " ".join([str(x) for x in range(self.N)])) def count_sets(self): return len(set([self.uf.find(x) for x in range(self.N)])) def tearDown(self): pass if __name__ == '__main__': unittest.main()
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_union_find ---------------------------------- Tests for `python_algorithms.union_find` module. """ import unittest from python_algorithms.basic.union_find import UF class TestUnionFind(unittest.TestCase): def setUp(self): self.N = 10 self.uf = UF(self.N) self.pairs = ((0, 1), (1, 2), (4, 5), (7, 8), (8, 9)) def test_count(self): self.assertEqual(self.uf.count(), self.N) self.assertEqual(self.count_sets(), self.N) for x, y in self.pairs: self.uf.union(x, y) n = self.N - len(self.pairs) self.assertEqual(self.uf.count(), n) self.assertEqual(self.count_sets(), n) def test_str_empty_uf(self): self.assertEqual(str(UF(0)), "") def test_str_uf(self): self.assertEqual(str(self.uf), " ".join([str(x) for x in range(self.N)])) def count_sets(self): return len(set([self.uf.find(x) for x in range(self.N)])) def tearDown(self): pass if __name__ == '__main__': unittest.main()
Fix minor typing issue in union find test.
Fix minor typing issue in union find test.
Python
bsd-3-clause
cjauvin/python_algorithms,ofenerci/python_algorithms,ofenerci/python_algorithms,pombredanne/python_algorithms,pombredanne/python_algorithms,pombredanne/python_algorithms,cjauvin/python_algorithms,cjauvin/python_algorithms,ofenerci/python_algorithms
--- +++ @@ -33,7 +33,7 @@ def test_str_empty_uf(self): self.assertEqual(str(UF(0)), "") - def test_str_stack(self): + def test_str_uf(self): self.assertEqual(str(self.uf), " ".join([str(x) for x in range(self.N)])) def count_sets(self):
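For readers who do not have python_algorithms installed, the sketch below is a from-scratch union-find with path halving — an illustrative stand-in, not the library's UF implementation — that reproduces the count() expectation the test above checks.

# Minimal union-find with path halving, written from scratch for illustration;
# it only mimics the count()/union()/find() surface the test relies on.
class UF(object):
    def __init__(self, n):
        self._id = list(range(n))
        self._count = n

    def find(self, x):
        while x != self._id[x]:
            self._id[x] = self._id[self._id[x]]  # path halving
            x = self._id[x]
        return x

    def union(self, x, y):
        root_x, root_y = self.find(x), self.find(y)
        if root_x != root_y:
            self._id[root_x] = root_y
            self._count -= 1

    def count(self):
        return self._count

uf = UF(10)
for x, y in ((0, 1), (1, 2), (4, 5), (7, 8), (8, 9)):
    uf.union(x, y)
print(uf.count())  # 5, i.e. N minus the number of effective unions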
566fc15f136076db5c421ca18f8b1fcb3d332229
ovp_projects/views.py
ovp_projects/views.py
from ovp_projects import serializers from ovp_projects import models from ovp_users import models as users_models from rest_framework import mixins from rest_framework import viewsets from rest_framework import response from rest_framework import status class ProjectResourceViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): """ ProjectResourceViewSet resource endpoint """ queryset = models.Project.objects.all() lookup_field = 'slug' lookup_value_regex = '[^/]+' # default is [^/.]+ - here we're allowing dots in the url slug field def get_serializer_class(self): #if self.action == 'create': return serializers.ProjectCreateSerializer def create(self, request, *args, **kwargs): user = users_models.User.objects.all().first() request.data['owner'] = user.pk serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) serializer.save() headers = self.get_success_headers(serializer.data) return response.Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
from ovp_projects import serializers from ovp_projects import models from ovp_users import models as users_models from rest_framework import mixins from rest_framework import viewsets from rest_framework import response from rest_framework import status class ProjectResourceViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): """ ProjectResourceViewSet resource endpoint """ queryset = models.Project.objects.all() lookup_field = 'slug' lookup_value_regex = '[^/]+' # default is [^/.]+ - here we're allowing dots in the url slug field def get_serializer_class(self): if self.action == 'create': return serializers.ProjectCreateSerializer return serializers.ProjectSearchSerializer def create(self, request, *args, **kwargs): user = users_models.User.objects.all().first() request.data['owner'] = user.pk serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) serializer.save() headers = self.get_success_headers(serializer.data) return response.Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
Return ProjectSearchSerializer on ProjectResourceViewSet if action != 'Create'
Return ProjectSearchSerializer on ProjectResourceViewSet if action != 'Create'
Python
agpl-3.0
OpenVolunteeringPlatform/django-ovp-projects,OpenVolunteeringPlatform/django-ovp-projects
--- +++ @@ -17,8 +17,9 @@ lookup_value_regex = '[^/]+' # default is [^/.]+ - here we're allowing dots in the url slug field def get_serializer_class(self): - #if self.action == 'create': - return serializers.ProjectCreateSerializer + if self.action == 'create': + return serializers.ProjectCreateSerializer + return serializers.ProjectSearchSerializer def create(self, request, *args, **kwargs): user = users_models.User.objects.all().first()
9f9441cf43e66780ca7f24197d3cd9ece923dd30
kiva/quartz/__init__.py
kiva/quartz/__init__.py
# :Author: Robert Kern # :Copyright: 2004, Enthought, Inc. # :License: BSD Style from mac_context import get_mac_context def get_macport(dc): """ Returns the Port or the CGContext of a wxDC (or child class) instance. """ if 'GetCGContext' in dir(dc): ptr = dc.GetCGContext() return int(ptr) else: from macport import get_macport as _get_macport return _get_macport(str(dc.this))
# :Author: Robert Kern # :Copyright: 2004, Enthought, Inc. # :License: BSD Style try: from mac_context import get_mac_context except ImportError: get_mac_context = None def get_macport(dc): """ Returns the Port or the CGContext of a wxDC (or child class) instance. """ if 'GetCGContext' in dir(dc): ptr = dc.GetCGContext() return int(ptr) else: from macport import get_macport as _get_macport return _get_macport(str(dc.this))
Allow kiva.quartz to be imported on non-darwin platforms without error.
Allow kiva.quartz to be imported on non-darwin platforms without error.
Python
bsd-3-clause
tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable
--- +++ @@ -2,7 +2,10 @@ # :Copyright: 2004, Enthought, Inc. # :License: BSD Style -from mac_context import get_mac_context +try: + from mac_context import get_mac_context +except ImportError: + get_mac_context = None def get_macport(dc):
1d361c8a743868b66bec7bd506aa0e33b19ed59c
opps/__init__.py
opps/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import pkg_resources pkg_resources.declare_namespace(__name__) VERSION = (0, 2, 1) __version__ = ".".join(map(str, VERSION)) __status__ = "Development" __description__ = u"Open Source Content Management Platform - CMS for the " u"magazines, newspappers websites and portals with " u"high-traffic, using the Django Framework." __author__ = u"Thiago Avelino" __credits__ = ['Bruno Rocha'] __email__ = u"[email protected]" __license__ = u"MIT License" __copyright__ = u"Copyright 2013, Opps Project"
#!/usr/bin/env python # -*- coding: utf-8 -*- import pkg_resources pkg_resources.declare_namespace(__name__) VERSION = (0, 2, 2) __version__ = ".".join(map(str, VERSION)) __status__ = "Development" __description__ = u"Open Source Content Management Platform - CMS for the " u"magazines, newspappers websites and portals with " u"high-traffic, using the Django Framework." __author__ = u"Thiago Avelino" __credits__ = ['Bruno Rocha'] __email__ = u"[email protected]" __license__ = u"MIT License" __copyright__ = u"Copyright 2013, Opps Project"
Set new developer version `0.2.2`
Set new developer version `0.2.2`
Python
mit
YACOWS/opps,YACOWS/opps,williamroot/opps,opps/opps,jeanmask/opps,opps/opps,opps/opps,opps/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,williamroot/opps,YACOWS/opps
--- +++ @@ -4,7 +4,7 @@ pkg_resources.declare_namespace(__name__) -VERSION = (0, 2, 1) +VERSION = (0, 2, 2) __version__ = ".".join(map(str, VERSION)) __status__ = "Development"
412265731720b8df9630cbe1ec3bd307986137ad
bob/db/base/__init__.py
bob/db/base/__init__.py
#!/usr/bin/env python # Andre Anjos <[email protected]> # Thu 23 Jun 20:22:28 2011 CEST # vim: set fileencoding=utf-8 : """The db package contains simplified APIs to access data for various databases that can be used in Biometry, Machine Learning or Pattern Classification.""" import pkg_resources __version__ = pkg_resources.require(__name__)[0].version def get_config(): """Returns a string containing the configuration information. """ import bob.extension return bob.extension.get_config(__name__) from . import utils, driver from .file import File from .database import Database, SQLiteDatabase # gets sphinx autodoc done right - don't remove it __all__ = [_ for _ in dir() if not _.startswith('_')]
#!/usr/bin/env python # Andre Anjos <[email protected]> # Thu 23 Jun 20:22:28 2011 CEST # vim: set fileencoding=utf-8 : """The db package contains simplified APIs to access data for various databases that can be used in Biometry, Machine Learning or Pattern Classification.""" import pkg_resources from . import utils, driver from .file import File from .database import Database, SQLiteDatabase __version__ = pkg_resources.require(__name__)[0].version def get_config(): """Returns a string containing the configuration information. """ import bob.extension return bob.extension.get_config(__name__) # gets sphinx autodoc done right - don't remove it __all__ = [_ for _ in dir() if not _.startswith('_')]
Add a low-level database API
Add a low-level database API
Python
bsd-3-clause
bioidiap/bob.db.base
--- +++ @@ -8,7 +8,11 @@ import pkg_resources +from . import utils, driver +from .file import File +from .database import Database, SQLiteDatabase __version__ = pkg_resources.require(__name__)[0].version + def get_config(): """Returns a string containing the configuration information. @@ -17,9 +21,5 @@ return bob.extension.get_config(__name__) -from . import utils, driver -from .file import File -from .database import Database, SQLiteDatabase - # gets sphinx autodoc done right - don't remove it __all__ = [_ for _ in dir() if not _.startswith('_')]
98b6f81f68ce4338e932afc14b7b9d4c8a810e71
src/dirtyfields/dirtyfields.py
src/dirtyfields/dirtyfields.py
# Adapted from http://stackoverflow.com/questions/110803/dirty-fields-in-django from django.db.models.signals import post_save class DirtyFieldsMixin(object): def __init__(self, *args, **kwargs): super(DirtyFieldsMixin, self).__init__(*args, **kwargs) post_save.connect(reset_state, sender=self.__class__, dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__) reset_state(sender=self.__class__, instance=self) def _as_dict(self): return dict([(f.name, getattr(self, f.name)) for f in self._meta.local_fields if not f.rel]) def get_dirty_fields(self): new_state = self._as_dict() return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]]) def is_dirty(self): # in order to be dirty we need to have been saved at least once, so we # check for a primary key and we need our dirty fields to not be empty if not self.pk: return True return {} != self.get_dirty_fields() def reset_state(sender, instance, **kwargs): instance._original_state = instance._as_dict()
# Adapted from http://stackoverflow.com/questions/110803/dirty-fields-in-django from django.db.models.signals import post_save class DirtyFieldsMixin(object): def __init__(self, *args, **kwargs): super(DirtyFieldsMixin, self).__init__(*args, **kwargs) post_save.connect(reset_state, sender=self.__class__, dispatch_uid='%s-DirtyFieldsMixin-sweeper' % self.__class__.__name__) reset_state(sender=self.__class__, instance=self) def _as_dict(self): return dict([(f.name, f.to_python(getattr(self, f.name))) for f in self._meta.local_fields if not f.rel]) def get_dirty_fields(self): new_state = self._as_dict() return dict([(key, value) for key, value in self._original_state.iteritems() if value != new_state[key]]) def is_dirty(self): # in order to be dirty we need to have been saved at least once, so we # check for a primary key and we need our dirty fields to not be empty if not self.pk: return True return {} != self.get_dirty_fields() def reset_state(sender, instance, **kwargs): instance._original_state = instance._as_dict()
Use field.to_python to do django type conversions on the field before checking if dirty.
Use field.to_python to do django type conversions on the field before checking if dirty. This solves issues where you might have a decimal field that you write a string to, eg: >>> m = MyModel.objects.get(id=1) >>> m.my_decimal_field Decimal('1.00') >>> m.my_decimal_field = u'1.00' # from a form or something >>> m.is_dirty() # currently evaluates to True, should evaluate to False False This pull request could probably use some unit testing, but it should be safe as the base class for django fields defines to_python as: def to_python(self, value): return value So, any field type that does not have an explicit to_python method will behave as before this change.
Python
bsd-3-clause
mattcaldwell/django-dirtyfields,georgemarshall/django-dirtyfields
--- +++ @@ -9,7 +9,7 @@ reset_state(sender=self.__class__, instance=self) def _as_dict(self): - return dict([(f.name, getattr(self, f.name)) for f in self._meta.local_fields if not f.rel]) + return dict([(f.name, f.to_python(getattr(self, f.name))) for f in self._meta.local_fields if not f.rel]) def get_dirty_fields(self): new_state = self._as_dict()
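The commit message above already sketches the failure with a DecimalField; the snippet below reproduces it without Django, using a hypothetical to_python() helper as a stand-in for the field's real conversion method.

from decimal import Decimal

def to_python(value):
    # Stand-in for a DecimalField's to_python(): coerce whatever was assigned
    # back to the field's native Python type before comparing.
    return value if isinstance(value, Decimal) else Decimal(value)

stored = Decimal("1.00")   # value as loaded from the database
assigned = "1.00"          # value written back from a form

print(stored == assigned)                        # False -> field looks dirty
print(to_python(stored) == to_python(assigned))  # True  -> clean after conversion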
1a2cabca5be1b9682e39db12bd52c26f5bb8b5b9
src/dicomweb_client/ext/gcp/session_utils.py
src/dicomweb_client/ext/gcp/session_utils.py
"""Session management utilities for Google Cloud Platform (GCP).""" from typing import Optional, Any try: import google.auth from google.auth.transport import requests as google_requests except ImportError: raise ImportError( 'The `dicomweb-client` package needs to be installed with the ' '"gcp" extra requirements to use this module, as follows: ' '`pip install dicomweb-client[gcp]`') import requests def create_session_from_gcp_credentials( google_credentials: Optional[Any] = None ) -> requests.Session: """Creates an authorized session for Google Cloud Platform. Parameters ---------- google_credentials: Any Google Cloud credentials. (see https://cloud.google.com/docs/authentication/production for more information on Google Cloud authentication). If not set, will be initialized to ``google.auth.default()``. Returns ------- requests.Session Google Cloud authorized session. """ if google_credentials is None: google_credentials, _ = google.auth.default( scopes=['https://www.googleapis.com/auth/cloud-platform'] ) return google_requests.AuthorizedSession(google_credentials)
"""Session management utilities for Google Cloud Platform (GCP).""" from typing import Optional, Any try: import google.auth from google.auth.transport import requests as google_requests except ImportError: raise ImportError( 'The `dicomweb-client` package needs to be installed with the ' '"gcp" extra requirements to use this module, as follows: ' '`pip install dicomweb-client[gcp]`') import requests def create_session_from_gcp_credentials( google_credentials: Optional[Any] = None ) -> requests.Session: """Creates an authorized session for Google Cloud Platform. Parameters ---------- google_credentials: Any Google Cloud credentials. (see https://cloud.google.com/docs/authentication/production for more information on Google Cloud authentication). If not set, will be initialized to ``google.auth.default()``. Returns ------- requests.Session Google Cloud authorized session. Note ---- Credentials will be read from environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` if set. """ if google_credentials is None: google_credentials, _ = google.auth.default( scopes=['https://www.googleapis.com/auth/cloud-platform'] ) return google_requests.AuthorizedSession(google_credentials)
Add note to gcp session utils method
Add note to gcp session utils method
Python
mit
MGHComputationalPathology/dicomweb-client
--- +++ @@ -29,6 +29,12 @@ ------- requests.Session Google Cloud authorized session. + + Note + ---- + Credentials will be read from environment variable + ``GOOGLE_APPLICATION_CREDENTIALS`` if set. + """ if google_credentials is None: google_credentials, _ = google.auth.default(
42efd09692dffc67e58050a24a49ee874a8c105d
stats/sendcommand.py
stats/sendcommand.py
import os import models import config import time from hashlib import md5 import json import serverinfo from google.appengine.ext import webapp """ { "info": { "name": "<name>", "start_utc": <long> }, "command": { "command": "<command name>", "<arg0 name>": "<arg0 value>", "<arg1 name>": "<arg1 value>" // etc } } """ class SendCommand(webapp.RequestHandler): def post(self): hash = self.request.body[:32] j = self.request.body[32:] m = md5(json + config.SENDCOMMAND_SECRET) if m.hexdigest() == hash: c = json.loads(j) serverinfo.ServerInfo.send_command(c['info'], json.dumps(c['command'])) if config.is_debug(): self.response.headers['Content-Type'] = 'text/plain' self.response.out.write('ok') else: if config.is_debug(): self.response.headers['Content-Type'] = 'text/plain' self.response.out.write('not ok')
import os import models import config import time from hashlib import md5 import json import serverinfo from google.appengine.ext import webapp """ { "info": { "name": "<name>", "start_utc": <long> }, "command": { "command": "<command name>", "<arg0 name>": "<arg0 value>", "<arg1 name>": "<arg1 value>" // etc } } """ class SendCommand(webapp.RequestHandler): def post(self): hash = self.request.body[:32] j = self.request.body[32:] m = md5(j + config.SENDCOMMAND_SECRET) if m.hexdigest() == hash: c = json.loads(j) serverinfo.ServerInfo.send_command(c['info'], json.dumps(c['command'])) if config.is_debug(): self.response.headers['Content-Type'] = 'text/plain' self.response.out.write('ok') else: if config.is_debug(): self.response.headers['Content-Type'] = 'text/plain' self.response.out.write('not ok')
Fix use of 'json' instead of 'j'
Fix use of 'json' instead of 'j' This bug was created when switching from simplejson to json module, due to python 2.5->2.7 migration. Any variables named 'json' needed to be renamed, and this is an instance that was missed.
Python
bsd-2-clause
spiffcode/hostile-takeover,spiffcode/hostile-takeover,spiffcode/hostile-takeover,spiffcode/hostile-takeover,spiffcode/hostile-takeover,spiffcode/hostile-takeover,spiffcode/hostile-takeover,spiffcode/hostile-takeover,spiffcode/hostile-takeover
--- +++ @@ -28,7 +28,7 @@ def post(self): hash = self.request.body[:32] j = self.request.body[32:] - m = md5(json + config.SENDCOMMAND_SECRET) + m = md5(j + config.SENDCOMMAND_SECRET) if m.hexdigest() == hash: c = json.loads(j) serverinfo.ServerInfo.send_command(c['info'],
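The handler above strips a 32-character hex digest off the body and recomputes it over the remaining payload plus a shared secret; the sketch below replays that check with a made-up secret and payload (and .encode() added so it runs on Python 3).

import json
from hashlib import md5

SECRET = 'example-secret'  # illustrative only, not the real SENDCOMMAND_SECRET

def sign(payload):
    return md5((payload + SECRET).encode()).hexdigest() + payload

def verify(body):
    digest, payload = body[:32], body[32:]
    # The point of the fix: hash the payload split from the body ('j'),
    # not something named after the json module.
    return md5((payload + SECRET).encode()).hexdigest() == digest

body = sign(json.dumps({"command": {"command": "noop"}}))
print(verify(body))             # True
print(verify('0' * 32 + '{}'))  # False - digest does not match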
84ec75ff6262d7926c0de87dffbeddb223fd190b
core/settings.py
core/settings.py
# -*- encoding: UTF-8 -*- from enum import IntEnum class LogType(IntEnum): CVN_STATUS = 0 AUTH_ERROR = 1 LOG_TYPE = ( (LogType.CVN_STATUS, 'CVN_STATUS'), (LogType.AUTH_ERROR, 'AUTH_ERROR'), ) BASE_URL_FLATPAGES = '/investigacion/faq/'
# -*- encoding: UTF-8 -*- from enum import IntEnum class LogType(IntEnum): CVN_STATUS = 0 AUTH_ERROR = 1 LOG_TYPE = ( (LogType.CVN_STATUS.value, 'CVN_STATUS'), (LogType.AUTH_ERROR.value, 'AUTH_ERROR'), ) BASE_URL_FLATPAGES = '/investigacion/faq/'
Fix a bug in LogType that broke migrations creation
Fix a bug in LogType that broke migrations creation
Python
agpl-3.0
tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador
--- +++ @@ -8,8 +8,8 @@ AUTH_ERROR = 1 LOG_TYPE = ( - (LogType.CVN_STATUS, 'CVN_STATUS'), - (LogType.AUTH_ERROR, 'AUTH_ERROR'), + (LogType.CVN_STATUS.value, 'CVN_STATUS'), + (LogType.AUTH_ERROR.value, 'AUTH_ERROR'), ) BASE_URL_FLATPAGES = '/investigacion/faq/'
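The fix above comes down to the difference between an IntEnum member and its .value; a quick standalone check, with the comment on migrations reflecting the commit message rather than verified Django behaviour:

from enum import IntEnum

class LogType(IntEnum):
    CVN_STATUS = 0
    AUTH_ERROR = 1

# The member is a LogType instance; .value is a plain int, which per the
# commit above is what the project's migration generation could serialize.
print(type(LogType.CVN_STATUS))        # <enum 'LogType'>
print(type(LogType.CVN_STATUS.value))  # <class 'int'>
print(LogType.CVN_STATUS.value == 0)   # True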
ad8908753e31420f489f8e5fe2f1c5eac5a5c42a
alexandria/drivers.py
alexandria/drivers.py
# coding=utf-8 import types import config class Driver(object): def __init__(self): self.driver_type = self.__class__.__name__ # Get credentials from conf files for CMDB pass def get_driver_type(self): return self.driver_type def get_ci(self): pass def push_ci(self): pass class Itop(Driver): def get_ci(self): print "Get from itop" return True def push_ci(self): pass class Redfish(Driver): def get_ci(self): print "Get from redfish" return True pass class Ironic(Driver): pass class Mondorescue(Driver): pass class Fakecmdb(Driver): pass class Fakeprovider(Driver): pass class DriverCollection(list): pass
# coding=utf-8 import types import config class Driver(object): def __init__(self): self.driver_type = self.__class__.__name__ # Get credentials from conf files for CMDB pass def get_driver_type(self): return self.driver_type def get_ci(self,ci): pass def push_ci(self,ci): pass class Itop(Driver): def get_ci(self,ci): print "Get from itop" return True def push_ci(self): pass class Redfish(Driver): def get_ci(self,ci): print "Get from redfish" return True class Ironic(Driver): pass class Mondorescue(Driver): pass class Fakecmdb(Driver): pass class Fakeprovider(Driver): def get_ci(self,ci): import app # Simulate a driver that will provide Manager data. # TODO a connect method must be implemented as # Assuming the connection is ok. # Now create a manager model from reference model. ci.ci_type = "Manager" ci.data = config.alexandria.model.Manager class DriverCollection(list): pass
Add ci parameter to get_ci() and push_ci() methods.
Add ci parameter to get_ci() and push_ci() methods.
Python
apache-2.0
sl4shme/alexandria,sl4shme/alexandria,sl4shme/alexandria,uggla/alexandria
--- +++ @@ -14,16 +14,16 @@ def get_driver_type(self): return self.driver_type - def get_ci(self): + def get_ci(self,ci): pass - def push_ci(self): + def push_ci(self,ci): pass class Itop(Driver): - def get_ci(self): + def get_ci(self,ci): print "Get from itop" return True @@ -32,11 +32,9 @@ class Redfish(Driver): - def get_ci(self): + def get_ci(self,ci): print "Get from redfish" return True - - pass class Ironic(Driver): pass @@ -48,7 +46,18 @@ pass class Fakeprovider(Driver): - pass + + def get_ci(self,ci): + import app + # Simulate a driver that will provide Manager data. + + # TODO a connect method must be implemented as + + # Assuming the connection is ok. + + # Now create a manager model from reference model. + ci.ci_type = "Manager" + ci.data = config.alexandria.model.Manager class DriverCollection(list):
5a6970349ace3ddcf12cfac6bc72ec6dbc3424a2
django_bcrypt/models.py
django_bcrypt/models.py
import bcrypt from django.contrib.auth.models import User from django.conf import settings try: rounds = settings.BCRYPT_ROUNDS except AttributeError: rounds = 12 _check_password = User.check_password def bcrypt_check_password(self, raw_password): if self.password.startswith('bc$'): salt_and_hash = self.password[3:] return bcrypt.hashpw(raw_password, salt_and_hash) == salt_and_hash return _check_password(self, raw_password) def bcrypt_set_password(self, raw_password): salt = bcrypt.gensalt(rounds) self.password = 'bc$' + bcrypt.hashpw(raw_password, salt) User.check_password = bcrypt_check_password User.set_password = bcrypt_set_password
import bcrypt from django.contrib.auth.models import User from django.conf import settings rounds = getattr(settings, "BCRYPT_ROUNDS", 12) _check_password = User.check_password def bcrypt_check_password(self, raw_password): if self.password.startswith('bc$'): salt_and_hash = self.password[3:] return bcrypt.hashpw(raw_password, salt_and_hash) == salt_and_hash return _check_password(self, raw_password) def bcrypt_set_password(self, raw_password): salt = bcrypt.gensalt(rounds) self.password = 'bc$' + bcrypt.hashpw(raw_password, salt) User.check_password = bcrypt_check_password User.set_password = bcrypt_set_password
Make the default setting retrieval more elegant.
Make the default setting retrieval more elegant.
Python
mit
dwaiter/django-bcrypt
--- +++ @@ -3,11 +3,7 @@ from django.conf import settings -try: - rounds = settings.BCRYPT_ROUNDS -except AttributeError: - rounds = 12 - +rounds = getattr(settings, "BCRYPT_ROUNDS", 12) _check_password = User.check_password def bcrypt_check_password(self, raw_password):
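getattr with a default collapses the earlier try/except into one line; the sketch below shows the pattern with a stand-in settings object (the _Settings class is invented for illustration).

class _Settings(object):
    pass  # invented stand-in for django.conf.settings

settings = _Settings()
print(getattr(settings, "BCRYPT_ROUNDS", 12))  # 12 - falls back to the default

settings.BCRYPT_ROUNDS = 14
print(getattr(settings, "BCRYPT_ROUNDS", 12))  # 14 - a configured value wins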
3043141a7064a479a29509ee441f104642abe84b
src/ipf/ipfblock/connection.py
src/ipf/ipfblock/connection.py
# -*- coding: utf-8 -*- import ioport class Connection(object): """ Connection class for IPFBlock Connection binding OPort and IPort of some IPFBlocks """ def __init__(self, oport, iport): # Check port compatibility and free of input port if ioport.compatible(oport, iport) and iport.is_free(): self._oport = oport self._iport = iport self._oport.increase_binded_count() self._iport.set_binded() else: raise ValueError("Can not create Connection with given ports") def __del__(self): self._oport.decrease_binded_count() self._iport.set_free() def transmit(self): """ Send value from output port to input port """ self._iport.pass_value(self._oport.get_valure())
# -*- coding: utf-8 -*- import ioport class Connection(object): """ Connection class for IPFBlock Connection binding OPort and IPort of some IPFBlocks """ def __init__(self, oport, iport): # Check port compatibility and free of input port if ioport.compatible(oport, iport) and iport.is_free(): self._oport = oport self._iport = iport self._oport.increase_binded_count() self._iport.set_binded() else: raise ValueError("Can not create Connection with given ports") def __del__(self): self._oport.decrease_binded_count() self._iport.set_free() def process(self): """ Send value from output port to input port """ self._iport.pass_value(self._oport.get_valure())
Rename Connection transmit function to process for use in IPFGraph
Rename Connection transmit function to process for use in IPFGraph
Python
lgpl-2.1
anton-golubkov/Garland,anton-golubkov/Garland
--- +++ @@ -23,7 +23,7 @@ self._oport.decrease_binded_count() self._iport.set_free() - def transmit(self): + def process(self): """ Send value from output port to input port """ self._iport.pass_value(self._oport.get_valure())
c4a0dc9ecc12a82735738fe4b80dc74f991b66d7
csft/__main__.py
csft/__main__.py
#!/usr/bin/env python # -*- coding:utf-8 -*- """ The entry point of csft. """ import argparse as ap from os.path import isdir from .csft import print_result def main(argv=None): parser = ap.ArgumentParser(add_help='add help') parser.add_argument('path', help='the directory to be analyzed') args = parser.parse_args(args=argv) if not isdir(args.path): raise TypeError('%s is not a directory!', args.path) return print_result(args.path) if __name__ == '__main__': raise SystemExit(main())
#!/usr/bin/env python # -*- coding:utf-8 -*- """ The entry point of csft. """ import argparse as ap from os.path import isdir from . import __name__ as _name from . import __version__ as _version from .csft import print_result def main(argv=None): """ Execute the application CLI. """ parser = ap.ArgumentParser(prog=_name) parser.add_argument('-V', '--version', action='version', version=_version) parser.add_argument('path', help='the directory to be analyzed') args = parser.parse_args(args=argv) if not isdir(args.path): raise TypeError('%s is not a directory!', args.path) return print_result(args.path) if __name__ == '__main__': raise SystemExit(main())
Add version option to CLI.
Add version option to CLI.
Python
mit
yanqd0/csft
--- +++ @@ -8,11 +8,15 @@ import argparse as ap from os.path import isdir +from . import __name__ as _name +from . import __version__ as _version from .csft import print_result def main(argv=None): - parser = ap.ArgumentParser(add_help='add help') + """ Execute the application CLI. """ + parser = ap.ArgumentParser(prog=_name) + parser.add_argument('-V', '--version', action='version', version=_version) parser.add_argument('path', help='the directory to be analyzed') args = parser.parse_args(args=argv) if not isdir(args.path):
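The -V/--version flag added above uses argparse's built-in 'version' action, which prints the string and exits; below is a minimal parser wired the same way, with a placeholder version string and sample arguments.

import argparse

parser = argparse.ArgumentParser(prog='csft')
parser.add_argument('-V', '--version', action='version', version='0.0.0')
parser.add_argument('path', help='the directory to be analyzed')

args = parser.parse_args(['some/dir'])
print(args.path)  # some/dir
# parser.parse_args(['--version']) would print '0.0.0' and raise SystemExit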
fb28813fbb906c1ea7c4fb3c52e60219c3ae1f19
votacao_with_redis/management/commands/gera_votacao.py
votacao_with_redis/management/commands/gera_votacao.py
# coding: utf-8 from django.core.management.base import BaseCommand from ...models import Poll, Option class Command(BaseCommand): def handle(self, *args, **kwargs): Poll.objects.filter(id=1).delete() Option.objects.filter(id__in=[1, 2, 3, 4]).delete() question = Poll.objects.create(id=1, title="Quem deve ser o vencedor") option1 = Option.objects.create(id=1, name="Mario", pool=question, votes=0) option2 = Option.objects.create(id=2, name="Luigi", pool=question, votes=0) option3 = Option.objects.create(id=3, name="Yoshi", pool=question, votes=0) option4 = Option.objects.create(id=4, name="Princesa", pool=question, votes=0) question.save() option1.save() option2.save() option3.save() option4.save() print "Pesquisa e Opções cadastradas com sucesso"
# coding: utf-8 from django.core.management.base import BaseCommand from ...models import Poll, Option import redis cache = redis.StrictRedis(host='127.0.0.1', port=6379, db=0) class Command(BaseCommand): def handle(self, *args, **kwargs): options = [1, 2, 3, 4] Poll.objects.filter(id=1).delete() Option.objects.filter(id__in=options).delete() [cache.delete('votacao:option:{}'.format(opt)) for opt in options] question = Poll.objects.create(id=1, title="Quem deve ser o vencedor") option1 = Option.objects.create(id=1, name="Mario", pool=question, votes=0) option2 = Option.objects.create(id=2, name="Luigi", pool=question, votes=0) option3 = Option.objects.create(id=3, name="Yoshi", pool=question, votes=0) option4 = Option.objects.create(id=4, name="Princesa", pool=question, votes=0) question.save() option1.save() option2.save() option3.save() option4.save() print "Pesquisa e Opções cadastradas com sucesso"
Remove Redis keys related to the poll
Remove Redis keys related to the poll
Python
mit
douglasbastos/votacao_with_redis,douglasbastos/votacao_with_redis
--- +++ @@ -1,13 +1,19 @@ # coding: utf-8 from django.core.management.base import BaseCommand from ...models import Poll, Option + +import redis +cache = redis.StrictRedis(host='127.0.0.1', port=6379, db=0) class Command(BaseCommand): def handle(self, *args, **kwargs): + options = [1, 2, 3, 4] Poll.objects.filter(id=1).delete() - Option.objects.filter(id__in=[1, 2, 3, 4]).delete() + Option.objects.filter(id__in=options).delete() + + [cache.delete('votacao:option:{}'.format(opt)) for opt in options] question = Poll.objects.create(id=1, title="Quem deve ser o vencedor")
e861def07da1f0dea7f5273d06e7dc674a79025f
adventure/urls.py
adventure/urls.py
from django.conf.urls import url, include from rest_framework import routers from . import views from .views import PlayerViewSet, AdventureViewSet, RoomViewSet, ArtifactViewSet, EffectViewSet, MonsterViewSet router = routers.DefaultRouter(trailing_slash=False) router.register(r'players', PlayerViewSet) router.register(r'adventures', AdventureViewSet) router.register(r'adventures/(?P<adventure_id>[\w-]+)/rooms$', RoomViewSet) router.register(r'adventures/(?P<adventure_id>[\w-]+)/artifacts$', ArtifactViewSet) router.register(r'adventures/(?P<adventure_id>[\w-]+)/effects$', EffectViewSet) router.register(r'adventures/(?P<adventure_id>[\w-]+)/monsters$', MonsterViewSet) urlpatterns = [ url(r'^api/', include(router.urls)), url(r'^$', views.index, name='index'), url(r'^adventure/(?P<adventure_id>[\w-]+)/$', views.adventure, name='adventure'), # this route is a catch-all for compatibility with the Angular routes. It must be last in the list. # NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app. url(r'^(?P<path>.*)/$', views.index), ]
from django.conf.urls import url, include from rest_framework import routers from . import views from .views import PlayerViewSet, AdventureViewSet, RoomViewSet, ArtifactViewSet, EffectViewSet, MonsterViewSet router = routers.DefaultRouter(trailing_slash=False) router.register(r'players', PlayerViewSet) router.register(r'adventures', AdventureViewSet) router.register(r'adventures/(?P<adventure_id>[\w-]+)/rooms$', RoomViewSet) router.register(r'adventures/(?P<adventure_id>[\w-]+)/artifacts$', ArtifactViewSet) router.register(r'adventures/(?P<adventure_id>[\w-]+)/effects$', EffectViewSet) router.register(r'adventures/(?P<adventure_id>[\w-]+)/monsters$', MonsterViewSet) urlpatterns = [ url(r'^api/', include(router.urls)), url(r'^$', views.index, name='index'), url(r'^adventure/(?P<adventure_id>[\w-]+)/$', views.adventure, name='adventure'), # this route is a catch-all for compatibility with the Angular routes. It must be last in the list. # NOTE: this currently matches URLs without a . in them, so .js files and broken images will still 404. # NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app. url(r'^(?P<path>[^\.]*)/$', views.index), ]
Update Django catch-all URL path to not catch URLs with a . in them.
Update Django catch-all URL path to not catch URLs with a . in them. This makes missing JS files 404 properly instead of returning the HTML 404 page which confuses the parser.
Python
mit
kdechant/eamon,kdechant/eamon,kdechant/eamon,kdechant/eamon
--- +++ @@ -19,6 +19,7 @@ url(r'^adventure/(?P<adventure_id>[\w-]+)/$', views.adventure, name='adventure'), # this route is a catch-all for compatibility with the Angular routes. It must be last in the list. + # NOTE: this currently matches URLs without a . in them, so .js files and broken images will still 404. # NOTE: non-existent URLs won't 404 with this in place. They will be sent into the Angular app. - url(r'^(?P<path>.*)/$', views.index), + url(r'^(?P<path>[^\.]*)/$', views.index), ]
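The new pattern can be exercised with the re module directly; the sample paths below are invented, but they show why a URL containing a dot now falls through to a real 404 while ordinary app URLs still match.

import re

old_catch_all = re.compile(r'^(?P<path>.*)/$')
new_catch_all = re.compile(r'^(?P<path>[^\.]*)/$')

print(bool(old_catch_all.match('static/app.js/')))  # True  - swallowed by the catch-all
print(bool(new_catch_all.match('static/app.js/')))  # False - the dot breaks the match
print(bool(new_catch_all.match('adventure/42/')))   # True  - ordinary app URLs still match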
2baabd0f0d18e9bd81797a384e34adca0c39d7ed
bux_grader_framework/__init__.py
bux_grader_framework/__init__.py
""" bux_grader_framework ~~~~~~~~~~~~~~~~~~~~ A framework for bootstraping of external graders for your edX course. :copyright: 2014 Boston University :license: GNU Affero General Public License """ __version__ = '0.4.3' DEFAULT_LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'verbose': { 'format': '%(asctime)s - %(levelname)s - %(processName)s - %(name)s - %(message)s' } }, 'handlers': { 'console': { 'level': 'INFO', 'class': 'logging.StreamHandler', 'formatter': 'verbose' }, }, 'loggers': { '': { 'handlers': ['console'], 'level': 'INFO', 'propagate': True, }, }, } import os os.environ['STATSD_HOST'] = os.environ.get('STATSD_HOST', '127.0.0.1') os.environ['STATSD_PORT'] = os.environ.get('STATSD_PORT', 8125) from .conf import Config from .evaluators import registered_evaluators, BaseEvaluator from .grader import Grader from .workers import EvaluatorWorker, XQueueWorker from .xqueue import XQueueClient
""" bux_grader_framework ~~~~~~~~~~~~~~~~~~~~ A framework for bootstraping of external graders for your edX course. :copyright: 2014 Boston University :license: GNU Affero General Public License """ __version__ = '0.4.3' DEFAULT_LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'verbose': { 'format': '%(asctime)s - %(levelname)s - %(processName)s - %(name)s - %(message)s' } }, 'handlers': { 'console': { 'level': 'INFO', 'class': 'logging.StreamHandler', 'formatter': 'verbose' }, }, 'loggers': { '': { 'handlers': ['console'], 'level': 'INFO', 'propagate': True, }, }, } import os os.environ['STATSD_HOST'] = os.environ.get('STATSD_HOST', '127.0.0.1') os.environ['STATSD_PORT'] = os.environ.get('STATSD_PORT', '8125') from .conf import Config from .evaluators import registered_evaluators, BaseEvaluator from .grader import Grader from .workers import EvaluatorWorker, XQueueWorker from .xqueue import XQueueClient
Revert string -> integer change for statsd port
Revert string -> integer change for statsd port Turns out they want a string...
Python
agpl-3.0
bu-ist/bux-grader-framework,abduld/bux-grader-framework
--- +++ @@ -36,7 +36,7 @@ import os os.environ['STATSD_HOST'] = os.environ.get('STATSD_HOST', '127.0.0.1') -os.environ['STATSD_PORT'] = os.environ.get('STATSD_PORT', 8125) +os.environ['STATSD_PORT'] = os.environ.get('STATSD_PORT', '8125') from .conf import Config from .evaluators import registered_evaluators, BaseEvaluator
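Illustrative note on the revert above: os.environ only stores strings, so an integer fallback for STATSD_PORT raises TypeError as soon as the variable is unset. A minimal sketch of the failure mode (assuming CPython 3 for the exact error text; the variable name is taken from the record):

import os

os.environ.pop('STATSD_PORT', None)
try:
    os.environ['STATSD_PORT'] = os.environ.get('STATSD_PORT', 8125)
except TypeError as exc:
    print('integer default rejected:', exc)   # str expected, not int

os.environ['STATSD_PORT'] = os.environ.get('STATSD_PORT', '8125')
print(os.environ['STATSD_PORT'])              # '8125'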
b0236a2cb936df9571139f074b35c178e2573593
dadi/__init__.py
dadi/__init__.py
import numpy # This gives a nicer printout for masked arrays. numpy.ma.default_real_fill_value = numpy.nan import Integration import PhiManip import Numerics import SFS import ms try: import Plotting except ImportError: pass try: import os __DIRECTORY__ = os.path.dirname(Integration.__file__) __svn_file__ = os.path.join(__DIRECTORY__, 'svnversion') __SVNVERSION__ = file(__svn_file__).read().strip() except: __SVNVERSION__ = 'Unknown'
import Integration import PhiManip import Numerics import SFS import ms try: import Plotting except ImportError: pass try: import os __DIRECTORY__ = os.path.dirname(Integration.__file__) __svn_file__ = os.path.join(__DIRECTORY__, 'svnversion') __SVNVERSION__ = file(__svn_file__).read().strip() except: __SVNVERSION__ = 'Unknown'
Remove extraneous setting of masked fill value.
Remove extraneous setting of masked fill value. git-svn-id: 4c7b13231a96299fde701bb5dec4bd2aaf383fc6@115 979d6bd5-6d4d-0410-bece-f567c23bd345
Python
bsd-3-clause
RyanGutenkunst/dadi,niuhuifei/dadi,cheese1213/dadi,yangjl/dadi,yangjl/dadi,ChenHsiang/dadi,paulirish/dadi,beni55/dadi,ChenHsiang/dadi,beni55/dadi,RyanGutenkunst/dadi,paulirish/dadi,cheese1213/dadi,niuhuifei/dadi
--- +++ @@ -1,7 +1,3 @@ -import numpy -# This gives a nicer printout for masked arrays. -numpy.ma.default_real_fill_value = numpy.nan - import Integration import PhiManip import Numerics
f10a8c498df0f83b43e636dfcb0b50d60860ed5e
googlecloudprofiler/__version__.py
googlecloudprofiler/__version__.py
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # pylint: skip-file """Version of Python Cloud Profiler module.""" # setup.py reads the version information from here to set package version __version__ = '1.1.0'
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # pylint: skip-file """Version of Python Cloud Profiler module.""" # setup.py reads the version information from here to set package version __version__ = '1.1.1'
Update Python agent version to include fixes for broken pipe issues.
Update Python agent version to include fixes for broken pipe issues. PiperOrigin-RevId: 315750638
Python
apache-2.0
GoogleCloudPlatform/cloud-profiler-python,GoogleCloudPlatform/cloud-profiler-python,GoogleCloudPlatform/cloud-profiler-python,GoogleCloudPlatform/cloud-profiler-python,GoogleCloudPlatform/cloud-profiler-python
--- +++ @@ -16,4 +16,4 @@ """Version of Python Cloud Profiler module.""" # setup.py reads the version information from here to set package version -__version__ = '1.1.0' +__version__ = '1.1.1'
829aa30a052b1a35d2c0d0797abe6b0c34c2f9d2
bluechip/player/createplayers.py
bluechip/player/createplayers.py
import random from models import Player def create_players(): #TODO: Need to centralize this function call. random.seed(123456789) # TODO: Do we need to delete all? Player.objects.all().delete() for _ in xrange(3000): p = Player.objects.create_player() p.save
import random from player.models import Player #TODO: Need to centralize this function call. random.seed(123456789) # For now just create a new class each Player.objects.all().delete() for _ in xrange(3000): p = Player.objects.create_player() p.save
Add script to create the recruiting class.
Add script to create the recruiting class.
Python
mit
isuraed/bluechip
--- +++ @@ -1,12 +1,11 @@ import random -from models import Player +from player.models import Player -def create_players(): - #TODO: Need to centralize this function call. - random.seed(123456789) +#TODO: Need to centralize this function call. +random.seed(123456789) - # TODO: Do we need to delete all? - Player.objects.all().delete() - for _ in xrange(3000): - p = Player.objects.create_player() - p.save +# For now just create a new class each +Player.objects.all().delete() +for _ in xrange(3000): + p = Player.objects.create_player() + p.save
468ce899542197f8ab7ae51800b56132e6e81bd4
problem_2/solution.py
problem_2/solution.py
def sum_even_fibonacci_numbers_1(): f1, f2, s, = 0, 1, 0, while f2 < 4000000: f2, f1 = f1, f1 + f2 if f2 % 2 == 0: s += f2 return s def sum_even_fibonacci_numbers_2(): s, a, b = 0, 1, 1 c = a + b while c < 4000000: s += c a = b + c b = a + c c = a + b return s
from timeit import timeit def sum_even_fibonacci_numbers_1(): f1, f2, s, = 0, 1, 0, while f2 < 4000000: f2, f1 = f1, f1 + f2 if f2 % 2 == 0: s += f2 return s def sum_even_fibonacci_numbers_2(): s, a, b = 0, 1, 1 c = a + b while c < 4000000: s += c a = b + c b = a + c c = a + b return s print "sum_even_fibonacci_numbers_1: {0}".format(timeit("sum_even_fibonacci_numbers_1()", "from __main__ import sum_even_fibonacci_numbers_1;")) print "sum_even_fibonacci_numbers_2: {0}".format(timeit("sum_even_fibonacci_numbers_2()", "from __main__ import sum_even_fibonacci_numbers_2;"))
Add timeit to measure each python implementation of problem 2
Add timeit to measure each python implementation of problem 2
Python
mit
mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler
--- +++ @@ -1,3 +1,4 @@ +from timeit import timeit def sum_even_fibonacci_numbers_1(): f1, f2, s, = 0, 1, 0, while f2 < 4000000: @@ -14,3 +15,5 @@ b = a + c c = a + b return s +print "sum_even_fibonacci_numbers_1: {0}".format(timeit("sum_even_fibonacci_numbers_1()", "from __main__ import sum_even_fibonacci_numbers_1;")) +print "sum_even_fibonacci_numbers_2: {0}".format(timeit("sum_even_fibonacci_numbers_2()", "from __main__ import sum_even_fibonacci_numbers_2;"))
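Side note on the measurement added above: timeit can also be handed the callable directly, which avoids the import-string setup argument. A small sketch under that assumption (function body simplified, repetition count chosen arbitrarily):

from timeit import timeit

def sum_even_fibonacci():
    total, a, b = 0, 1, 2
    while b < 4000000:
        if b % 2 == 0:
            total += b
        a, b = b, a + b
    return total

print(sum_even_fibonacci())                      # 4613732
print(timeit(sum_even_fibonacci, number=10000))  # seconds for 10000 calls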
d16cbf8994023dba5146ddb38e0db29202bb4614
back_office/models.py
back_office/models.py
from django.db import models from django.utils.translation import ugettext as _ from Django.contrib.auth.models import User FEMALE = 'F' MALE = 'M' class Teacher(models.Model): """ Halaqat teachers information """ GENDER_CHOICES = ( (MALE, _('Male')), (FEMALE, _('Female')), ) gender = models.CharField(max_length=1, verbose_name=_('Gender'), choices=GENDER_CHOICES) civil_id = models.CharField(max_length=12, verbose_name=_('Civil ID')) phone_number = models.CharField(max_length=15, verbose_name=_('Phone Number')) job_title = models.CharField(max_length=15, verbose_name=_('Title')) enabled = models.BooleanField(default=True) user = models.OneToOneField(to=User, related_name='teachers') def enable(self): """ Enable teacher profile :return: """ self.enabled = True self.save() def disable(self): """ Disable teacher profile :return: """ self.enabled = False self.save()
from django.db import models from django.utils.translation import ugettext as _ from Django.contrib.auth.models import User FEMALE = 'F' MALE = 'M' class Teacher(models.Model): """ Halaqat teachers information """ GENDER_CHOICES = ( (MALE, _('Male')), (FEMALE, _('Female')), ) gender = models.CharField(max_length=1, verbose_name=_('Gender'), choices=GENDER_CHOICES) civil_id = models.CharField(max_length=12, verbose_name=_('Civil ID')) phone_number = models.CharField(max_length=15, verbose_name=_('Phone Number')) job_title = models.CharField(max_length=15, verbose_name=_('Title')) enabled = models.BooleanField(default=True) user = models.OneToOneField(to=User, related_name='teacher_profile') def enable(self): """ Enable teacher profile :return: """ self.enabled = True self.save() def disable(self): """ Disable teacher profile :return: """ self.enabled = False self.save()
Rename the related name for User one-to-one relationship
Rename the related name for User one-to-one relationship
Python
mit
EmadMokhtar/halaqat,EmadMokhtar/halaqat,EmadMokhtar/halaqat
--- +++ @@ -21,7 +21,7 @@ verbose_name=_('Phone Number')) job_title = models.CharField(max_length=15, verbose_name=_('Title')) enabled = models.BooleanField(default=True) - user = models.OneToOneField(to=User, related_name='teachers') + user = models.OneToOneField(to=User, related_name='teacher_profile') def enable(self): """
34fe4bb5cd5c4c35a659698e8d258c78da01887a
pynexus/api_client.py
pynexus/api_client.py
import requests class ApiClient: def __init__(self, host, username, password): self.uri = host + '/nexus/service/local/' self.username = username self.password = password def get_all_repositories(self): r = requests.get(self.uri + 'all_repositories', headers={'Accept': 'application/json'}) return r def get_status(self): r = requests.get(self.uri + 'status', headers={'Accept': 'application/json'}) return r
import requests class ApiClient: def __init__(self, host, username, password): self.uri = host + '/nexus/service/local/' self.username = username self.password = password def get_all_repositories(self): r = requests.get(self.uri + 'all_repositories', headers={'Accept': 'application/json'}) return r def get_status(self): r = requests.get(self.uri + 'status', headers={'Accept': 'application/json'}) return r def get_users(self): r = requests.get(self.uri + 'users', auth=(self.username, self.password), headers={'Accept': 'application/json'}) return r
Add get_users method to get a list of users
Add get_users method to get a list of users
Python
apache-2.0
rcarrillocruz/pynexus
--- +++ @@ -15,3 +15,8 @@ r = requests.get(self.uri + 'status', headers={'Accept': 'application/json'}) return r + + def get_users(self): + r = requests.get(self.uri + 'users', auth=(self.username, self.password), headers={'Accept': 'application/json'}) + + return r
6eeecb5e36e5551ba3a3c35a9c7f52393d2f9d14
src/puzzle/problems/problem.py
src/puzzle/problems/problem.py
from src.data import meta class Problem(object): def __init__(self, name, lines): self.name = name self.lines = lines self._solutions = None self._constraints = [] def constrain(self, fn): self._constraints.append(fn) # Invalidate solutions. self._solutions = None def solutions(self): if self._solutions is None: self._solutions = meta.Meta( (k, v) for k, v in self._solve().items() if all( [fn(k, v) for fn in self._constraints] ) ) return self._solutions def _solve(self): """Solves Problem. Returns: dict Dict mapping solution to score. """ raise NotImplementedError()
from src.data import meta class Problem(object): def __init__(self, name, lines): self.name = name self.lines = lines self._solutions = None self._constraints = [] @property def kind(self): return str(type(self)).strip("'<>").split('.').pop() @property def solution(self): return self.solutions().peek() def constrain(self, fn): self._constraints.append(fn) # Invalidate solutions. self._solutions = None def solutions(self): if self._solutions is None: self._solutions = meta.Meta( (k, v) for k, v in self._solve().items() if all( [fn(k, v) for fn in self._constraints] ) ) return self._solutions def _solve(self): """Solves Problem. Returns: dict Dict mapping solution to score. """ raise NotImplementedError()
Add simple helper properties to Problem.
Add simple helper properties to Problem.
Python
mit
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
--- +++ @@ -7,6 +7,14 @@ self.lines = lines self._solutions = None self._constraints = [] + + @property + def kind(self): + return str(type(self)).strip("'<>").split('.').pop() + + @property + def solution(self): + return self.solutions().peek() def constrain(self, fn): self._constraints.append(fn)
bc3cf7bb0ac8e271f8786b9ec982fe1297d0ac93
tests/example_app/flask_app.py
tests/example_app/flask_app.py
import flask from pale.adapters import flask as pale_flask_adapter from pale.config import authenticator, context_creator from tests.example_app import api @authenticator def authenticate_pale_context(context): """Don't actually authenticate anything in this test.""" return context @context_creator def create_pale_context(endpoint,request): return pale_flask_adapter.DefaultFlaskContext(endpoint, request) def create_pale_flask_app(): """Creates a flask app, and registers a blueprint bound to pale.""" blueprint = flask.Blueprint('api', 'tests.example_app') pale_flask_adapter.bind_blueprint(api, blueprint) app = flask.Flask(__name__) app.register_blueprint(blueprint, url_prefix='/api') return app
import logging import flask from pale.adapters import flask as pale_flask_adapter from pale.config import authenticator, context_creator from tests.example_app import api @authenticator def authenticate_pale_context(context): """Don't actually authenticate anything in this test.""" logging.debug("pale.example_app: authenticate_pale_context") return context @context_creator def create_pale_context(endpoint,request): logging.debug("pale.example_app: create_pale_context") return pale_flask_adapter.DefaultFlaskContext(endpoint, request) def create_pale_flask_app(): """Creates a flask app, and registers a blueprint bound to pale.""" blueprint = flask.Blueprint('api', 'tests.example_app') pale_flask_adapter.bind_blueprint(api, blueprint) app = flask.Flask(__name__) app.register_blueprint(blueprint, url_prefix='/api') return app
Add debugging statements to example_app context. This is to help indicate to users that PALE's example_app is operating.
Add debugging statements to example_app context. This is to help indicate to users that PALE's example_app is operating.
Python
mit
Loudr/pale
--- +++ @@ -1,3 +1,4 @@ +import logging import flask from pale.adapters import flask as pale_flask_adapter @@ -5,14 +6,15 @@ from tests.example_app import api - @authenticator def authenticate_pale_context(context): """Don't actually authenticate anything in this test.""" + logging.debug("pale.example_app: authenticate_pale_context") return context @context_creator def create_pale_context(endpoint,request): + logging.debug("pale.example_app: create_pale_context") return pale_flask_adapter.DefaultFlaskContext(endpoint, request)
0d89712bda6e85901e839dec3e639c16aea42d48
tests/test_proxy_pagination.py
tests/test_proxy_pagination.py
import json from django.test import TestCase from rest_framework import status from tests.models import TestModel class ProxyPaginationTests(TestCase): """ Tests for drf-proxy-pagination """ def setUp(self): for n in range(200): TestModel.objects.create(n=n) def test_without_pager_param(self): resp = self.client.get('/data/', HTTP_ACCEPT='application/json') self.assertEqual(resp.status_code, status.HTTP_200_OK) self.assertEqual(resp['Content-Type'], 'application/json') content = json.loads(resp.content) self.assertIn('next', content) self.assertIn('count', content) self.assertIn('page=', content['next']) self.assertNotIn('cursor=', content['next']) def test_with_pager_param(self): resp = self.client.get('/data/?pager=cursor', HTTP_ACCEPT='application/json') self.assertEqual(resp.status_code, status.HTTP_200_OK) self.assertEqual(resp['Content-Type'], 'application/json') self.assertNotIn('count', resp.content) content = json.loads(resp.content) self.assertIn('next', content) self.assertNotIn('page=', content['next']) self.assertIn('cursor=', content['next'])
import json from django.test import TestCase from django.utils import six from rest_framework import status from tests.models import TestModel class ProxyPaginationTests(TestCase): """ Tests for drf-proxy-pagination """ def setUp(self): for n in range(200): TestModel.objects.create(n=n) def test_without_pager_param(self): resp = self.client.get('/data/', HTTP_ACCEPT='application/json') self.assertEqual(resp.status_code, status.HTTP_200_OK) self.assertEqual(resp['Content-Type'], 'application/json') content = json.loads(str(resp.content, encoding='utf8') if six.PY3 else resp.content) self.assertIn('next', content) self.assertIn('count', content) self.assertIn('page=', content['next']) self.assertNotIn('cursor=', content['next']) def test_with_pager_param(self): resp = self.client.get('/data/?pager=cursor', HTTP_ACCEPT='application/json') self.assertEqual(resp.status_code, status.HTTP_200_OK) self.assertEqual(resp['Content-Type'], 'application/json') content = json.loads(str(resp.content, encoding='utf8') if six.PY3 else resp.content) self.assertIn('next', content) self.assertNotIn('count', content) self.assertNotIn('page=', content['next']) self.assertIn('cursor=', content['next'])
Fix tests failing with Python 3
Fix tests failing with Python 3
Python
mit
tuffnatty/drf-proxy-pagination
--- +++ @@ -1,6 +1,7 @@ import json from django.test import TestCase +from django.utils import six from rest_framework import status @@ -19,7 +20,7 @@ resp = self.client.get('/data/', HTTP_ACCEPT='application/json') self.assertEqual(resp.status_code, status.HTTP_200_OK) self.assertEqual(resp['Content-Type'], 'application/json') - content = json.loads(resp.content) + content = json.loads(str(resp.content, encoding='utf8') if six.PY3 else resp.content) self.assertIn('next', content) self.assertIn('count', content) self.assertIn('page=', content['next']) @@ -29,8 +30,8 @@ resp = self.client.get('/data/?pager=cursor', HTTP_ACCEPT='application/json') self.assertEqual(resp.status_code, status.HTTP_200_OK) self.assertEqual(resp['Content-Type'], 'application/json') - self.assertNotIn('count', resp.content) - content = json.loads(resp.content) + content = json.loads(str(resp.content, encoding='utf8') if six.PY3 else resp.content) self.assertIn('next', content) + self.assertNotIn('count', content) self.assertNotIn('page=', content['next']) self.assertIn('cursor=', content['next'])
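Brief sketch of the Python 3 issue fixed above: on Python 3 the test client's resp.content is bytes, and json.loads on older 3.x releases only accepts str, hence the explicit decode guarded by six.PY3 (the sample payload below is made up):

import json

payload = b'{"next": "http://testserver/data/?cursor=abc"}'   # bytes, like resp.content on Python 3
content = json.loads(payload.decode('utf8'))
print(content['next'])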
8f2e1ef30a62c19fc91eed48adc38ecfcdbc37d6
pinax/api/registry.py
pinax/api/registry.py
from __future__ import unicode_literals registry = {} bound_registry = {} def register(cls): registry[cls.api_type] = cls return cls def bind(parent=None, resource=None): def wrapper(endpointset): if parent is not None: endpointset.parent = parent endpointset.url.parent = parent.url if resource is not None: BoundResource = type( str("Bound{}".format(resource.__class__.__name__)), (resource,), {"endpointset": endpointset}, ) endpointset.resource_class = BoundResource # override registry with bound resource (typically what we want) registry[resource.api_type] = BoundResource endpointset.relationships = getattr(endpointset, "relationships", {}) return endpointset return wrapper
from __future__ import unicode_literals registry = {} bound_registry = {} def register(cls): registry[cls.api_type] = cls def as_jsonapi(self): return cls(self).serialize() cls.model.as_jsonapi = as_jsonapi return cls def bind(parent=None, resource=None): def wrapper(endpointset): if parent is not None: endpointset.parent = parent endpointset.url.parent = parent.url if resource is not None: BoundResource = type( str("Bound{}".format(resource.__class__.__name__)), (resource,), {"endpointset": endpointset}, ) endpointset.resource_class = BoundResource # override registry with bound resource (typically what we want) registry[resource.api_type] = BoundResource endpointset.relationships = getattr(endpointset, "relationships", {}) return endpointset return wrapper
Attach as_jsonapi to models for easy serialization
Attach as_jsonapi to models for easy serialization
Python
mit
pinax/pinax-api
--- +++ @@ -7,6 +7,11 @@ def register(cls): registry[cls.api_type] = cls + + def as_jsonapi(self): + return cls(self).serialize() + + cls.model.as_jsonapi = as_jsonapi return cls
73fa2788a1e8d6faef1eda78520b1908ebde66b5
examples/ported/_example.py
examples/ported/_example.py
import os import moderngl_window as mglw class Example(mglw.WindowConfig): gl_version = (3, 3) title = "ModernGL Example" window_size = (1280, 720) aspect_ratio = 16 / 9 resizable = False resource_dir = os.path.normpath(os.path.join(__file__, '../../data')) def __init__(self, **kwargs): super().__init__(**kwargs) @classmethod def run(cls): mglw.run_window_config(cls)
import os import moderngl_window as mglw class Example(mglw.WindowConfig): gl_version = (3, 3) title = "ModernGL Example" window_size = (1280, 720) aspect_ratio = 16 / 9 resizable = True resource_dir = os.path.normpath(os.path.join(__file__, '../../data')) def __init__(self, **kwargs): super().__init__(**kwargs) @classmethod def run(cls): mglw.run_window_config(cls)
Make examples resizable by default
Make examples resizable by default
Python
mit
cprogrammer1994/ModernGL,cprogrammer1994/ModernGL,cprogrammer1994/ModernGL
--- +++ @@ -8,7 +8,7 @@ title = "ModernGL Example" window_size = (1280, 720) aspect_ratio = 16 / 9 - resizable = False + resizable = True resource_dir = os.path.normpath(os.path.join(__file__, '../../data'))
ae5db950c683501c1ec77fee430b818293e6c603
csv2ofx/mappings/gls.py
csv2ofx/mappings/gls.py
from operator import itemgetter mapping = { 'has_header': True, 'currency': 'EUR', 'delimiter': ';', 'bank': 'GLS Bank', 'account': itemgetter('Kontonummer'), 'date': lambda r: r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' + r['Buchungstag'][-4:], # Chop up the dotted German date format and put it in ridiculous M/D/Y order 'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'), # locale.atof does not actually know how to deal with German separators, so we do it this way 'desc': itemgetter('Buchungstext'), 'payee': itemgetter('Auftraggeber/Empfänger'), }
from operator import itemgetter mapping = { 'has_header': True, 'currency': 'EUR', 'delimiter': ';', 'bank': 'GLS Bank', 'account': itemgetter('Kontonummer'), # Chop up the dotted German date format and put it in ridiculous M/D/Y order 'date': lambda r: r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' + r['Buchungstag'][-4:], # locale.atof does not actually know how to deal with German separators. # So we do it the crude way 'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'), 'desc': itemgetter('Buchungstext'), 'payee': itemgetter('Auftraggeber/Empfänger'), }
Split up lines to pass the linter
Split up lines to pass the linter
Python
mit
reubano/csv2ofx,reubano/csv2ofx
--- +++ @@ -6,8 +6,15 @@ 'delimiter': ';', 'bank': 'GLS Bank', 'account': itemgetter('Kontonummer'), - 'date': lambda r: r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' + r['Buchungstag'][-4:], # Chop up the dotted German date format and put it in ridiculous M/D/Y order - 'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'), # locale.atof does not actually know how to deal with German separators, so we do it this way + + # Chop up the dotted German date format and put it in ridiculous M/D/Y order + 'date': lambda r: r['Buchungstag'][3:5] + '/' + + r['Buchungstag'][:2] + '/' + + r['Buchungstag'][-4:], + + # locale.atof does not actually know how to deal with German separators. + # So we do it the crude way + 'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'), 'desc': itemgetter('Buchungstext'), 'payee': itemgetter('Auftraggeber/Empfänger'), }
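For clarity on what the two lambdas above compute, a tiny sketch with made-up sample values:

amount = '1.234,56'                                   # German thousands '.' and decimal ','
print(amount.replace('.', '').replace(',', '.'))      # 1234.56

tag = '31.12.2016'                                    # Buchungstag in DD.MM.YYYY
print(tag[3:5] + '/' + tag[:2] + '/' + tag[-4:])      # 12/31/2016, the M/D/Y order noted in the comment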
a21aea6a0327d697d9213ecdbfd6365a77d0b40e
cardbox/deck_views.py
cardbox/deck_views.py
from django.http import HttpResponse def index(request): return HttpResponse("Hallo Welt!")
# coding=utf-8 # TODO: User authentication from django.contrib.localflavor import kw from django.http import HttpResponse from django.http.response import HttpResponseForbidden from django.template import context from django.views.generic import ListView, DetailView from django.views.generic import CreateView, UpdateView, DeleteView from django.core.urlresolvers import reverse_lazy from cardbox.deck_forms import DeckForm from cardbox.deck_model import Deck class DeckList(ListView): model = Deck class DeckCreate(CreateView): model = Deck form_class = DeckForm success_url = reverse_lazy('deck:deck_list') def get_context_data(self, **kwargs): context = super(DeckCreate, self).get_context_data(**kwargs) context['action'] = "erstellen" context['button_text'] = "Erstellen" return context class DeckDetailView(DetailView): model = Deck class DeckUpdate(UpdateView): model = Deck form_class = DeckForm success_url = reverse_lazy('deck:deck_list') def get_context_data(self, **kwargs): context = super(DeckUpdate, self).get_context_data(**kwargs) context['action'] = "bearbeiten" context['button_text'] = "Änderungen übernehmen" return context class DeckDelete(DeleteView): model = Deck success_url = reverse_lazy('deck:deck_list') def get(selfself, request, *args, **kwargs): """Never get the confirm_delete template!""" print "test" return HttpResponseForbidden("<h1>Access denied</h1>") def post(self, *args, **kwargs): # TODO: user authentication return super(DeckDelete, self).post(self, args, kwargs)
Add views for deck CRUD
Add views for deck CRUD
Python
mit
DummyDivision/Tsune,DummyDivision/Tsune,DummyDivision/Tsune
--- +++ @@ -1,5 +1,57 @@ +# coding=utf-8 +# TODO: User authentication +from django.contrib.localflavor import kw + from django.http import HttpResponse +from django.http.response import HttpResponseForbidden +from django.template import context +from django.views.generic import ListView, DetailView +from django.views.generic import CreateView, UpdateView, DeleteView +from django.core.urlresolvers import reverse_lazy + +from cardbox.deck_forms import DeckForm +from cardbox.deck_model import Deck + +class DeckList(ListView): + model = Deck -def index(request): - return HttpResponse("Hallo Welt!") +class DeckCreate(CreateView): + model = Deck + form_class = DeckForm + success_url = reverse_lazy('deck:deck_list') + + def get_context_data(self, **kwargs): + context = super(DeckCreate, self).get_context_data(**kwargs) + context['action'] = "erstellen" + context['button_text'] = "Erstellen" + return context + +class DeckDetailView(DetailView): + model = Deck + + +class DeckUpdate(UpdateView): + model = Deck + form_class = DeckForm + success_url = reverse_lazy('deck:deck_list') + + def get_context_data(self, **kwargs): + context = super(DeckUpdate, self).get_context_data(**kwargs) + context['action'] = "bearbeiten" + context['button_text'] = "Änderungen übernehmen" + return context + + +class DeckDelete(DeleteView): + model = Deck + success_url = reverse_lazy('deck:deck_list') + + def get(selfself, request, *args, **kwargs): + """Never get the confirm_delete template!""" + print "test" + return HttpResponseForbidden("<h1>Access denied</h1>") + + def post(self, *args, **kwargs): + # TODO: user authentication + return super(DeckDelete, self).post(self, args, kwargs)
db44d918ae92b64572728fa4954fd291c2f30731
instance/testing.py
instance/testing.py
import os #: Database backend SECRET_KEY = 'testkey' SQLALCHEMY_DATABASE_URI = 'postgres:///boxoffice_testing' SERVER_NAME = 'boxoffice.travis.dev:6500' BASE_URL = 'http://' + SERVER_NAME RAZORPAY_KEY_ID = os.environ.get('RAZORPAY_KEY_ID') RAZORPAY_KEY_SECRET = os.environ.get('RAZORPAY_KEY_SECRET') ALLOWED_ORIGINS = [BASE_URL, 'http://boxoffice.travis.dev:6500/', 'http://shreyas-wlan.dev:8000'] LASTUSER_SERVER = 'https://auth.hasgeek.com' LASTUSER_CLIENT_ID = '' LASTUSER_CLIENT_SECRET = '' TIMEZONE = 'Asia/Calcutta' CACHE_TYPE = 'redis' ASSET_MANIFEST_PATH = "static/build/manifest.json" # no trailing slash ASSET_BASE_PATH = '/static/build' WTF_CSRF_ENABLED = False
import os #: Database backend SECRET_KEY = 'testkey' SQLALCHEMY_DATABASE_URI = 'postgresql:///boxoffice_testing' SERVER_NAME = 'boxoffice.travis.dev:6500' BASE_URL = 'http://' + SERVER_NAME RAZORPAY_KEY_ID = os.environ.get('RAZORPAY_KEY_ID') RAZORPAY_KEY_SECRET = os.environ.get('RAZORPAY_KEY_SECRET') ALLOWED_ORIGINS = [BASE_URL, 'http://boxoffice.travis.dev:6500/', 'http://shreyas-wlan.dev:8000'] LASTUSER_SERVER = 'https://auth.hasgeek.com' LASTUSER_CLIENT_ID = '' LASTUSER_CLIENT_SECRET = '' TIMEZONE = 'Asia/Calcutta' CACHE_TYPE = 'redis' ASSET_MANIFEST_PATH = "static/build/manifest.json" # no trailing slash ASSET_BASE_PATH = '/static/build' WTF_CSRF_ENABLED = False
Fix db engine name for SQLAlchemy 1.4
Fix db engine name for SQLAlchemy 1.4
Python
agpl-3.0
hasgeek/boxoffice,hasgeek/boxoffice,hasgeek/boxoffice,hasgeek/boxoffice
--- +++ @@ -1,7 +1,7 @@ import os #: Database backend SECRET_KEY = 'testkey' -SQLALCHEMY_DATABASE_URI = 'postgres:///boxoffice_testing' +SQLALCHEMY_DATABASE_URI = 'postgresql:///boxoffice_testing' SERVER_NAME = 'boxoffice.travis.dev:6500' BASE_URL = 'http://' + SERVER_NAME
ca49ed4dfb660482663ed7b1e04c3c51644fd4cf
examples/convert_gcode_to_s3g.py
examples/convert_gcode_to_s3g.py
import os import sys lib_path = os.path.abspath('../') sys.path.append(lib_path) import s3g #input_file = '../doc/gcode_samples/skeinforge_single_extrusion_snake.gcode' #input_file = '../doc/gcode_samples/skeinforge_dual_extrusion_hilbert_cube.gcode' input_file = '../doc/gcode_samples/miracle_grue_single_extrusion.gcode' output_file = 'out.s3g' s = s3g.s3g() s.writer = s3g.FileWriter(open(output_file, 'w')) parser = s3g.GcodeParser() parser.s3g = s with open(input_file) as f: for line in f: parser.ExecuteLine(line)
import os import sys lib_path = os.path.abspath('../') sys.path.append(lib_path) import s3g #input_file = '../doc/gcode_samples/skeinforge_single_extrusion_snake.gcode' input_file = '../doc/gcode_samples/skeinforge_dual_extrusion_hilbert_cube.gcode' #input_file = '../doc/gcode_samples/miracle_grue_single_extrusion.gcode' output_file = 'out.s3g' s = s3g.s3g() s.writer = s3g.FileWriter(open(output_file, 'w')) parser = s3g.GcodeParser() parser.s3g = s with open(input_file) as f: for line in f: parser.ExecuteLine(line)
Convert gcode now houses all files.
Convert gcode now houses all files.
Python
agpl-3.0
makerbot/s3g,makerbot/s3g,Jnesselr/s3g,makerbot/s3g,Jnesselr/s3g,makerbot/s3g
--- +++ @@ -6,8 +6,8 @@ import s3g #input_file = '../doc/gcode_samples/skeinforge_single_extrusion_snake.gcode' -#input_file = '../doc/gcode_samples/skeinforge_dual_extrusion_hilbert_cube.gcode' -input_file = '../doc/gcode_samples/miracle_grue_single_extrusion.gcode' +input_file = '../doc/gcode_samples/skeinforge_dual_extrusion_hilbert_cube.gcode' +#input_file = '../doc/gcode_samples/miracle_grue_single_extrusion.gcode' output_file = 'out.s3g' s = s3g.s3g()
131033fa3ab170ac2a66c1dd89074ea74702fb52
icekit/page_types/articles/migrations/0002_auto_20161012_2231.py
icekit/page_types/articles/migrations/0002_auto_20161012_2231.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('icekit_articles', '0001_initial'), ] operations = [ migrations.AddField( model_name='article', name='slug', field=models.SlugField(max_length=255, default='woo'), preserve_default=False, ), migrations.AddField( model_name='article', name='title', field=models.CharField(max_length=255, default='woo'), preserve_default=False, ), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('icekit_articles', '0001_initial'), ] operations = [ migrations.AddField( model_name='article', name='slug', field=models.SlugField(max_length=255), preserve_default=False, ), migrations.AddField( model_name='article', name='title', field=models.CharField(max_length=255), preserve_default=False, ), ]
Remove vestigial (?) "woo" default for article slug and title fields.
Remove vestigial (?) "woo" default for article slug and title fields.
Python
mit
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
--- +++ @@ -14,13 +14,13 @@ migrations.AddField( model_name='article', name='slug', - field=models.SlugField(max_length=255, default='woo'), + field=models.SlugField(max_length=255), preserve_default=False, ), migrations.AddField( model_name='article', name='title', - field=models.CharField(max_length=255, default='woo'), + field=models.CharField(max_length=255), preserve_default=False, ), ]
decdec6846a1ca590713b2789d9c999b5c1d9502
dotfiles/cashbot.cfg.py
dotfiles/cashbot.cfg.py
{ 'REPORT_TYPE': 'HTML', 'REPORT_RECIPIENTS': '[email protected]', 'JENKINS_USERNAME': 'sknight', 'JENKINS_API_TOKEN': '594849a68d4911d6c39a2cb5f700c385', 'FEATURES': {'PASSWORD_DECRYPTION': False, 'AWS': False, 'ANYBAR': True}, # 'LOG_DB_URL': 'sqlite:///Users/steven.knight/Projects/qa/cashbot.db', # 'LOG_DB_URL': 'sqlite:/:memory:', 'LOG_DB_URL': "mysql://cashbot:jJT!3VK14&llVP0o@localhost:3306/cashbotdb", # 'ANYBAR_COLOR_FAIL': 'blue', # 'GNUPG_VERBOSE': True, # 'LOG_DB_DEBUG': True, # 'CONSOLE_LOG_LEVEL': 'DEBUG', }
{ 'REPORT_RECIPIENTS': '[email protected]', 'JENKINS_USERNAME': 'sknight', 'JENKINS_API_TOKEN': '594849a68d4911d6c39a2cb5f700c385', 'FEATURES': { 'PASSWORD_DECRYPTION': False, 'AWS': False, 'ANYBAR': True }, # 'LOG_DB_URL': 'sqlite:///Users/steven.knight/Projects/qa/cashbot.db', # 'LOG_DB_URL': 'sqlite:/:memory:', 'LOG_DB_URL': "mysql://cashbot:jJT!3VK14&llVP0o@localhost:3306/cashbotdb", # 'GNUPG_VERBOSE': True, # 'LOG_DB_DEBUG': True, # 'CONSOLE_LOG_LEVEL': 'DEBUG', }
Remove setting REPORT_TYPE. Reformatted code. Remove ANYBAR_COLOR_FAIL.
Remove setting REPORT_TYPE. Reformatted code. Remove ANYBAR_COLOR_FAIL.
Python
mit
skk/dotfiles,skk/dotfiles
--- +++ @@ -1,17 +1,18 @@ { - 'REPORT_TYPE': 'HTML', 'REPORT_RECIPIENTS': '[email protected]', 'JENKINS_USERNAME': 'sknight', 'JENKINS_API_TOKEN': '594849a68d4911d6c39a2cb5f700c385', - 'FEATURES': {'PASSWORD_DECRYPTION': False, 'AWS': False, 'ANYBAR': True}, + 'FEATURES': { + 'PASSWORD_DECRYPTION': False, + 'AWS': False, + 'ANYBAR': True + }, # 'LOG_DB_URL': 'sqlite:///Users/steven.knight/Projects/qa/cashbot.db', # 'LOG_DB_URL': 'sqlite:/:memory:', 'LOG_DB_URL': "mysql://cashbot:jJT!3VK14&llVP0o@localhost:3306/cashbotdb", - - # 'ANYBAR_COLOR_FAIL': 'blue', # 'GNUPG_VERBOSE': True, # 'LOG_DB_DEBUG': True,
a06591fe0597d49c83899533f3ff01deec017964
corehq/apps/locations/tests/test_location_safety.py
corehq/apps/locations/tests/test_location_safety.py
from django.views.generic.base import View from mock import MagicMock from ..permissions import is_location_safe, location_safe @location_safe def safe_fn_view(request, domain): return "hello" def unsafe_fn_view(request, domain): return "hello" @location_safe class SafeClsView(View): pass class UnsafeClsView(View): pass class UnSafeChildOfSafeClsView(SafeClsView): """This inherits its parent class's safety""" # TODO change this behavior @location_safe class SafeChildofUnsafeClsView(UnsafeClsView): """This shouldn't hoist its safety up to the parent class""" def test_view_safety(): def _assert(view_fn, is_safe): assert is_location_safe(view_fn, MagicMock(), (), {}) == is_safe, \ f"{view_fn} {'IS NOT' if is_safe else 'IS'} marked as location-safe" for view, is_safe in [ (safe_fn_view, True), (unsafe_fn_view, False), (SafeClsView, True), (UnsafeClsView, False), ]: yield _assert, view, is_safe
from django.views.generic.base import View from mock import MagicMock from ..permissions import is_location_safe, location_safe @location_safe def safe_fn_view(request, domain): return "hello" def unsafe_fn_view(request, domain): return "hello" @location_safe class SafeClsView(View): pass class UnsafeClsView(View): pass class UnSafeChildOfSafeClsView(SafeClsView): """This inherits its parent class's safety""" # TODO change this behavior @location_safe class SafeChildofUnsafeClsView(UnsafeClsView): """This shouldn't hoist its safety up to the parent class""" def test_view_safety(): def _assert(view_fn, is_safe): assert is_location_safe(view_fn, MagicMock(), (), {}) == is_safe, \ f"{view_fn} {'IS NOT' if is_safe else 'IS'} marked as location-safe" for view, is_safe in [ (safe_fn_view, True), (unsafe_fn_view, False), (SafeClsView.as_view(), True), (UnsafeClsView.as_view(), False), (UnSafeChildOfSafeClsView.as_view(), True), (SafeChildofUnsafeClsView.as_view(), True), ]: yield _assert, view, is_safe
Clarify current behavior regarding inheritance
Clarify current behavior regarding inheritance
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
--- +++ @@ -40,7 +40,9 @@ for view, is_safe in [ (safe_fn_view, True), (unsafe_fn_view, False), - (SafeClsView, True), - (UnsafeClsView, False), + (SafeClsView.as_view(), True), + (UnsafeClsView.as_view(), False), + (UnSafeChildOfSafeClsView.as_view(), True), + (SafeChildofUnsafeClsView.as_view(), True), ]: yield _assert, view, is_safe
cde02a3129d276d02054e04c1b0a0b605b837d32
eodatasets3/__init__.py
eodatasets3/__init__.py
# coding=utf-8 from __future__ import absolute_import from ._version import get_versions REPO_URL = "https://github.com/GeoscienceAustralia/eo-datasets.git" __version__ = get_versions()["version"] del get_versions
# coding=utf-8 from __future__ import absolute_import from ._version import get_versions from .assemble import DatasetAssembler REPO_URL = "https://github.com/GeoscienceAustralia/eo-datasets.git" __version__ = get_versions()["version"] del get_versions __all__ = (DatasetAssembler, REPO_URL, __version__)
Allow assembler to be imported from eodatasets3 root
Allow assembler to be imported from eodatasets3 root
Python
apache-2.0
GeoscienceAustralia/eo-datasets,jeremyh/eo-datasets,jeremyh/eo-datasets,GeoscienceAustralia/eo-datasets
--- +++ @@ -3,8 +3,11 @@ from __future__ import absolute_import from ._version import get_versions +from .assemble import DatasetAssembler REPO_URL = "https://github.com/GeoscienceAustralia/eo-datasets.git" __version__ = get_versions()["version"] del get_versions + +__all__ = (DatasetAssembler, REPO_URL, __version__)
98f8a8fb51ae539aad6a3e2faebced4b806c3f0c
filer/utils/generate_filename.py
filer/utils/generate_filename.py
from __future__ import unicode_literals try: from django.utils.encoding import force_text except ImportError: # Django < 1.5 from django.utils.encoding import force_unicode as force_text from django.utils.timezone import now from filer.utils.files import get_valid_filename import os def by_date(instance, filename): datepart = force_text(now().strftime("%Y/%m/%d")) return os.path.join(datepart, get_valid_filename(filename)) def randomized(instance, filename): import uuid uuid_str = str(uuid.uuid4()) random_path = "%s/%s/%s" % (uuid_str[0:2], uuid_str[2:4], uuid_str) return os.path.join(random_path, get_valid_filename(filename)) class prefixed_factory(object): def __init__(self, upload_to, prefix): self.upload_to = upload_to self.prefix = prefix def __call__(self, instance, filename): if callable(self.upload_to): upload_to_str = self.upload_to(instance, filename) else: upload_to_str = self.upload_to if not self.prefix: return upload_to_str return os.path.join(self.prefix, upload_to_str)
from __future__ import unicode_literals try: from django.utils.encoding import force_text except ImportError: # Django < 1.5 from django.utils.encoding import force_unicode as force_text from django.utils.timezone import now from filer.utils.files import get_valid_filename import os def by_date(instance, filename): datepart = force_text(now().strftime("%Y/%m/%d")) return os.path.join(datepart, get_valid_filename(filename)) def randomized(instance, filename): import uuid uuid_str = str(uuid.uuid4()) return os.path.join(uuid_str[0:2], uuid_str[2:4], uuid_str, get_valid_filename(filename)) class prefixed_factory(object): def __init__(self, upload_to, prefix): self.upload_to = upload_to self.prefix = prefix def __call__(self, instance, filename): if callable(self.upload_to): upload_to_str = self.upload_to(instance, filename) else: upload_to_str = self.upload_to if not self.prefix: return upload_to_str return os.path.join(self.prefix, upload_to_str)
Build random path using os.path.join
Build random path using os.path.join
Python
bsd-3-clause
o-zander/django-filer,nimbis/django-filer,nimbis/django-filer,webu/django-filer,divio/django-filer,matthiask/django-filer,skirsdeda/django-filer,stefanfoulis/django-filer,sopraux/django-filer,Flight/django-filer,sopraux/django-filer,belimawr/django-filer,matthiask/django-filer,o-zander/django-filer,DylannCordel/django-filer,jakob-o/django-filer,lory87/django-filer,stefanfoulis/django-filer,o-zander/django-filer,skirsdeda/django-filer,nephila/django-filer,DylannCordel/django-filer,mkoistinen/django-filer,stefanfoulis/django-filer,DylannCordel/django-filer,skirsdeda/django-filer,divio/django-filer,vechorko/django-filer,nephila/django-filer,jakob-o/django-filer,nimbis/django-filer,jakob-o/django-filer,webu/django-filer,Flight/django-filer,belimawr/django-filer,civicresourcegroup/django-filer,kriwil/django-filer,belimawr/django-filer,civicresourcegroup/django-filer,mkoistinen/django-filer,mkoistinen/django-filer,Flight/django-filer,lory87/django-filer,kriwil/django-filer,stefanfoulis/django-filer,Flight/django-filer,vstoykov/django-filer,civicresourcegroup/django-filer,lory87/django-filer,Flight/django-filer,vstoykov/django-filer,skirsdeda/django-filer,kriwil/django-filer,divio/django-filer,webu/django-filer,nephila/django-filer,sopraux/django-filer,o-zander/django-filer,vechorko/django-filer,DylannCordel/django-filer,mkoistinen/django-filer,vstoykov/django-filer,jakob-o/django-filer,kriwil/django-filer,nimbis/django-filer,vechorko/django-filer,stefanfoulis/django-filer,belimawr/django-filer,skirsdeda/django-filer,webu/django-filer,matthiask/django-filer,lory87/django-filer,sopraux/django-filer,vechorko/django-filer,divio/django-filer,civicresourcegroup/django-filer,DylannCordel/django-filer,matthiask/django-filer,jakob-o/django-filer
--- +++ @@ -17,8 +17,8 @@ def randomized(instance, filename): import uuid uuid_str = str(uuid.uuid4()) - random_path = "%s/%s/%s" % (uuid_str[0:2], uuid_str[2:4], uuid_str) - return os.path.join(random_path, get_valid_filename(filename)) + return os.path.join(uuid_str[0:2], uuid_str[2:4], uuid_str, + get_valid_filename(filename)) class prefixed_factory(object):
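Illustrative note on the change above: building the randomized prefix with os.path.join keeps the separators platform-appropriate instead of hard-coding '/'. A sketch with a fixed UUID and a made-up filename (the real code also passes the name through get_valid_filename):

import os
import uuid

uuid_str = str(uuid.UUID(int=0x12345678123456781234567812345678))
print(os.path.join(uuid_str[0:2], uuid_str[2:4], uuid_str, 'photo.jpg'))
# 12/34/12345678-1234-5678-1234-567812345678/photo.jpg on POSIX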
f8e375bdc07b6fdefdae589f2d75c4ec0f5f3864
website/search/mutation_result.py
website/search/mutation_result.py
from models import Protein, Mutation class SearchResult: def __init__(self, protein, mutation, is_mutation_novel, type, **kwargs): self.protein = protein self.mutation = mutation self.is_mutation_novel = is_mutation_novel self.type = type self.meta_user = None self.__dict__.update(kwargs) def __getstate__(self): state = self.__dict__.copy() state['protein_refseq'] = self.protein.refseq del state['protein'] state['mutation_kwargs'] = { 'position': self.mutation.position, 'alt': self.mutation.alt } del state['mutation'] state['meta_user'].mutation = None return state def __setstate__(self, state): state['protein'] = Protein.query.filter_by( refseq=state['protein_refseq'] ).one() del state['protein_refseq'] state['mutation'] = Mutation.query.filter_by( protein=state['protein'], **state['mutation_kwargs'] ).one() del state['mutation_kwargs'] state['meta_user'].mutation = state['mutation'] state['mutation'].meta_user = state['meta_user'] self.__dict__.update(state)
from models import Protein, Mutation from database import get_or_create class SearchResult: def __init__(self, protein, mutation, is_mutation_novel, type, **kwargs): self.protein = protein self.mutation = mutation self.is_mutation_novel = is_mutation_novel self.type = type self.meta_user = None self.__dict__.update(kwargs) def __getstate__(self): state = self.__dict__.copy() state['protein_refseq'] = self.protein.refseq del state['protein'] state['mutation_kwargs'] = { 'position': self.mutation.position, 'alt': self.mutation.alt } del state['mutation'] state['meta_user'].mutation = None return state def __setstate__(self, state): state['protein'] = Protein.query.filter_by( refseq=state['protein_refseq'] ).one() del state['protein_refseq'] state['mutation'], created = get_or_create( Mutation, protein=state['protein'], **state['mutation_kwargs'] ) del state['mutation_kwargs'] state['meta_user'].mutation = state['mutation'] state['mutation'].meta_user = state['meta_user'] self.__dict__.update(state)
Fix result loading for novel mutations
Fix result loading for novel mutations
Python
lgpl-2.1
reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/ActiveDriverDB,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations
--- +++ @@ -1,4 +1,5 @@ from models import Protein, Mutation +from database import get_or_create class SearchResult: @@ -34,10 +35,11 @@ ).one() del state['protein_refseq'] - state['mutation'] = Mutation.query.filter_by( + state['mutation'], created = get_or_create( + Mutation, protein=state['protein'], **state['mutation_kwargs'] - ).one() + ) del state['mutation_kwargs'] state['meta_user'].mutation = state['mutation']
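Note on the fix above: query(...).one() raises NoResultFound when no matching row exists, which is presumably why novel mutations failed to load, while a get-or-create helper returns a fresh instance instead. The project's database.get_or_create is not shown in this record; a common SQLAlchemy-style shape for such a helper, purely as an assumed sketch, is:

def get_or_create(session, model, **kwargs):
    # Hypothetical helper shape; the real database.get_or_create may differ
    # (for instance, it appears to take the model without an explicit session).
    instance = session.query(model).filter_by(**kwargs).first()
    if instance is not None:
        return instance, False
    instance = model(**kwargs)
    session.add(instance)
    return instance, True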
dad38c399c4687c93c69255df0f9d69d1bb386c4
yawf/models.py
yawf/models.py
from django.db import models from django.utils.translation import ugettext_lazy as _ from yawf.config import INITIAL_STATE from yawf.base_model import WorkflowAwareModelBase class WorkflowAwareModel(WorkflowAwareModelBase): class Meta: abstract = True state = models.CharField(default=INITIAL_STATE, max_length=32, db_index=True, editable=False, verbose_name=_('state'))
from django.db import models from django.utils.translation import ugettext_lazy as _ from yawf.config import INITIAL_STATE from yawf.base_model import WorkflowAwareModelBase class WorkflowAwareModel(WorkflowAwareModelBase, models.Model): class Meta: abstract = True state = models.CharField(default=INITIAL_STATE, max_length=32, db_index=True, editable=False, verbose_name=_('state'))
Add missing parent for WorkflowAwareModel
Add missing parent for WorkflowAwareModel
Python
mit
freevoid/yawf
--- +++ @@ -5,7 +5,7 @@ from yawf.base_model import WorkflowAwareModelBase -class WorkflowAwareModel(WorkflowAwareModelBase): +class WorkflowAwareModel(WorkflowAwareModelBase, models.Model): class Meta: abstract = True
790c9debf2f89dca49a5e6f5b3842aebfba2a796
run.py
run.py
#!/usr/bin/env python import os import signal import sys from app.main import app, queues, sched def _teardown(signal, frame): sched.shutdown(wait=False) for queue in queues.values(): queue.put(None) queues.clear() # Let the interrupt bubble up so that Flask/Werkzeug see it raise KeyboardInterrupt if __name__ == '__main__': if len(sys.argv) > 1 and sys.argv[1] == 'debug': app.debug = True signal.signal(signal.SIGINT, _teardown) port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port, use_reloader=False, threaded=True)
#!/usr/bin/env python import os import signal import sys from app.main import app, queues, sched def _teardown(signal, frame): sched.shutdown(wait=False) for queue in queues.values(): queue.put(None) queues.clear() # Let the interrupt bubble up so that Flask/Werkzeug see it raise KeyboardInterrupt if __name__ == '__main__': if len(sys.argv) > 1 and sys.argv[1] == 'debug': app.debug = True signal.signal(signal.SIGINT, _teardown) port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port, use_reloader=False, threaded=True)
Add interrupt signal handler in non-debug mode
Add interrupt signal handler in non-debug mode
Python
mit
martinp/jarvis2,martinp/jarvis2,martinp/jarvis2,mpolden/jarvis2,mpolden/jarvis2,Foxboron/Frank,Foxboron/Frank,mpolden/jarvis2,Foxboron/Frank
--- +++ @@ -18,6 +18,6 @@ if __name__ == '__main__': if len(sys.argv) > 1 and sys.argv[1] == 'debug': app.debug = True - signal.signal(signal.SIGINT, _teardown) + signal.signal(signal.SIGINT, _teardown) port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port, use_reloader=False, threaded=True)
32d4ea22c1bca4a96a8d826f0225dfee2a4c21d2
django_tenants/tests/__init__.py
django_tenants/tests/__init__.py
from .test_routes import * from .test_tenants import * from .test_cache import *
from .files import * from .staticfiles import * from .template import * from .test_routes import * from .test_tenants import * from .test_cache import *
Include static file-related tests in 'test' package.
fix(tests): Include static file-related tests in 'test' package.
Python
mit
tomturner/django-tenants,tomturner/django-tenants,tomturner/django-tenants
--- +++ @@ -1,3 +1,6 @@ +from .files import * +from .staticfiles import * +from .template import * from .test_routes import * from .test_tenants import * from .test_cache import *
47310125c53d80e4ff09af6616955ccb2d9e3bc8
conanfile.py
conanfile.py
from conans import ConanFile, CMake class ConanUsingSilicium(ConanFile): settings = "os", "compiler", "build_type", "arch" requires = "Boost/1.59.0@lasote/stable", "silicium/0.1@tyroxx/testing" generators = "cmake" default_options = "Boost:shared=True"
from conans import ConanFile, CMake class ConanUsingSilicium(ConanFile): settings = "os", "compiler", "build_type", "arch" requires = "silicium/0.1@tyroxx/testing" generators = "cmake" default_options = "Boost:shared=True"
Boost should be referenced automatically by silicium
Boost should be referenced automatically by silicium
Python
mit
TyRoXx/conan_using_silicium,TyRoXx/conan_using_silicium
--- +++ @@ -2,6 +2,6 @@ class ConanUsingSilicium(ConanFile): settings = "os", "compiler", "build_type", "arch" - requires = "Boost/1.59.0@lasote/stable", "silicium/0.1@tyroxx/testing" + requires = "silicium/0.1@tyroxx/testing" generators = "cmake" default_options = "Boost:shared=True"