commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
53f7acf5fc04ca6f86456fda95504ba41046d860 | openedx/features/specializations/templatetags/sso_meta_tag.py | openedx/features/specializations/templatetags/sso_meta_tag.py | from django import template
from django.template import Template
register = template.Library()
@register.simple_tag(takes_context=True)
def sso_meta(context):
return Template('<meta name="title" content="${ title }">' + ' ' +
'<meta name="description" content="${ subtitle }">' + ' ' +
## OG (Open Graph) title and description added below to give social media info to display
## (https://developers.facebook.com/docs/opengraph/howtos/maximizing-distribution-media-content#tags)
'<meta property="og:title" content="${ title }">' + ' ' +
'<meta property="og:description" content="${ subtitle }">' + ' ' +
'<meta prefix="og: http://ogp.me/ns#" name="image" property="og:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
'<meta property="og:image:width" content="512">' + ' ' +
'<meta property="og:image:height" content="512">' + ' ' +
'<meta name="twitter:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
'<meta name="twitter:card" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
'<meta name="twitter:site" content="@PhilanthropyUni">' + ' ' +
'<meta name="twitter:title" content="${ title }">' + ' ' +
'<meta name="twitter:description" content="${ subtitle }">').render(context);
| from django import template
from django.template.loader import get_template
register = template.Library()
@register.simple_tag(takes_context=True)
def sso_meta(context):
return get_template('features/specializations/sso_meta_template.html').render(context.flatten())
| Add Django Custom Tag SSO | Add Django Custom Tag SSO
| Python | agpl-3.0 | philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform |
27bf030df4c2f46eef8cdcd9441bd5d21a22e5cc | parkings/api/public/urls.py | parkings/api/public/urls.py | from django.conf.urls import include, url
from rest_framework.routers import DefaultRouter
from .parking_area import PublicAPIParkingAreaViewSet
from .parking_area_statistics import PublicAPIParkingAreaStatisticsViewSet
router = DefaultRouter()
router.register(r'parking_area', PublicAPIParkingAreaViewSet)
router.register(r'parking_area_statistics', PublicAPIParkingAreaStatisticsViewSet)
urlpatterns = [
url(r'^', include(router.urls, namespace='v1')),
]
| from django.conf.urls import include, url
from rest_framework.routers import DefaultRouter
from .parking_area import PublicAPIParkingAreaViewSet
from .parking_area_statistics import PublicAPIParkingAreaStatisticsViewSet
router = DefaultRouter()
router.register(r'parking_area', PublicAPIParkingAreaViewSet, base_name='parkingarea')
router.register(r'parking_area_statistics', PublicAPIParkingAreaStatisticsViewSet, base_name='parkingareastatistics')
urlpatterns = [
url(r'^', include(router.urls, namespace='v1')),
]
| Fix public API root view links | Fix public API root view links
| Python | mit | tuomas777/parkkihubi |
1eb3df5ca3c86effa85ba76a8bdf549f3560f3a5 | landscapesim/serializers/regions.py | landscapesim/serializers/regions.py | import json
from rest_framework import serializers
from landscapesim.models import Region
class ReportingUnitSerializer(serializers.Serializer):
type = serializers.SerializerMethodField()
properties = serializers.SerializerMethodField()
geometry = serializers.SerializerMethodField()
class Meta:
fields = ('type', 'geometry', 'properties',)
def get_type(self, obj):
return 'Feature'
def get_geometry(self, obj):
return json.loads(obj.polygon.json)
def get_properties(self, obj):
return {
'id': obj.id,
'unit_id': obj.unit_id,
'name': obj.name
}
class RegionSerializer(serializers.ModelSerializer):
class Meta:
model = Region
fields = ('id', 'name')
| import json
from rest_framework import serializers
from django.core.urlresolvers import reverse
from landscapesim.models import Region
class ReportingUnitSerializer(serializers.Serializer):
type = serializers.SerializerMethodField()
properties = serializers.SerializerMethodField()
geometry = serializers.SerializerMethodField()
class Meta:
fields = ('type', 'geometry', 'properties',)
def get_type(self, obj):
return 'Feature'
def get_geometry(self, obj):
return json.loads(obj.polygon.json)
def get_properties(self, obj):
return {
'id': obj.id,
'unit_id': obj.unit_id,
'name': obj.name
}
class RegionSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField()
class Meta:
model = Region
fields = ('id', 'name', 'url')
def get_url(self, obj):
return reverse('region-reporting-units', args=[obj.id])
| Add reporting unit URL to region serializer. | Add reporting unit URL to region serializer.
| Python | bsd-3-clause | consbio/landscapesim,consbio/landscapesim,consbio/landscapesim |
521b4fbec142306fad2347a5dd3a56aeec2f9498 | events/search_indexes.py | events/search_indexes.py | from haystack import indexes
from .models import Event, Place, PublicationStatus
from django.utils.html import strip_tags
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
autosuggest = indexes.EdgeNgramField(model_attr='name')
start_time = indexes.DateTimeField(model_attr='start_time')
end_time = indexes.DateTimeField(model_attr='end_time')
def get_updated_field(self):
return 'last_modified_time'
def get_model(self):
return Event
def prepare(self, obj):
#obj.lang_keywords = obj.keywords.filter(language=get_language())
if obj.description:
obj.description = strip_tags(obj.description)
return super(EventIndex, self).prepare(obj)
def index_queryset(self, using=None):
return self.get_model().objects.filter(publication_status=PublicationStatus.PUBLIC)
class PlaceIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
autosuggest = indexes.EdgeNgramField(model_attr='name')
def get_updated_field(self):
return 'last_modified_time'
def get_model(self):
return Place
| from haystack import indexes
from .models import Event, Place, PublicationStatus
from django.utils.html import strip_tags
class EventIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
autosuggest = indexes.EdgeNgramField(model_attr='name')
start_time = indexes.DateTimeField(model_attr='start_time')
end_time = indexes.DateTimeField(model_attr='end_time')
def get_updated_field(self):
return 'last_modified_time'
def get_model(self):
return Event
def prepare(self, obj):
#obj.lang_keywords = obj.keywords.filter(language=get_language())
if obj.description:
obj.description = strip_tags(obj.description)
return super(EventIndex, self).prepare(obj)
def index_queryset(self, using=None):
return self.get_model().objects.filter(publication_status=PublicationStatus.PUBLIC)
class PlaceIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
autosuggest = indexes.EdgeNgramField(model_attr='name')
def get_updated_field(self):
return 'last_modified_time'
def get_model(self):
return Place
def index_queryset(self, using=None):
return self.get_model().objects.filter(deleted=False)
| Remove deleted places from place index | Remove deleted places from place index
| Python | mit | aapris/linkedevents,aapris/linkedevents,tuomas777/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,tuomas777/linkedevents,City-of-Helsinki/linkedevents,tuomas777/linkedevents,aapris/linkedevents |
c65b6adafcdf791030090a72f4490171012ce4fd | config/fuzz_pox_simple.py | config/fuzz_pox_simple.py |
from config.experiment_config_lib import ControllerConfig
from sts.topology import MeshTopology
from sts.control_flow import Fuzzer
from sts.input_traces.input_logger import InputLogger
from sts.simulation_state import SimulationConfig
# Use POX as our controller
start_cmd = ('''./pox.py openflow.discovery forwarding.l2_multi '''
'''openflow.of_01 --address=__address__ --port=__port__''')
controllers = [ControllerConfig(start_cmd, cwd="pox/")]
topology_class = MeshTopology
topology_params = "num_switches=2"
simulation_config = SimulationConfig(controller_configs=controllers,
topology_class=topology_class,
topology_params=topology_params)
control_flow = Fuzzer(simulation_config,
input_logger=InputLogger(),
invariant_check_name="InvariantChecker.check_liveness",
halt_on_violation=True)
|
from config.experiment_config_lib import ControllerConfig
from sts.topology import MeshTopology
from sts.control_flow import Fuzzer
from sts.input_traces.input_logger import InputLogger
from sts.simulation_state import SimulationConfig
# Use POX as our controller
start_cmd = ('''./pox.py samples.buggy '''
'''openflow.of_01 --address=__address__ --port=__port__''')
controllers = [ControllerConfig(start_cmd, cwd="pox/")]
topology_class = MeshTopology
topology_params = "num_switches=2"
simulation_config = SimulationConfig(controller_configs=controllers,
topology_class=topology_class,
topology_params=topology_params)
control_flow = Fuzzer(simulation_config,
input_logger=InputLogger(),
invariant_check_name="InvariantChecker.check_liveness",
check_interval=5,
halt_on_violation=True)
| Use a buggy pox module | Use a buggy pox module
| Python | apache-2.0 | ucb-sts/sts,jmiserez/sts,jmiserez/sts,ucb-sts/sts |
84f4626a623283c3c4d98d9be0ccd69fe837f772 | download_data.py | download_data.py | #!/usr/bin/env python
from lbtoolbox.download import download
import os
import inspect
import tarfile
def here(f):
me = inspect.getsourcefile(here)
return os.path.join(os.path.dirname(os.path.abspath(me)), f)
def download_extract(url, into):
fname = download(url, into)
print("Extracting...")
with tarfile.open(fname) as f:
f.extractall(path=into)
if __name__ == '__main__':
baseurl = 'https://omnomnom.vision.rwth-aachen.de/data/tosato/'
datadir = here('data')
# First, download the Tosato datasets.
download_extract(baseurl + 'CAVIARShoppingCenterFullOccl.tar.bz2', into=datadir)
download_extract(baseurl + 'CAVIARShoppingCenterFull.tar.bz2', into=datadir)
download_extract(baseurl + 'HIIT6HeadPose.tar.bz2', into=datadir)
download_extract(baseurl + 'HOC.tar.bz2', into=datadir)
download_extract(baseurl + 'HOCoffee.tar.bz2', into=datadir)
download_extract(baseurl + 'IHDPHeadPose.tar.bz2', into=datadir)
download_extract(baseurl + 'QMULPoseHeads.tar.bz2', into=datadir)
| #!/usr/bin/env python
from lbtoolbox.download import download
import os
import inspect
import tarfile
def here(f):
me = inspect.getsourcefile(here)
return os.path.join(os.path.dirname(os.path.abspath(me)), f)
def download_extract(urlbase, name, into):
print("Downloading " + name)
fname = download(os.path.join(urlbase, name), into)
print("Extracting...")
with tarfile.open(fname) as f:
f.extractall(path=into)
if __name__ == '__main__':
baseurl = 'https://omnomnom.vision.rwth-aachen.de/data/BiternionNets/'
datadir = here('data')
# First, download the Tosato datasets.
download_extract(baseurl, 'CAVIARShoppingCenterFullOccl.tar.bz2', into=datadir)
download_extract(baseurl, 'CAVIARShoppingCenterFull.tar.bz2', into=datadir)
download_extract(baseurl, 'HIIT6HeadPose.tar.bz2', into=datadir)
download_extract(baseurl, 'HOC.tar.bz2', into=datadir)
download_extract(baseurl, 'HOCoffee.tar.bz2', into=datadir)
download_extract(baseurl, 'IHDPHeadPose.tar.bz2', into=datadir)
download_extract(baseurl, 'QMULPoseHeads.tar.bz2', into=datadir)
print("Done.")
| Update download URL and add more output to downloader. | Update download URL and add more output to downloader.
| Python | mit | lucasb-eyer/BiternionNet |
c94c86df52184af6b07dcf58951688cea178b8e6 | dmoj/executors/LUA.py | dmoj/executors/LUA.py | from .base_executor import ScriptExecutor
class Executor(ScriptExecutor):
ext = '.lua'
name = 'LUA'
command = 'lua'
address_grace = 131072
test_program = "io.write(io.read('*all'))"
@classmethod
def get_version_flags(cls, command):
return ['-v']
| from .base_executor import ScriptExecutor
class Executor(ScriptExecutor):
ext = '.lua'
name = 'LUA'
command = 'lua'
command_paths = ['lua', 'lua5.3', 'lua5.2', 'lua5.1']
address_grace = 131072
test_program = "io.write(io.read('*all'))"
@classmethod
def get_version_flags(cls, command):
return ['-v']
| Make lua autoconfig work better. | Make lua autoconfig work better.
| Python | agpl-3.0 | DMOJ/judge,DMOJ/judge,DMOJ/judge |
7cef87a81278c227db0cb07329d1b659dbd175b3 | mail_factory/models.py | mail_factory/models.py | # -*- coding: utf-8 -*-
import django
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
def autodiscover():
"""Auto-discover INSTALLED_APPS mails.py modules."""
for app in settings.INSTALLED_APPS:
module = '%s.mails' % app # Attempt to import the app's 'mails' module
try:
import_module(module)
except:
# Decide whether to bubble up this error. If the app just
# doesn't have a mails module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
app_module = import_module(app)
if module_has_submodule(app_module, 'mails'):
raise
# If we're using Django >= 1.7, use the new app-loading mecanism which is way
# better.
if django.VERSION < (1, 7):
autodiscover()
| # -*- coding: utf-8 -*-
import django
from django.conf import settings
from django.utils.module_loading import module_has_submodule
try:
from importlib import import_module
except ImportError:
# Compatibility for python-2.6
from django.utils.importlib import import_module
def autodiscover():
"""Auto-discover INSTALLED_APPS mails.py modules."""
for app in settings.INSTALLED_APPS:
module = '%s.mails' % app # Attempt to import the app's 'mails' module
try:
import_module(module)
except:
# Decide whether to bubble up this error. If the app just
# doesn't have a mails module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
app_module = import_module(app)
if module_has_submodule(app_module, 'mails'):
raise
# If we're using Django >= 1.7, use the new app-loading mecanism which is way
# better.
if django.VERSION < (1, 7):
autodiscover()
| Use standard library instead of django.utils.importlib | Use standard library instead of django.utils.importlib
> django.utils.importlib is a compatibility library for when Python 2.6 was
> still supported. It has been obsolete since Django 1.7, which dropped support
> for Python 2.6, and is removed in 1.9 per the deprecation cycle.
> Use Python's import_module function instead
> — [1]
References:
[1] http://stackoverflow.com/a/32763639
[2] https://docs.djangoproject.com/en/1.9/internals/deprecation/#deprecation-removed-in-1-9
| Python | bsd-3-clause | novafloss/django-mail-factory,novafloss/django-mail-factory |
3ca11cd2ba0bcff8bbc4d01df2ba5b72f5b2e4b0 | warehouse/packaging/urls.py | warehouse/packaging/urls.py | # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
from werkzeug.routing import Rule, EndpointPrefix
urls = [
EndpointPrefix("warehouse.packaging.views.", [
Rule(
"/projects/<project_name>/",
methods=["GET"],
endpoint="project_detail",
),
Rule(
"/projects/<project_name>/<version>/",
methods=["GET"],
endpoint="project_detail",
),
]),
]
| # Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
from werkzeug.routing import Rule, EndpointPrefix
urls = [
EndpointPrefix("warehouse.packaging.views.", [
Rule(
"/project/<project_name>/",
methods=["GET"],
endpoint="project_detail",
),
Rule(
"/project/<project_name>/<version>/",
methods=["GET"],
endpoint="project_detail",
),
]),
]
| Remove the plural from the url | Remove the plural from the url
| Python | apache-2.0 | robhudson/warehouse,mattrobenolt/warehouse,techtonik/warehouse,techtonik/warehouse,mattrobenolt/warehouse,mattrobenolt/warehouse,robhudson/warehouse |
6b2202d0b7a4ef544b63e1692e40c5fec9c5930a | dash_renderer/__init__.py | dash_renderer/__init__.py | # For reasons that I don't fully understand,
# unless I include __file__ in here, the packaged version
# of this module will just be a .egg file, not a .egg folder.
# And if it's just a .egg file, it won't include the necessary
# dependencies from MANIFEST.in.
# Found the __file__ clue by inspecting the `python setup.py install`
# command in the dash_html_components package which printed out:
# `dash_html_components.__init__: module references __file__`
# TODO - Understand this better
from version import __version__
__file__
# Dash renderer's dependencies get loaded in a special order by the server:
# React bundles first, the renderer bundle at the very end.
_js_dist_dependencies = [
{
'external_url': [
'https://unpkg.com/[email protected]/dist/react.min.js',
'https://unpkg.com/[email protected]/dist/react-dom.min.js'
],
'relative_package_path': [
'[email protected]',
'[email protected]'
],
'namespace': 'dash_renderer'
}
]
_js_dist = [
{
'relative_package_path': 'bundle.js',
"external_url": (
'https://unpkg.com/dash-renderer@{}'
'/dash_renderer/bundle.js'
).format(__version__),
'namespace': 'dash_renderer'
}
]
| # For reasons that I don't fully understand,
# unless I include __file__ in here, the packaged version
# of this module will just be a .egg file, not a .egg folder.
# And if it's just a .egg file, it won't include the necessary
# dependencies from MANIFEST.in.
# Found the __file__ clue by inspecting the `python setup.py install`
# command in the dash_html_components package which printed out:
# `dash_html_components.__init__: module references __file__`
# TODO - Understand this better
from .version import __version__
__file__
# Dash renderer's dependencies get loaded in a special order by the server:
# React bundles first, the renderer bundle at the very end.
_js_dist_dependencies = [
{
'external_url': [
'https://unpkg.com/[email protected]/dist/react.min.js',
'https://unpkg.com/[email protected]/dist/react-dom.min.js'
],
'relative_package_path': [
'[email protected]',
'[email protected]'
],
'namespace': 'dash_renderer'
}
]
_js_dist = [
{
'relative_package_path': 'bundle.js',
"external_url": (
'https://unpkg.com/dash-renderer@{}'
'/dash_renderer/bundle.js'
).format(__version__),
'namespace': 'dash_renderer'
}
]
| Extend import statement to support Python 3 | Extend import statement to support Python 3
| Python | mit | plotly/dash,plotly/dash,plotly/dash,plotly/dash,plotly/dash |
ad276d549eebe9c6fe99a629a76f02fc04b2bd51 | tests/test_pubannotation.py | tests/test_pubannotation.py |
import kindred
def test_pubannotation():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount == 50
assert relationCount == 1454
assert entityCount == 2657
if __name__ == '__main__':
test_pubannotation()
|
import kindred
def test_pubannotation():
corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')
assert isinstance(corpus,kindred.Corpus)
fileCount = len(corpus.documents)
entityCount = sum([ len(d.entities) for d in corpus.documents ])
relationCount = sum([ len(d.relations) for d in corpus.documents ])
assert fileCount > 0
assert relationCount > 0
assert entityCount > 0
if __name__ == '__main__':
test_pubannotation()
| Simplify pubannotation test to not check exact numbers | Simplify pubannotation test to not check exact numbers
| Python | mit | jakelever/kindred,jakelever/kindred |
4b659b7b2552da033753349e059eee172025e00e | adbwp/__init__.py | adbwp/__init__.py | """
adbwp
~~~~~
Android Debug Bridge (ADB) Wire Protocol.
"""
# pylint: disable=wildcard-import
from . import exceptions
from .exceptions import *
from . import header
from .header import Header
from . import message
from .message import Message
__all__ = exceptions.__all__ + ['header', 'message', 'Header', 'Message']
__version__ = '0.0.1'
| """
adbwp
~~~~~
Android Debug Bridge (ADB) Wire Protocol.
"""
# pylint: disable=wildcard-import
from . import exceptions, header, message
from .exceptions import *
from .header import Header
from .message import Message
__all__ = exceptions.__all__ + ['header', 'message', 'Header', 'Message']
__version__ = '0.0.1'
| Reorder imports based on isort rules. | Reorder imports based on isort rules.
| Python | apache-2.0 | adbpy/wire-protocol |
12b1b7a477cc99e1c3ec3405269999c7974677b6 | aioinotify/cli.py | aioinotify/cli.py | import logging
from argparse import ArgumentParser
import asyncio
from .protocol import connect_inotify
logger = logging.getLogger(__name__)
def main():
parser = ArgumentParser()
parser.add_argument(
'-ll', '--log-level', choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'], default='WARNING')
parser.add_argument('paths', nargs='+', help='File path(s) to watch for file system events')
args = parser.parse_args()
logging.basicConfig(level=getattr(logging, args.log_level))
try:
loop = asyncio.get_event_loop()
_, inotify = loop.run_until_complete(connect_inotify())
@asyncio.coroutine
def run(inotify):
@asyncio.coroutine
def callback(event):
print(event)
for path in args.paths:
watch = yield from inotify.watch(callback, path, all_events=True)
logger.debug('Added watch %s for all events in %s', watch.watch_descriptor, path)
yield from inotify.close_event.wait()
try:
loop.run_until_complete(run(inotify))
except KeyboardInterrupt:
inotify.close()
loop.run_until_complete(inotify.close_event.wait())
finally:
loop.close()
if __name__ == '__main__':
main()
| import logging
from argparse import ArgumentParser
import asyncio
from .protocol import connect_inotify
logger = logging.getLogger(__name__)
def main():
parser = ArgumentParser()
parser.add_argument(
'-ll', '--log-level', choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'], default='WARNING')
parser.add_argument('paths', nargs='+', help='File path(s) to watch for file system events')
args = parser.parse_args()
logging.basicConfig(level=getattr(logging, args.log_level))
loop = asyncio.get_event_loop()
try:
_, inotify = loop.run_until_complete(connect_inotify())
@asyncio.coroutine
def run(inotify):
@asyncio.coroutine
def callback(event):
print(event)
for path in args.paths:
watch = yield from inotify.watch(callback, path, all_events=True)
logger.debug('Added watch %s for all events in %s', watch.watch_descriptor, path)
yield from inotify.close_event.wait()
try:
loop.run_until_complete(run(inotify))
except KeyboardInterrupt:
inotify.close()
loop.run_until_complete(inotify.close_event.wait())
finally:
loop.close()
if __name__ == '__main__':
main()
| Move getting the event loop out of try/except | Move getting the event loop out of try/except
| Python | apache-2.0 | mwfrojdman/aioinotify |
9384f76d4ecfe2a822747020ba20771019105aaa | metric_thread.py | metric_thread.py | #!/usr/bin/env python
# Copyright 2014 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from threading import Thread, Lock
import time
from metric_item import MetricItem
from exec_proc import ExecProc
from sys import stdout
stdoutmutex = Lock()
class MetricThread(Thread):
def __init__(self,item,mutex):
Thread.__init__(self,name=item.getName())
self.mutex = mutex
self.pollingInterval = item.getPollingInterval()
self.name = item.getName()
self.proc = ExecProc()
self.proc.setCommand(item.getCommand())
self.proc.setDebug(item.getDebug())
def run(self): # run provides thread logic
while True:
output = self.proc.execute()
with self.mutex:
stdout.write(output)
stdout.flush()
time.sleep(self.pollingInterval)
| #!/usr/bin/env python
# Copyright 2014 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from threading import Thread, Lock
import time
from metric_item import MetricItem
from exec_proc import ExecProc
from sys import stdout
stdoutmutex = Lock()
class MetricThread(Thread):
def __init__(self,item,mutex):
Thread.__init__(self,name=item.getName())
self.setDaemon(True)
self.mutex = mutex
self.pollingInterval = item.getPollingInterval()
self.name = item.getName()
self.proc = ExecProc()
self.proc.setCommand(item.getCommand())
self.proc.setDebug(item.getDebug())
def run(self): # run provides thread logic
while True:
output = self.proc.execute()
with self.mutex:
stdout.write(output)
stdout.flush()
time.sleep(self.pollingInterval)
| Set threads to daemons so that they exit when the main thread exits | Set threads to daemons so that they exit when the main thread exits
| Python | apache-2.0 | boundary/boundary-plugin-shell,boundary/boundary-plugin-shell,jdgwartney/boundary-plugin-shell,jdgwartney/boundary-plugin-shell |
164b07fefdd8db74ccce7ff44c33a6120cd98c86 | mfr/ext/tabular/libs/xlrd_tools.py | mfr/ext/tabular/libs/xlrd_tools.py | import xlrd
from ..exceptions import TableTooBigException, EmptyTableException
from ..configuration import config
from ..utilities import header_population
from ..compat import range
def xlsx_xlrd(fp):
"""Read and convert a xlsx file to JSON format using the xlrd library
:param fp: File pointer object
:return: tuple of table headers and data
"""
max_size = config['max_size']
wb = xlrd.open_workbook(fp.name)
# Currently only displays the first sheet if there are more than one.
sheet = wb.sheets()[0]
if sheet.ncols > max_size or sheet.nrows > max_size:
raise TableTooBigException("Table is too large to render.")
if sheet.ncols < 1 or sheet.nrows < 1:
raise EmptyTableException("Table is empty or corrupt.")
fields = sheet.row_values(0) if sheet.nrows else []
fields = [value or 'Unnamed: {0}'.format(index+1) for index, value in enumerate(fields)]
data = [dict(zip(fields, sheet.row_values(row_index)))
for row_index in range(1, sheet.nrows)]
header = header_population(fields)
return header, data
| import xlrd
from ..exceptions import TableTooBigException, EmptyTableException
from ..configuration import config
from ..utilities import header_population
from ..compat import range
def xlsx_xlrd(fp):
"""Read and convert a xlsx file to JSON format using the xlrd library
:param fp: File pointer object
:return: tuple of table headers and data
"""
max_size = config['max_size']
wb = xlrd.open_workbook(fp.name)
# Currently only displays the first sheet if there are more than one.
sheet = wb.sheets()[0]
if sheet.ncols > max_size or sheet.nrows > max_size:
raise TableTooBigException("Table is too large to render.")
if sheet.ncols < 1 or sheet.nrows < 1:
raise EmptyTableException("Table is empty or corrupt.")
fields = sheet.row_values(0) if sheet.nrows else []
fields = [str(value) or 'Unnamed: {0}'.format(index+1) for index, value in enumerate(fields)]
data = [dict(zip(fields, sheet.row_values(row_index)))
for row_index in range(1, sheet.nrows)]
header = header_population(fields)
return header, data
| Fix xlrd issue Column headers must be strings | Fix xlrd issue
Column headers must be strings
| Python | apache-2.0 | mfraezz/modular-file-renderer,rdhyee/modular-file-renderer,AddisonSchiller/modular-file-renderer,mfraezz/modular-file-renderer,TomBaxter/modular-file-renderer,rdhyee/modular-file-renderer,AddisonSchiller/modular-file-renderer,CenterForOpenScience/modular-file-renderer,felliott/modular-file-renderer,AddisonSchiller/modular-file-renderer,icereval/modular-file-renderer,haoyuchen1992/modular-file-renderer,CenterForOpenScience/modular-file-renderer,Johnetordoff/modular-file-renderer,haoyuchen1992/modular-file-renderer,TomBaxter/modular-file-renderer,felliott/modular-file-renderer,rdhyee/modular-file-renderer,felliott/modular-file-renderer,mfraezz/modular-file-renderer,Johnetordoff/modular-file-renderer,CenterForOpenScience/modular-file-renderer,icereval/modular-file-renderer,mfraezz/modular-file-renderer,haoyuchen1992/modular-file-renderer,TomBaxter/modular-file-renderer,Johnetordoff/modular-file-renderer,icereval/modular-file-renderer,felliott/modular-file-renderer,AddisonSchiller/modular-file-renderer,rdhyee/modular-file-renderer,TomBaxter/modular-file-renderer,Johnetordoff/modular-file-renderer,CenterForOpenScience/modular-file-renderer,haoyuchen1992/modular-file-renderer |
0f62dc9ba898db96390658107e9ebe9930f8b90a | mmiisort/main.py | mmiisort/main.py | from isort import SortImports
import itertools
import mothermayi.colors
import mothermayi.errors
def plugin():
return {
'name' : 'isort',
'pre-commit' : pre_commit,
}
def do_sort(filename):
results = SortImports(filename)
return results.in_lines != results.out_lines
def get_status(had_changes):
return mothermayi.colors.red('unsorted') if had_changes else mothermayi.colors.green('sorted')
def pre_commit(config, staged):
changes = [do_sort(filename) for filename in staged]
messages = [get_status(had_change) for had_change in changes]
lines = [" {0:<30} ... {1:<10}".format(filename, message) for filename, message in itertools.izip(staged, messages)]
result = "\n".join(lines)
if any(changes):
raise mothermayi.errors.FailHook(result)
return result
| from isort import SortImports
import mothermayi.colors
import mothermayi.errors
def plugin():
return {
'name' : 'isort',
'pre-commit' : pre_commit,
}
def do_sort(filename):
results = SortImports(filename)
return results.in_lines != results.out_lines
def get_status(had_changes):
return mothermayi.colors.red('unsorted') if had_changes else mothermayi.colors.green('sorted')
def pre_commit(config, staged):
changes = [do_sort(filename) for filename in staged]
messages = [get_status(had_change) for had_change in changes]
lines = [" {0:<30} ... {1:<10}".format(filename, message) for filename, message in zip(staged, messages)]
result = "\n".join(lines)
if any(changes):
raise mothermayi.errors.FailHook(result)
return result
| Make plugin work in python 3 | Make plugin work in python 3
Python 3 doesn't have itertools.izip, just the builtin, zip. This logic
allows us to pull out either one depending on python version
| Python | mit | EliRibble/mothermayi-isort |
014c8ca68b196c78b9044b194b762cdb3dfe6c78 | app/hooks/views.py | app/hooks/views.py | from __future__ import absolute_import
from __future__ import unicode_literals
from app import app, webhooks
@webhooks.hook(
app.config.get('GITLAB_HOOK','/hooks/gitlab'),
handler='gitlab')
class Gitlab:
def issue(self, data):
pass
def push(self, data):
pass
def tag_push(self, data):
pass
def merge_request(self, data):
pass
def commit_comment(self, data):
pass
def issue_comment(self, data):
pass
def merge_request_comment(self, data):
pass
def snippet_comment(self, data):
pass
| from __future__ import absolute_import
from __future__ import unicode_literals
from app import app, webhooks
@webhooks.hook(
app.config.get('GITLAB_HOOK','/hooks/gitlab'),
handler='gitlab')
class Gitlab:
def issue(self, data):
# if the repository belongs to a group check if a channel with the same
# name (lowercased and hyphened) exists
# Check if a channel with the same repository name exists
# If the channel exists post to that channel
# If not post to general or other defined by configuration
# publish the issue to the found channel including the Title, Message
# and the creator and responsible if defined
pass
def push(self, data):
# Read commit list to update commit count for user
pass
def tag_push(self, data):
# Publish news of the new version of the repo in general
pass
def merge_request(self, data):
# Notify in the channel
pass
def commit_comment(self, data):
# Notify comment and receiver in the channel
pass
def issue_comment(self, data):
# Notify comment and receiver in the channel
pass
def merge_request_comment(self, data):
# Notify comment and receiver in the channel
pass
def snippet_comment(self, data):
# Do nothing for now
pass
| Add comment description of methods for gitlab hook | Add comment description of methods for gitlab hook
| Python | apache-2.0 | pipex/gitbot,pipex/gitbot,pipex/gitbot |
6ecada90e944ee976197e0ee79baf1d711a20803 | cla_public/apps/base/forms.py | cla_public/apps/base/forms.py | # -*- coding: utf-8 -*-
"Base forms"
from flask_wtf import Form
from wtforms import StringField, TextAreaField
from cla_public.apps.base.fields import MultiRadioField
from cla_public.apps.base.constants import FEEL_ABOUT_SERVICE, \
HELP_FILLING_IN_FORM
class FeedbackForm(Form):
difficulty = TextAreaField(u'Did you have any difficulty with this service?')
ideas = TextAreaField(u'Do you have any ideas for how it could be improved?')
feel_about_service = MultiRadioField(
u'Overall, how did you feel about the service you received today?',
choices=FEEL_ABOUT_SERVICE)
help_filling_in_form = MultiRadioField(
u'Did you have any help filling in this form?',
choices=HELP_FILLING_IN_FORM)
| # -*- coding: utf-8 -*-
"Base forms"
from flask_wtf import Form
from wtforms import StringField, TextAreaField
from cla_public.apps.base.fields import MultiRadioField
from cla_public.apps.base.constants import FEEL_ABOUT_SERVICE, \
HELP_FILLING_IN_FORM
from cla_public.apps.checker.honeypot import Honeypot
class FeedbackForm(Honeypot, Form):
difficulty = TextAreaField(u'Did you have any difficulty with this service?')
ideas = TextAreaField(u'Do you have any ideas for how it could be improved?')
feel_about_service = MultiRadioField(
u'Overall, how did you feel about the service you received today?',
choices=FEEL_ABOUT_SERVICE)
help_filling_in_form = MultiRadioField(
u'Did you have any help filling in this form?',
choices=HELP_FILLING_IN_FORM)
| Add honeypot field to feedback form | Add honeypot field to feedback form
| Python | mit | ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public |
4c76a99e1d72820a367d2195fbd3edc1b0af30fd | organizer/models.py | organizer/models.py | from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
class Startup(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
description = models.TextField()
founded_date = models.DateField()
contact = models.EmailField()
website = models.URLField()
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField()
link = models.URLField()
startup = models.ForeignKey(Startup)
| from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
description = models.TextField()
founded_date = models.DateField()
contact = models.EmailField()
website = models.URLField()
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField()
link = models.URLField()
startup = models.ForeignKey(Startup)
| Add options to Tag model fields. | Ch03: Add options to Tag model fields. [skip ci]
Field options allow us to easily customize behavior of a field.
Global Field Options:
https://docs.djangoproject.com/en/1.8/ref/models/fields/#help-text
https://docs.djangoproject.com/en/1.8/ref/models/fields/#unique
The max_length field option is defined in CharField and inherited by all
CharField subclasses (but is typically optional in these subclasses,
unlike CharField itself).
| Python | bsd-2-clause | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 |
e2fbf646b193284fc5d01684193b9c5aeb415efe | generate_html.py | generate_html.py | from jinja2 import Environment, FileSystemLoader
import datetime
import json
env = Environment(loader=FileSystemLoader('templates'), autoescape=True)
names_template = env.get_template('names.html')
area_template = env.get_template('areas.html')
with open("output/templates.js") as templatesjs:
templates = templatesjs.read()
with open("processed/area_matches.json") as area_matches_file:
area_matches = json.load(area_matches_file)
with open('output/areas.html', 'w+') as name_output:
name_output.write(area_template.render(
templates=templates,
area_matches=area_matches,
date=datetime.date.today().isoformat(),
))
with open("processed/interesting_names.json") as interesting_names_file:
interesting_names = json.load(interesting_names_file)
with open('output/names.html', 'w+') as name_output:
name_output.write(names_template.render(
templates=templates,
interesting_names=interesting_names,
interesting_names_json=json.dumps(interesting_names),
date=datetime.date.today().isoformat(),
))
| from jinja2 import Environment, FileSystemLoader
import datetime
import json
env = Environment(loader=FileSystemLoader('templates'), autoescape=True)
names_template = env.get_template('names.html')
area_template = env.get_template('areas.html')
with open("output/templates.js") as templatesjs:
templates = templatesjs.read()
with open("processed/area_matches.json") as area_matches_file:
area_matches = json.load(area_matches_file)
with open('output/areas.html', 'w+') as name_output:
name_output.write(area_template.render(
templates=templates,
area_matches=area_matches,
date=datetime.date.today().isoformat(),
))
with open("processed/interesting_names.json") as interesting_names_file:
interesting_names = json.load(interesting_names_file)
with open('output/names.html', 'w+') as name_output, open("key_field_names.txt") as key_field_names_file:
key_fields = list(set([key_field_name.strip() for key_field_name in key_field_names_file]))
name_output.write(names_template.render(
templates=templates,
interesting_names=interesting_names,
interesting_names_json=json.dumps(interesting_names),
date=datetime.date.today().isoformat(),
key_fields_json=json.dumps(key_fields),
))
| Fix due to merge conflicts | Fix due to merge conflicts
| Python | agpl-3.0 | TalkAboutLocal/local-news-engine,TalkAboutLocal/local-news-engine,TalkAboutLocal/local-news-engine,TalkAboutLocal/local-news-engine |
0ed9e159fa606c9dbdb90dfc64fcb357e9f9cedb | plenum/test/test_request.py | plenum/test/test_request.py | from indy_common.types import Request
def test_request_all_identifiers_returns_empty_list_for_request_without_signatures():
req = Request()
assert req.all_identifiers == [] | from plenum.common.request import Request
def test_request_all_identifiers_returns_empty_list_for_request_without_signatures():
req = Request()
assert req.all_identifiers == [] | Fix wrong import in test | Fix wrong import in test
Signed-off-by: Sergey Khoroshavin <[email protected]>
| Python | apache-2.0 | evernym/zeno,evernym/plenum |
a16fd23027b5d3f1378f5b9f75958d0f3ef2a124 | bandit/__init__.py | bandit/__init__.py | """
django-email-bandit is a Django email backend for hijacking email sending in a test environment.
"""
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
| """
django-email-bandit is a Django email backend for hijacking email sending in a test environment.
"""
__version_info__ = {
'major': 1,
'minor': 0,
'micro': 0,
'releaselevel': 'dev',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
| Bump version number to reflect dev status. | Bump version number to reflect dev status.
| Python | bsd-3-clause | caktus/django-email-bandit,vericant/django-email-bandit,caktus/django-email-bandit |
4f170397acac08c6fd8a4573ead1f66d631ac8dc | dsub/_dsub_version.py | dsub/_dsub_version.py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.1'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.2.dev0'
| Update dsub version to 0.3.2.dev0 | Update dsub version to 0.3.2.dev0
PiperOrigin-RevId: 243855458
| Python | apache-2.0 | DataBiosphere/dsub,DataBiosphere/dsub |
f604979e94fab59eb1b422d4e62ad62d3360c2ac | onserver/urls.py | onserver/urls.py | # -*- coding: utf-8 -*-
"""onserver URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
| # -*- coding: utf-8 -*-
"""onserver URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^', admin.site.urls),
]
| Use admin interface by default | Use admin interface by default
| Python | mit | on-server/on-server-api,on-server/on-server-api |
0241e253c68ca6862a3da26d29a649f65c27ae36 | demos/chatroom/experiment.py | demos/chatroom/experiment.py | """Coordination chatroom game."""
import dallinger as dlgr
from dallinger.config import get_config
try:
unicode = unicode
except NameError: # Python 3
unicode = str
config = get_config()
def extra_settings():
config.register('network', unicode)
config.register('n', int)
class CoordinationChatroom(dlgr.experiments.Experiment):
"""Define the structure of the experiment."""
def __init__(self, session):
"""Initialize the experiment."""
super(CoordinationChatroom, self).__init__(session)
self.experiment_repeats = 1
self.num_participants = config.get('n')
self.initial_recruitment_size = self.num_participants
self.quorum = self.num_participants
self.config = config
if not self.config.ready:
self.config.load_config()
self.setup()
def create_network(self):
"""Create a new network by reading the configuration file."""
class_ = getattr(
dlgr.networks,
self.config.get('network')
)
return class_(max_size=self.num_participants)
def info_post_request(self, node, info):
"""Run when a request to create an info is complete."""
for agent in node.neighbors():
node.transmit(what=info, to_whom=agent)
def create_node(self, participant, network):
"""Create a node for a participant."""
return dlgr.nodes.Agent(network=network, participant=participant)
| """Coordination chatroom game."""
import dallinger as dlgr
from dallinger.compat import unicode
from dallinger.config import get_config
config = get_config()
def extra_settings():
config.register('network', unicode)
config.register('n', int)
class CoordinationChatroom(dlgr.experiments.Experiment):
"""Define the structure of the experiment."""
def __init__(self, session):
"""Initialize the experiment."""
super(CoordinationChatroom, self).__init__(session)
self.experiment_repeats = 1
self.num_participants = config.get('n')
self.initial_recruitment_size = self.num_participants
self.quorum = self.num_participants
self.config = config
if not self.config.ready:
self.config.load_config()
self.setup()
def create_network(self):
"""Create a new network by reading the configuration file."""
class_ = getattr(
dlgr.networks,
self.config.get('network')
)
return class_(max_size=self.num_participants)
def info_post_request(self, node, info):
"""Run when a request to create an info is complete."""
for agent in node.neighbors():
node.transmit(what=info, to_whom=agent)
def create_node(self, participant, network):
"""Create a node for a participant."""
return dlgr.nodes.Agent(network=network, participant=participant)
| Use compat for unicode import | Use compat for unicode import
| Python | mit | Dallinger/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger |
8033b00ebbcb8e294f47ee558e76ee260ec18d2b | orglog-config.py | orglog-config.py | org = "servo"
ignore_repos = ["skia", "skia-snapshots", "cairo", "libpng", "libcss",
"libhubbub", "libparserutils", "libwapcaplet", "pixman"]
count_forks = ["glutin","rust-openssl"]
# Path to where we'll dump the bare checkouts. Must end in /
clones_dir = "repos/"
# Path to the concatenated log
log_path = "log.txt"
# Nuke the clones_dir afterwards?
destroy_clones = True
| org = "servo"
ignore_repos = ["skia", "skia-snapshots", "cairo", "libpng", "libcss",
"libhubbub", "libparserutils", "libwapcaplet", "pixman",
"libfreetype2"]
count_forks = ["glutin","rust-openssl"]
# Path to where we'll dump the bare checkouts. Must end in /
clones_dir = "repos/"
# Path to the concatenated log
log_path = "log.txt"
# Nuke the clones_dir afterwards?
destroy_clones = True
| Remove libfreetype2, which should have been omitted and was breaking the scripts | Remove libfreetype2, which should have been omitted and was breaking the scripts
| Python | mit | servo/servo-org-stats,servo/servo-org-stats,servo/servo-org-stats |
1dfff48a5ddb910b4abbcf8e477b3dda9d606a49 | scripts/maf_split_by_src.py | scripts/maf_split_by_src.py | #!/usr/bin/env python2.3
"""
Read a MAF from stdin and break into a set of mafs containing
no more than a certain number of columns
"""
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
writer_key = string.join( [ c.src for c in m.components ], '_' )
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
| #!/usr/bin/env python2.3
"""
Read a MAF from stdin and break into a set of mafs containing
no more than a certain number of columns
"""
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
parser.add_option( "-c", "--component", action="store", default=None )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
comp = options.component
if comp is not None:
comp = int( comp )
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
if comp is None:
writer_key = string.join( [ c.src for c in m.components ], '_' )
else:
writer_key = m.components[ comp ].src
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
| Allow splitting by a particular component (by index) | Allow splitting by a particular component (by index)
| Python | mit | bxlab/bx-python,bxlab/bx-python,bxlab/bx-python |
ead9192b4c2acb21df917dfe116785343e9a59a6 | scripts/patches/transfer.py | scripts/patches/transfer.py | patches = [
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
"value": "String",
},
]
| patches = [
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
"value": "String",
},
{
"op": "move",
"from": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/ItemType",
"path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
},
{
"op": "replace",
"path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
"value": "String",
},
]
| Fix spec issue with Transfer::Server ProtocolDetails | Fix spec issue with Transfer::Server ProtocolDetails
| Python | bsd-2-clause | cloudtools/troposphere,cloudtools/troposphere |
4fe19797ba2fb12239ae73da60bb3e726b23ffe9 | web/forms.py | web/forms.py | from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from .models import UniqueEmailUser
class UniqueEmailUserCreationForm(UserCreationForm):
"""
A form that creates a UniqueEmailUser.
"""
def __init__(self, *args, **kargs):
super(UniqueEmailUserCreationForm, self).__init__(*args, **kargs)
del self.fields['username']
class Meta:
model = UniqueEmailUser
fields = ("email",)
class UniqueEmailUserChangeForm(UserChangeForm):
"""
A form for updating a UniqueEmailUser.
"""
def __init__(self, *args, **kargs):
super(UniqueEmailUserChangeForm, self).__init__(*args, **kargs)
del self.fields['username']
class Meta:
model = UniqueEmailUser
fields = ("email",)
| from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from .models import UniqueEmailUser
class UniqueEmailUserCreationForm(UserCreationForm):
"""
A form that creates a UniqueEmailUser.
"""
class Meta:
model = UniqueEmailUser
fields = ("email",)
class UniqueEmailUserChangeForm(UserChangeForm):
"""
A form for updating a UniqueEmailUser.
"""
class Meta:
model = UniqueEmailUser
fields = ("email",)
| Fix bug in admin user editing | Fix bug in admin user editing
Fixes KeyError when creating or editing a UniqueEmailUser in the admin
interface.
| Python | mit | uppercounty/uppercounty,uppercounty/uppercounty,uppercounty/uppercounty |
89225ed0c7ec627ee32fd973d5f1fb95da173be2 | djangae/contrib/locking/memcache.py | djangae/contrib/locking/memcache.py | import random
import time
from datetime import datetime
from django.core.cache import cache
class MemcacheLock(object):
def __init__(self, identifier, cache, unique_value):
self.identifier = identifier
self._cache = cache
self.unique_value = unique_value
@classmethod
def acquire(cls, identifier, wait=True, steal_after_ms=None):
start_time = datetime.utcnow()
unique_value = random.randint(1, 100000)
while True:
acquired = cache.add(identifier, unique_value)
if acquired:
return cls(identifier, cache, unique_value)
elif not wait:
return None
else:
# We are waiting for the lock
if steal_after_ms and (datetime.utcnow() - start_time).total_seconds() * 1000 > steal_after_ms:
# Steal anyway
cache.set(identifier, unique_value)
return cls(identifier, cache, unique_value)
time.sleep(0)
def release(self):
cache = self._cache
# Delete the key if it was ours. There is a race condition here
# if something steals the lock between the if and the delete...
if cache.get(self.identifier) == self.unique_value:
cache.delete(self.identifier)
| import random
import time
from datetime import datetime
from django.core.cache import cache
class MemcacheLock(object):
def __init__(self, identifier, unique_value):
self.identifier = identifier
self.unique_value = unique_value
@classmethod
def acquire(cls, identifier, wait=True, steal_after_ms=None):
start_time = datetime.utcnow()
unique_value = random.randint(1, 100000)
while True:
acquired = cache.add(identifier, unique_value)
if acquired:
return cls(identifier, unique_value)
elif not wait:
return None
else:
# We are waiting for the lock
if steal_after_ms and (datetime.utcnow() - start_time).total_seconds() * 1000 > steal_after_ms:
# Steal anyway
cache.set(identifier, unique_value)
return cls(identifier, unique_value)
time.sleep(0)
def release(self):
# Delete the key if it was ours. There is a race condition here
# if something steals the lock between the if and the delete...
if cache.get(self.identifier) == self.unique_value:
cache.delete(self.identifier)
| Remove pointless `_cache` attribute on MemcacheLock class. | Remove pointless `_cache` attribute on MemcacheLock class.
If this was doing anything useful, I have no idea what it was.
| Python | bsd-3-clause | potatolondon/djangae,potatolondon/djangae |
a715821c75521e25172805c98d204fc4e24a4641 | CodeFights/circleOfNumbers.py | CodeFights/circleOfNumbers.py | #!/usr/local/bin/python
# Code Fights Circle of Numbers Problem
def circleOfNumbers(n, firstNumber):
pass
def main():
tests = [
["crazy", "dsbaz"],
["z", "a"]
]
for t in tests:
res = circleOfNumbers(t[0], t[1])
if t[2] == res:
print("PASSED: circleOfNumbers({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: circleOfNumbers({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, t[2]))
if __name__ == '__main__':
main()
| #!/usr/local/bin/python
# Code Fights Circle of Numbers Problem
def circleOfNumbers(n, firstNumber):
mid = n / 2
return (mid + firstNumber if firstNumber < mid else firstNumber - mid)
def main():
tests = [
[10, 2, 7],
[10, 7, 2],
[4, 1, 3],
[6, 3, 0]
]
for t in tests:
res = circleOfNumbers(t[0], t[1])
if t[2] == res:
print("PASSED: circleOfNumbers({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: circleOfNumbers({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, t[2]))
if __name__ == '__main__':
main()
| Solve Code Fights circle of numbers problem | Solve Code Fights circle of numbers problem
| Python | mit | HKuz/Test_Code |
9ac662557d6313190621c0c84a2c6923e0e9fa72 | nodeconductor/logging/middleware.py | nodeconductor/logging/middleware.py | from __future__ import unicode_literals
import threading
_locals = threading.local()
def get_event_context():
return getattr(_locals, 'context', None)
def set_event_context(context):
_locals.context = context
def reset_event_context():
if hasattr(_locals, 'context'):
del _locals.context
def set_current_user(user):
set_event_context(user._get_log_context('user'))
def get_ip_address(request):
"""
Correct IP address is expected as first element of HTTP_X_FORWARDED_FOR or REMOTE_ADDR
"""
if 'HTTP_X_FORWARDED_FOR' in request.META:
return request.META['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
else:
return request.META['REMOTE_ADDR']
class CaptureEventContextMiddleware(object):
def process_request(self, request):
context = {'ip_address': get_ip_address(request)}
user = getattr(request, 'user', None)
if user and not user.is_anonymous():
context.update(user._get_log_context('user'))
set_event_context(context)
def process_response(self, request, response):
reset_event_context()
return response
| from __future__ import unicode_literals
import threading
_locals = threading.local()
def get_event_context():
return getattr(_locals, 'context', None)
def set_event_context(context):
_locals.context = context
def reset_event_context():
if hasattr(_locals, 'context'):
del _locals.context
def set_current_user(user):
context = get_event_context() or {}
context.update(user._get_log_context('user'))
set_event_context(context)
def get_ip_address(request):
"""
Correct IP address is expected as first element of HTTP_X_FORWARDED_FOR or REMOTE_ADDR
"""
if 'HTTP_X_FORWARDED_FOR' in request.META:
return request.META['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
else:
return request.META['REMOTE_ADDR']
class CaptureEventContextMiddleware(object):
def process_request(self, request):
context = {'ip_address': get_ip_address(request)}
user = getattr(request, 'user', None)
if user and not user.is_anonymous():
context.update(user._get_log_context('user'))
set_event_context(context)
def process_response(self, request, response):
reset_event_context()
return response
| Update event context instead of replace (NC-529) | Update event context instead of replace (NC-529)
| Python | mit | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor |
93a95afe231910d9f683909994692fadaf107057 | readme_renderer/markdown.py | readme_renderer/markdown.py | # Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import markdown
from .clean import clean
def render(raw):
rendered = markdown.markdown(
raw,
extensions=[
'markdown.extensions.codehilite',
'markdown.extensions.fenced_code',
'markdown.extensions.smart_strong',
])
return clean(rendered or raw), bool(rendered)
| # Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import markdown
from .clean import clean
def render(raw):
rendered = markdown.markdown(
raw,
extensions=[
'markdown.extensions.codehilite',
'markdown.extensions.fenced_code',
'markdown.extensions.smart_strong',
])
if rendered:
return clean(rendered)
else:
return None
| Make md.render have the same API as rst.render | Make md.render have the same API as rst.render
| Python | apache-2.0 | pypa/readme,pypa/readme_renderer |
345ccc9d503e6e55fe46d7813958c0081cc1cffe | openstack_dashboard/views.py | openstack_dashboard/views.py | # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from horizon import base
from openstack_auth import views
def get_user_home(user):
dashboard = None
if user.is_superuser:
try:
dashboard = horizon.get_dashboard('admin')
except base.NotRegistered:
pass
if dashboard is None:
dashboard = horizon.get_default_dashboard()
return dashboard.get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
| # Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from horizon import base
from openstack_auth import forms
def get_user_home(user):
dashboard = None
if user.is_superuser:
try:
dashboard = horizon.get_dashboard('admin')
except base.NotRegistered:
pass
if dashboard is None:
dashboard = horizon.get_default_dashboard()
return dashboard.get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(horizon.get_user_home(request.user))
form = forms.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
| Fix issues with importing the Login form | Fix issues with importing the Login form
The Login form lives in openstack_auth.forms and should be directly
imported from that file.
Change-Id: I42808530024bebb01604adbf4828769812856bf3
Closes-Bug: #1332149
| Python | apache-2.0 | Mirantis/mos-horizon,endorphinl/horizon,RudoCris/horizon,davidcusatis/horizon,Daniex/horizon,watonyweng/horizon,tqtran7/horizon,sandvine/horizon,openstack/horizon,mdavid/horizon,davidcusatis/horizon,maestro-hybrid-cloud/horizon,VaneCloud/horizon,CiscoSystems/avos,Dark-Hacker/horizon,luhanhan/horizon,RudoCris/horizon,froyobin/horizon,xme1226/horizon,Mirantis/mos-horizon,CiscoSystems/horizon,tellesnobrega/horizon,Daniex/horizon,redhat-cip/horizon,Daniex/horizon,karthik-suresh/horizon,nvoron23/avos,davidcusatis/horizon,liyitest/rr,wangxiangyu/horizon,JioCloud/horizon,wangxiangyu/horizon,vladryk/horizon,yeming233/horizon,kfox1111/horizon,wangxiangyu/horizon,Metaswitch/horizon,JioCloud/horizon,dan1/horizon-x509,gerrive/horizon,Metaswitch/horizon,saydulk/horizon,aaronorosen/horizon-congress,j4/horizon,promptworks/horizon,watonyweng/horizon,Dark-Hacker/horizon,FNST-OpenStack/horizon,blueboxgroup/horizon,blueboxgroup/horizon,Hodorable/0602,yjxtogo/horizon,ging/horizon,damien-dg/horizon,endorphinl/horizon-fork,agileblaze/OpenStackTwoFactorAuthentication,tellesnobrega/horizon,VaneCloud/horizon,yjxtogo/horizon,xinwu/horizon,ging/horizon,kfox1111/horizon,liyitest/rr,eayunstack/horizon,takeshineshiro/horizon,newrocknj/horizon,promptworks/horizon,yjxtogo/horizon,nvoron23/avos,doug-fish/horizon,promptworks/horizon,bac/horizon,noironetworks/horizon,CiscoSystems/avos,ging/horizon,tqtran7/horizon,coreycb/horizon,mrunge/horizon_lib,karthik-suresh/horizon,zouyapeng/horizon,yeming233/horizon,orbitfp7/horizon,endorphinl/horizon-fork,mdavid/horizon,karthik-suresh/horizon,wolverineav/horizon,redhat-cip/horizon,dan1/horizon-proto,newrocknj/horizon,BiznetGIO/horizon,idjaw/horizon,RudoCris/horizon,redhat-cip/horizon,Tesora/tesora-horizon,izadorozhna/dashboard_integration_tests,mdavid/horizon,icloudrnd/automation_tools,agileblaze/OpenStackTwoFactorAuthentication,yeming233/horizon,NCI-Cloud/horizon,Tesora/tesora-horizon,ChameleonCloud/horizon,Solinea/horizon,tsufiev/hori
zon,doug-fish/horizon,NeCTAR-RC/horizon,Dark-Hacker/horizon,gerrive/horizon,django-leonardo/horizon,philoniare/horizon,flochaz/horizon,flochaz/horizon,idjaw/horizon,nvoron23/avos,openstack/horizon,Tesora/tesora-horizon,maestro-hybrid-cloud/horizon,liyitest/rr,NCI-Cloud/horizon,agileblaze/OpenStackTwoFactorAuthentication,mrunge/horizon_lib,yjxtogo/horizon,NCI-Cloud/horizon,bigswitch/horizon,redhat-openstack/horizon,saydulk/horizon,takeshineshiro/horizon,dan1/horizon-proto,redhat-openstack/horizon,dan1/horizon-proto,tsufiev/horizon,mrunge/horizon_lib,froyobin/horizon,anthonydillon/horizon,pranavtendolkr/horizon,bigswitch/horizon,mandeepdhami/horizon,doug-fish/horizon,kfox1111/horizon,BiznetGIO/horizon,takeshineshiro/horizon,Hodorable/0602,NeCTAR-RC/horizon,sandvine/horizon,NCI-Cloud/horizon,wolverineav/horizon,Solinea/horizon,sandvine/horizon,django-leonardo/horizon,NeCTAR-RC/horizon,mrunge/horizon,mandeepdhami/horizon,bigswitch/horizon,gerrive/horizon,blueboxgroup/horizon,Dark-Hacker/horizon,idjaw/horizon,icloudrnd/automation_tools,xme1226/horizon,RudoCris/horizon,noironetworks/horizon,mrunge/horizon,ging/horizon,JioCloud/horizon,takeshineshiro/horizon,blueboxgroup/horizon,Mirantis/mos-horizon,FNST-OpenStack/horizon,newrocknj/horizon,j4/horizon,mrunge/horizon,henaras/horizon,mrunge/openstack_horizon,Metaswitch/horizon,aaronorosen/horizon-congress,bac/horizon,kfox1111/horizon,FNST-OpenStack/horizon,flochaz/horizon,wangxiangyu/horizon,luhanhan/horizon,orbitfp7/horizon,endorphinl/horizon,aaronorosen/horizon-congress,django-leonardo/horizon,doug-fish/horizon,openstack/horizon,zouyapeng/horizon,damien-dg/horizon,endorphinl/horizon-fork,Tesora/tesora-horizon,anthonydillon/horizon,endorphinl/horizon,Solinea/horizon,FNST-OpenStack/horizon,BiznetGIO/horizon,watonyweng/horizon,noironetworks/horizon,henaras/horizon,watonyweng/horizon,promptworks/horizon,dan1/horizon-x509,wolverineav/horizon,karthik-suresh/horizon,luhanhan/horizon,Hodorable/0602,noironetworks/horizon,Daniex/hori
zon,maestro-hybrid-cloud/horizon,froyobin/horizon,VaneCloud/horizon,zouyapeng/horizon,philoniare/horizon,eayunstack/horizon,bac/horizon,gerrive/horizon,ChameleonCloud/horizon,django-leonardo/horizon,CiscoSystems/horizon,mandeepdhami/horizon,redhat-cip/horizon,anthonydillon/horizon,vladryk/horizon,CiscoSystems/horizon,xinwu/horizon,BiznetGIO/horizon,endorphinl/horizon-fork,tsufiev/horizon,eayunstack/horizon,redhat-openstack/horizon,dan1/horizon-x509,wolverineav/horizon,ChameleonCloud/horizon,agileblaze/OpenStackTwoFactorAuthentication,redhat-openstack/horizon,pranavtendolkr/horizon,sandvine/horizon,tqtran7/horizon,maestro-hybrid-cloud/horizon,henaras/horizon,CiscoSystems/horizon,mrunge/openstack_horizon,saydulk/horizon,tsufiev/horizon,Solinea/horizon,bac/horizon,pranavtendolkr/horizon,icloudrnd/automation_tools,tqtran7/horizon,luhanhan/horizon,Metaswitch/horizon,mandeepdhami/horizon,saydulk/horizon,nvoron23/avos,damien-dg/horizon,tellesnobrega/horizon,CiscoSystems/avos,zouyapeng/horizon,mrunge/openstack_horizon,xinwu/horizon,NeCTAR-RC/horizon,liyitest/rr,icloudrnd/automation_tools,vladryk/horizon,newrocknj/horizon,j4/horizon,coreycb/horizon,anthonydillon/horizon,j4/horizon,idjaw/horizon,coreycb/horizon,ChameleonCloud/horizon,vladryk/horizon,orbitfp7/horizon,endorphinl/horizon,damien-dg/horizon,orbitfp7/horizon,xinwu/horizon,dan1/horizon-x509,Hodorable/0602,openstack/horizon,VaneCloud/horizon,Mirantis/mos-horizon,xme1226/horizon,henaras/horizon,CiscoSystems/avos,philoniare/horizon,yeming233/horizon,mdavid/horizon,davidcusatis/horizon,pranavtendolkr/horizon,tellesnobrega/horizon,coreycb/horizon,philoniare/horizon,izadorozhna/dashboard_integration_tests,flochaz/horizon,dan1/horizon-proto,bigswitch/horizon |
2279aa0c450d53b04f774d9441e4fc0647466581 | bottery/message.py | bottery/message.py | import os
from datetime import datetime
import attr
from jinja2 import Environment, FileSystemLoader, select_autoescape
@attr.s
class Message:
id = attr.ib()
platform = attr.ib()
user = attr.ib()
text = attr.ib()
timestamp = attr.ib()
raw = attr.ib()
@property
def datetime(self):
return datetime.utcfromtimestamp(self.timestamp)
def render(message, template_name, context={}):
base_dir = os.path.join(os.getcwd(), 'templates')
paths = [base_dir]
# Include paths on settings
# paths.extend(settings.TEMPLATES)
env = Environment(
loader=FileSystemLoader(paths),
autoescape=select_autoescape(['html']))
template = env.get_template(template_name)
default_context = {
'user': message.user
}
default_context.update(context)
return template.render(**default_context)
| import os
from datetime import datetime
import attr
from jinja2 import Environment, FileSystemLoader, select_autoescape
@attr.s
class Message:
id = attr.ib()
platform = attr.ib()
user = attr.ib()
text = attr.ib()
timestamp = attr.ib()
raw = attr.ib()
@property
def datetime(self):
return datetime.utcfromtimestamp(self.timestamp)
def render(message, template_name, context={}):
base_dir = os.path.join(os.getcwd(), 'templates')
paths = [base_dir]
# Include paths on settings
# paths.extend(settings.TEMPLATES)
env = Environment(
loader=FileSystemLoader(paths),
autoescape=select_autoescape(['html']))
template = env.get_template(template_name)
default_context = {
'user': message.user,
'platform': message.platform,
}
default_context.update(context)
return template.render(**default_context)
| Send platform name to defaul template context | Send platform name to defaul template context
| Python | mit | rougeth/bottery |
22b697729d1ee43d322aa1187b3a5f6101f836a5 | odin/__init__.py | odin/__init__.py | __authors__ = "Tim Savage"
__author_email__ = "[email protected]"
__copyright__ = "Copyright (C) 2014 Tim Savage"
__version__ = "1.0"
# Disable logging if an explicit handler is not added
try:
import logging
logging.getLogger('odin').addHandler(logging.NullHandler())
except AttributeError:
pass # Fallback for python 2.6
from odin.fields import * # noqa
from odin.fields.composite import * # noqa
from odin.fields.virtual import * # noqa
from odin.mapping import * # noqa
from odin.resources import Resource # noqa
from odin.adapters import ResourceAdapter # noqa
| # Disable logging if an explicit handler is not added
import logging
logging.getLogger('odin.registration').addHandler(logging.NullHandler())
__authors__ = "Tim Savage"
__author_email__ = "[email protected]"
__copyright__ = "Copyright (C) 2014 Tim Savage"
__version__ = "1.0"
from odin.fields import * # noqa
from odin.fields.composite import * # noqa
from odin.fields.virtual import * # noqa
from odin.mapping import * # noqa
from odin.resources import Resource # noqa
from odin.adapters import ResourceAdapter # noqa
| Remove Python 2.6 backwards compatibility | Remove Python 2.6 backwards compatibility
| Python | bsd-3-clause | python-odin/odin |
59daf205869c42b3797aa9dbaaa97930cbca2417 | nanshe_workflow/ipy.py | nanshe_workflow/ipy.py | __author__ = "John Kirkham <[email protected]>"
__date__ = "$Nov 10, 2015 17:09$"
try:
from IPython.utils.shimmodule import ShimWarning
except ImportError:
class ShimWarning(Warning):
"""Warning issued by IPython 4.x regarding deprecated API."""
pass
import warnings
with warnings.catch_warnings():
warnings.filterwarnings('error', '', ShimWarning)
try:
# IPython 3
from IPython.html.widgets import FloatProgress
from IPython.parallel import Client
except ShimWarning:
# IPython 4
from ipywidgets import FloatProgress
from ipyparallel import Client
from IPython.display import display
| __author__ = "John Kirkham <[email protected]>"
__date__ = "$Nov 10, 2015 17:09$"
import json
import re
try:
from IPython.utils.shimmodule import ShimWarning
except ImportError:
class ShimWarning(Warning):
"""Warning issued by IPython 4.x regarding deprecated API."""
pass
import warnings
with warnings.catch_warnings():
warnings.filterwarnings('error', '', ShimWarning)
try:
# IPython 3
from IPython.html.widgets import FloatProgress
from IPython.parallel import Client
except ShimWarning:
# IPython 4
from ipywidgets import FloatProgress
from ipyparallel import Client
from IPython.display import display
import ipykernel
import notebook.notebookapp
import requests
def check_nbserverproxy():
"""
Return the url of the current jupyter notebook server.
"""
kernel_id = re.search(
"kernel-(.*).json",
ipykernel.connect.get_connection_file()
).group(1)
servers = notebook.notebookapp.list_running_servers()
for s in servers:
response = requests.get(
requests.compat.urljoin(s["url"], "api/sessions"),
params={"token": s.get("token", "")}
)
for n in json.loads(response.text):
if n["kernel"]["id"] == kernel_id:
# Found server that is running this Jupyter Notebook.
# Try to requests this servers port through nbserverproxy.
url = requests.compat.urljoin(
s["url"], "proxy/%i" % s["port"]
)
# If the proxy is running, it will redirect.
# If not, it will error out.
try:
requests.get(url).raise_for_status()
except requests.HTTPError:
return False
else:
return True
| Add function to check if nbserverproxy is running | Add function to check if nbserverproxy is running
Provides a simple check to see if the `nbserverproxy` is installed and
running. As this is a Jupyter server extension and this code is run from
the notebook, we can't simply import `nbserverproxy`. In fact that
wouldn't even work when using the Python 2 kernel even though the proxy
server could be running.
Instead to solve this problem try to identify the Jupyter Notebook
server we are running under. Once identified, attempt to query the proxy
server with the port of the Jupyter Notebook server. If the proxy server
is running, this will merely redirect to the Jupyter Notebook server and
return an HTTP 200 status. However if the proxy server is not running,
this will return a HTTP 404 error. There may be other errors that it
could raise. In any event, if the proxy redirects us, we know it is
working and if not we know it doesn't work.
| Python | apache-2.0 | nanshe-org/nanshe_workflow,DudLab/nanshe_workflow |
99d16198b5b61ba13a441a6546ccd1f7ce0b91bc | test/symbols/show_glyphs.py | test/symbols/show_glyphs.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e5fa"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f295"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
devicons_start = "e700"
devicons_end = "e7c5"
print "Devicons"
for ii in xrange(int(devicons_start, 16), int(devicons_end, 16) + 1):
print unichr(ii),
custom_start = "e5fa"
custom_end = "e62b"
print "\nCustom"
for ii in xrange(int(custom_start, 16), int(custom_end, 16) + 1):
print unichr(ii),
font_awesome_start = "f000"
font_awesome_end = "f295"
print "\nFont Awesome"
for ii in xrange(int(font_awesome_start, 16), int(font_awesome_end, 16) + 1):
print unichr(ii),
powerline_start = "e0a0"
powerline_end = "e0d4"
print "\nPowerline"
for ii in xrange(int(powerline_start, 16), int(powerline_end, 16) + 1):
print unichr(ii),
octicons_start = "f400"
octicons_end = "f4e5"
print "\nOcticons"
for ii in xrange(int(octicons_start, 16), int(octicons_end, 16) + 1):
print unichr(ii),
| Add octicons in font test script | Add octicons in font test script
| Python | mit | mkofinas/prompt-support,mkofinas/prompt-support |
2daeee0b9fabb4f1ad709bdbe9c8c12a6281d32d | axis/__main__.py | axis/__main__.py | """Read events and parameters from your Axis device."""
import asyncio
import argparse
import logging
import sys
from axis import AxisDevice
async def main(args):
loop = asyncio.get_event_loop()
device = AxisDevice(
loop=loop, host=args.host, username=args.username,
password=args.password, port=args.port)
if args.params:
await loop.run_in_executor(None, device.vapix.initialize_params)
await loop.run_in_executor(None, device.vapix.initialize_ports)
await loop.run_in_executor(None, device.vapix.initialize_users)
if not args.events:
return
if args.events:
device.start()
try:
while True:
await asyncio.sleep(1)
except KeyboardInterrupt:
pass
finally:
device.stop()
if __name__ == "__main__":
logging.basicConfig(format='%(message)s', level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument('host', type=str)
parser.add_argument('username', type=str)
parser.add_argument('password', type=str)
parser.add_argument('-p', '--port', type=int, default=80)
parser.add_argument('--events', action='store_true')
parser.add_argument('--params', action='store_true')
args = parser.parse_args()
asyncio.run(main(args))
| """Read events and parameters from your Axis device."""
import asyncio
import argparse
import logging
import sys
from axis import AxisDevice
async def main(args):
loop = asyncio.get_event_loop()
device = AxisDevice(
loop=loop, host=args.host, username=args.username,
password=args.password, port=args.port)
if args.params:
await loop.run_in_executor(None, device.vapix.initialize_params)
await loop.run_in_executor(None, device.vapix.initialize_ports)
await loop.run_in_executor(None, device.vapix.initialize_users)
if not args.events:
return
if args.events:
def event_handler(action, event):
print(action, event)
device.enable_events(event_callback=event_handler)
device.start()
try:
while True:
await asyncio.sleep(1)
except KeyboardInterrupt:
pass
finally:
device.stop()
if __name__ == "__main__":
logging.basicConfig(format='%(message)s', level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument('host', type=str)
parser.add_argument('username', type=str)
parser.add_argument('password', type=str)
parser.add_argument('-p', '--port', type=int, default=80)
parser.add_argument('--events', action='store_true')
parser.add_argument('--params', action='store_true')
args = parser.parse_args()
asyncio.run(main(args))
| Fix main failing on no event_callback | Fix main failing on no event_callback
| Python | mit | Kane610/axis |
c35e004ae3b2b9b8338673078f8ee523ac79e005 | alg_shell_sort.py | alg_shell_sort.py | from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def _gap_insertion_sort(a_list, start, gap):
for i in range(start + gap, len(a_list), gap):
current_value = a_list[i]
position = i
while (position >= gap) and (a_list[position - gap] > current_value):
a_list[position] = a_list[position - gap]
position = position - gap
a_list[position] = current_value
def shell_sort(a_list):
"""Shell Sort algortihm."""
sublist_count = len(a_list) // 2
while sublist_count > 0:
for start_pos in range(sublist_count):
_gap_insertion_sort(a_list, start_pos, sublist_count)
print('After increments of size {0}, a_list is \n{1}'
.format(sublist_count, a_list))
sublist_count = sublist_count // 2
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: \n{}'.format(a_list))
print('By Shell Sort: ')
shell_sort(a_list)
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def _gap_insertion_sort(a_list, start, gap):
for i in range(start + gap, len(a_list), gap):
current_value = a_list[i]
position = i
while (position >= gap) and (a_list[position - gap] > current_value):
a_list[position] = a_list[position - gap]
position = position - gap
a_list[position] = current_value
def shell_sort(a_list):
"""Shell Sort algortihm."""
sublist_count = len(a_list) // 2
while sublist_count > 0:
for start_pos in range(sublist_count):
_gap_insertion_sort(a_list, start_pos, sublist_count)
print('After increments of size {0}:\n{1}'
.format(sublist_count, a_list))
sublist_count = sublist_count // 2
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By Shell Sort: ')
shell_sort(a_list)
if __name__ == '__main__':
main()
| Revise print() in shell_sort() & main() | Revise print() in shell_sort() & main()
| Python | bsd-2-clause | bowen0701/algorithms_data_structures |
29061254e99f8e02e8285c3ebc965866c8c9d378 | testing/chess_engine_fight.py | testing/chess_engine_fight.py | #!/usr/bin/python
import subprocess, os, sys
if len(sys.argv) < 2:
print('Must specify file names of 2 chess engines')
for i in range(len(sys.argv)):
print(str(i) + ': ' + sys.argv[i])
sys.exit(1)
generator = './' + sys.argv[-2]
checker = './' + sys.argv[-1]
game_file = 'game.pgn'
count = 0
while True:
try:
os.remove(game_file)
except OSError:
pass
count += 1
print('Game #' + str(count))
out = subprocess.run([generator, '-random', '-random'])
if not os.path.isfile(game_file):
print('Game file not produced: ' + game_file)
print('generator = ' + generator)
print(out.returncode)
print(out.stdout)
print(out.stderr)
sys.exit()
result = subprocess.run([checker, '-confirm', game_file])
if result.returncode != 0:
print('Found discrepancy. See ' + game_file)
print('generator = ' + generator)
print('checker = ' + checker)
sys.exit()
generator, checker = checker, generator
| #!/usr/bin/python
import subprocess, os, sys
if len(sys.argv) < 2:
print('Must specify file names of 2 chess engines')
for i in range(len(sys.argv)):
print(str(i) + ': ' + sys.argv[i])
sys.exit(1)
generator = './' + sys.argv[-2]
checker = './' + sys.argv[-1]
game_file = 'game.pgn'
count = 0
while True:
try:
os.remove(game_file)
except OSError:
pass
if os.path.isfile(game_file):
print('Could not delete output file:', game_file)
count += 1
print('Game #' + str(count))
out = subprocess.run([generator, '-random', '-random'])
if not os.path.isfile(game_file):
print('Game file not produced: ' + game_file)
print('generator = ' + generator)
print(out.returncode)
print(out.stdout)
print(out.stderr)
sys.exit()
result = subprocess.run([checker, '-confirm', game_file])
if result.returncode != 0:
print('Found discrepancy. See ' + game_file)
print('generator = ' + generator)
print('checker = ' + checker)
sys.exit()
generator, checker = checker, generator
| Check that engine fight files are deleted before test | Check that engine fight files are deleted before test
| Python | mit | MarkZH/Genetic_Chess,MarkZH/Genetic_Chess,MarkZH/Genetic_Chess,MarkZH/Genetic_Chess,MarkZH/Genetic_Chess |
8c637c0f70908a00713014ef2d2ff72e3d5a81dc | prerequisites.py | prerequisites.py | #!/usr/bin/env python
import sys
# Check that we are in an activated virtual environment
try:
import os
virtual_env = os.environ['VIRTUAL_ENV']
except KeyError:
print("It doesn't look like you are in an activated virtual environment.")
print("Did you make one?")
print("Did you activate it?")
sys.exit(1)
# Check that we have installed Django
try:
import django
except ImportError:
print("It doesn't look like Django is installed.")
print("Are you in an activated virtual environment?")
print("Did you pip install from requirements.txt?")
sys.exit(1)
# Check that we have the expected version of Django
expected_version = (1, 7, 1)
try:
assert django.VERSION[:3] == expected_version
except AssertionError:
print("It doesn't look like you have the expected version "
"of Django installed.")
print("You have {0}".format('.'.join([str(i) for i in django.VERSION][:3])))
sys.exit(1)
# All good, have fun!
print("Everything looks okay to me... Have fun!")
| #!/usr/bin/env python
import sys
# Check that we are in an activated virtual environment
try:
import os
virtual_env = os.environ['VIRTUAL_ENV']
except KeyError:
print("It doesn't look like you are in an activated virtual environment.")
print("Did you make one?")
print("Did you activate it?")
sys.exit(1)
# Check that we have installed Django
try:
import django
except ImportError:
print("It doesn't look like Django is installed.")
print("Are you in an activated virtual environment?")
print("Did you pip install from requirements.txt?")
sys.exit(1)
# Check that we have the expected version of Django
expected_version = '1.7.1'
installed_version = django.get_version()
try:
assert installed_version == expected_version
except AssertionError:
print("It doesn't look like you have the expected version "
"of Django installed.")
print("You have {0}.".format(installed_version))
sys.exit(1)
# All good, have fun!
print("Everything looks okay to me... Have fun!")
| Use django.get_version in prerequisite checker | Use django.get_version in prerequisite checker
| Python | mit | mpirnat/django-tutorial-v2 |
2a724872cba5c48ddbd336f06460aa2ad851c6d0 | Pilot3/P3B5/p3b5.py | Pilot3/P3B5/p3b5.py | import os
import candle
file_path = os.path.dirname(os.path.realpath(__file__))
lib_path2 = os.path.abspath(os.path.join(file_path, '..', '..', 'common'))
sys.path.append(lib_path2)
REQUIRED = [
'learning_rate',
'learning_rate_min',
'momentum',
'weight_decay',
'grad_clip',
'seed',
'unrolled',
'batch_size',
'epochs',
]
class BenchmarkP3B5(candle.Benchmark):
""" Benchmark for P3B5 """
def set_locals(self):
""" Set parameters for the benchmark.
Args:
required: set of required parameters for the benchmark.
"""
if REQUIRED is not None:
self.required = set(REQUIRED)
| import os
import sys
import candle
file_path = os.path.dirname(os.path.realpath(__file__))
lib_path2 = os.path.abspath(os.path.join(file_path, '..', '..', 'common'))
sys.path.append(lib_path2)
REQUIRED = [
'learning_rate',
'learning_rate_min',
'momentum',
'weight_decay',
'grad_clip',
'seed',
'unrolled',
'batch_size',
'epochs',
]
class BenchmarkP3B5(candle.Benchmark):
""" Benchmark for P3B5 """
def set_locals(self):
""" Set parameters for the benchmark.
Args:
required: set of required parameters for the benchmark.
"""
if REQUIRED is not None:
self.required = set(REQUIRED)
| Fix missing import for sys | Fix missing import for sys
| Python | mit | ECP-CANDLE/Benchmarks,ECP-CANDLE/Benchmarks,ECP-CANDLE/Benchmarks |
7729c90679a74f268d7b0fd88c954fb583830794 | parser.py | parser.py | import webquery
from lxml import etree
import inspect
from expression import Expression
from collections import defaultdict
class Parser(object):
registry = defaultdict(dict)
@classmethod
def __init_subclass__(cls):
for name, member in inspect.getmembers(cls):
if isinstance(member, Expression):
cls.registry[cls.__name__][name] = member
@property
def fields(self):
cls = self.__class__
return cls.registry[cls.__name__]
def parse(self, url):
content = webquery.urlcontent(url)
root = etree.HTML(content, base_url=url)
data = {name: expr.parse(root) for name, expr in self.fields.items()}
data['url'] = url
return data
| import webquery
from lxml import etree
import inspect
from expression import Expression
from collections import defaultdict
class Parser(object):
registry = defaultdict(dict)
@classmethod
def __init_subclass__(cls):
for name, member in inspect.getmembers(cls):
if isinstance(member, Expression):
cls.registry[cls.__name__][name] = member
@property
def fields(self):
cls = self.__class__
return cls.registry[cls.__name__]
def canonical_url(self, url):
"""By overriding this method canonical url can be used"""
return url
def parse(self, url):
canonical_url = self.canonical_url(url)
content = webquery.urlcontent(canonical_url)
root = etree.HTML(content, base_url=canonical_url)
data = {name: expr.parse(root) for name, expr in self.fields.items()}
data['url'] = canonical_url
return data
| Add ability to customize URL | Add ability to customize URL
| Python | apache-2.0 | shiplu/webxpath |
b6813731696a03e04367ea3286092320391080e9 | puresnmp/__init__.py | puresnmp/__init__.py | """
This module contains the high-level functions to access the library. Care is
taken to make this as pythonic as possible and hide as many of the gory
implementations as possible.
"""
from x690.types import ObjectIdentifier
# !!! DO NOT REMOVE !!! The following import triggers the processing of SNMP
# Types and thus populates the Registry. If this is not included, Non x.690
# SNMP types will not be properly detected!
import puresnmp.types
from puresnmp.api.pythonic import PyWrapper
from puresnmp.api.raw import Client
from puresnmp.credentials import V1, V2C, V3
try:
import importlib.metadata as importlib_metadata
except ModuleNotFoundError:
import importlib_metadata # type: ignore
__version__ = importlib_metadata.version("puresnmp")
__all__ = [
"Client",
"ObjectIdentifier",
"PyWrapper",
"V1",
"V2C",
"V3",
"__version__",
]
| """
This module contains the high-level functions to access the library. Care is
taken to make this as pythonic as possible and hide as many of the gory
implementations as possible.
"""
from x690.types import ObjectIdentifier
# !!! DO NOT REMOVE !!! The following import triggers the processing of SNMP
# Types and thus populates the Registry. If this is not included, Non x.690
# SNMP types will not be properly detected!
import puresnmp.types
from puresnmp.api.pythonic import PyWrapper
from puresnmp.api.raw import Client
from puresnmp.credentials import V1, V2C, V3
try:
import importlib.metadata as importlib_metadata
except ModuleNotFoundError:
import importlib_metadata # type: ignore
__version__ = importlib_metadata.version("puresnmp") # type: ignore
__all__ = [
"Client",
"ObjectIdentifier",
"PyWrapper",
"V1",
"V2C",
"V3",
"__version__",
]
| Fix false-positive of a type-check | Fix false-positive of a type-check
| Python | mit | exhuma/puresnmp,exhuma/puresnmp |
bf5dd490cec02827d51c887506ce1f55d5012893 | astropy/tests/image_tests.py | astropy/tests/image_tests.py | import matplotlib
from matplotlib import pyplot as plt
from astropy.utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
# The developer versions of the form 3.1.x+... contain changes that will only
# be included in the 3.2.x release, so we update this here.
if MPL_VERSION[:3] == '3.1' and '+' in MPL_VERSION:
MPL_VERSION = '3.2'
ROOT = "http://{server}/testing/astropy/2018-10-24T12:38:34.134556/{mpl_version}/"
IMAGE_REFERENCE_DIR = (ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x') + ',' +
ROOT.format(server='www.astropy.org/astropy-data', mpl_version=MPL_VERSION[:3] + '.x'))
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
| import matplotlib
from matplotlib import pyplot as plt
from astropy.utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
# The developer versions of the form 3.1.x+... contain changes that will only
# be included in the 3.2.x release, so we update this here.
if MPL_VERSION[:3] == '3.1' and '+' in MPL_VERSION:
MPL_VERSION = '3.2'
ROOT = "http://{server}/testing/astropy/2019-08-02T11:38:58.288466/{mpl_version}/"
IMAGE_REFERENCE_DIR = (ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x') + ',' +
ROOT.format(server='www.astropy.org/astropy-data', mpl_version=MPL_VERSION[:3] + '.x'))
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
| Update URL for baseline images | Update URL for baseline images | Python | bsd-3-clause | mhvk/astropy,StuartLittlefair/astropy,larrybradley/astropy,StuartLittlefair/astropy,pllim/astropy,astropy/astropy,aleksandr-bakanov/astropy,mhvk/astropy,saimn/astropy,dhomeier/astropy,StuartLittlefair/astropy,stargaser/astropy,dhomeier/astropy,stargaser/astropy,mhvk/astropy,StuartLittlefair/astropy,bsipocz/astropy,dhomeier/astropy,bsipocz/astropy,lpsinger/astropy,larrybradley/astropy,lpsinger/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,larrybradley/astropy,bsipocz/astropy,larrybradley/astropy,saimn/astropy,lpsinger/astropy,lpsinger/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,astropy/astropy,pllim/astropy,lpsinger/astropy,mhvk/astropy,saimn/astropy,astropy/astropy,larrybradley/astropy,pllim/astropy,astropy/astropy,MSeifert04/astropy,stargaser/astropy,saimn/astropy,MSeifert04/astropy,pllim/astropy,MSeifert04/astropy,stargaser/astropy,astropy/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,mhvk/astropy,StuartLittlefair/astropy,pllim/astropy,saimn/astropy |
030e64d7aee6c3f0b3a0d0508ac1d5ece0bf4a40 | astroquery/fermi/__init__.py | astroquery/fermi/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Access to Fermi Gamma-ray Space Telescope data.
http://fermi.gsfc.nasa.gov
http://fermi.gsfc.nasa.gov/ssc/data/
"""
from astropy.config import ConfigurationItem
FERMI_URL = ConfigurationItem('fermi_url',
['http://fermi.gsfc.nasa.gov/cgi-bin/ssc/LAT/LATDataQuery.cgi'],
"Fermi query URL")
FERMI_TIMEOUT = ConfigurationItem('timeout', 60, 'time limit for connecting to FERMI server')
FERMI_RETRIEVAL_TIMEOUT = ConfigurationItem('retrieval_timeout', 120, 'time limit for retrieving a data file once it has been located')
from .core import FermiLAT, GetFermilatDatafile, get_fermilat_datafile
import warnings
warnings.warn("Experimental: Fermi-LAT has not yet been refactored to have its API match the rest of astroquery.")
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Access to Fermi Gamma-ray Space Telescope data.
http://fermi.gsfc.nasa.gov
http://fermi.gsfc.nasa.gov/ssc/data/
"""
from astropy.config import ConfigurationItem
FERMI_URL = ConfigurationItem('fermi_url',
['http://fermi.gsfc.nasa.gov/cgi-bin/ssc/LAT/LATDataQuery.cgi'],
"Fermi query URL")
FERMI_TIMEOUT = ConfigurationItem('timeout', 60, 'time limit for connecting to FERMI server')
FERMI_RETRIEVAL_TIMEOUT = ConfigurationItem('retrieval_timeout', 120, 'time limit for retrieving a data file once it has been located')
from .core import FermiLAT, GetFermilatDatafile, get_fermilat_datafile
import warnings
warnings.warn("Experimental: Fermi-LAT has not yet been refactored to have its API match the rest of astroquery.")
del ConfigurationItem # clean up namespace - prevents doc warnings
| Clean up namespace to get rid of sphinx warnings | Clean up namespace to get rid of sphinx warnings
| Python | bsd-3-clause | imbasimba/astroquery,imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery |
3ae40027f42f6228489d2bbc5c2da53c7df8387a | babybuddy/settings/heroku.py | babybuddy/settings/heroku.py | import os
import dj_database_url
from .base import * # noqa: F401,F403
DEBUG = False
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ['SECRET_KEY']
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': dj_database_url.config(conn_max_age=500)
}
# Email
SENDGRID_USERNAME = os.environ.get('SENDGRID_USERNAME', None) # noqa: F405
SENDGRID_PASSWORD = os.environ.get('SENDGRID_PASSWORD', None) # noqa: F405
# Use SendGrid if we have the addon installed, else just print to console which
# is accessible via Heroku logs
if SENDGRID_USERNAME and SENDGRID_PASSWORD:
EMAIL_HOST = 'smtp.sendgrid.net'
EMAIL_HOST_USER = SENDGRID_USERNAME
EMAIL_HOST_PASSWORD = SENDGRID_PASSWORD
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_TIMEOUT = 60
else:
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
| import os
import dj_database_url
from .base import * # noqa: F401,F403
DEBUG = os.environ.get('DEBUG', False)
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ['SECRET_KEY']
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': dj_database_url.config(conn_max_age=500)
}
# Email
SENDGRID_USERNAME = os.environ.get('SENDGRID_USERNAME', None) # noqa: F405
SENDGRID_PASSWORD = os.environ.get('SENDGRID_PASSWORD', None) # noqa: F405
# Use SendGrid if we have the addon installed, else just print to console which
# is accessible via Heroku logs
if SENDGRID_USERNAME and SENDGRID_PASSWORD:
EMAIL_HOST = 'smtp.sendgrid.net'
EMAIL_HOST_USER = SENDGRID_USERNAME
EMAIL_HOST_PASSWORD = SENDGRID_PASSWORD
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_TIMEOUT = 60
else:
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
| Add a DEBUG environment option to Heroku settings. | Add a DEBUG environment option to Heroku settings.
| Python | bsd-2-clause | cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy |
ef98ba0f2aa660b85a4116d46679bf30321f2a05 | scipy/spatial/transform/__init__.py | scipy/spatial/transform/__init__.py | """
Spatial Transformations (:mod:`scipy.spatial.transform`)
========================================================
.. currentmodule:: scipy.spatial.transform
This package implements various spatial transformations. For now,
only rotations are supported.
Rotations in 3 dimensions
-------------------------
.. autosummary::
:toctree: generated/
Rotation
Slerp
RotationSpline
"""
from __future__ import division, print_function, absolute_import
from .rotation import Rotation, Slerp
from ._rotation_spline import RotationSpline
__all__ = ['Rotation', 'Slerp']
from scipy._lib._testutils import PytestTester
test = PytestTester(__name__)
del PytestTester
| """
Spatial Transformations (:mod:`scipy.spatial.transform`)
========================================================
.. currentmodule:: scipy.spatial.transform
This package implements various spatial transformations. For now,
only rotations are supported.
Rotations in 3 dimensions
-------------------------
.. autosummary::
:toctree: generated/
Rotation
Slerp
RotationSpline
"""
from __future__ import division, print_function, absolute_import
from .rotation import Rotation, Slerp
from ._rotation_spline import RotationSpline
__all__ = ['Rotation', 'Slerp', 'RotationSpline']
from scipy._lib._testutils import PytestTester
test = PytestTester(__name__)
del PytestTester
| Add RotationSpline into __all__ of spatial.transform | MAINT: Add RotationSpline into __all__ of spatial.transform
| Python | bsd-3-clause | grlee77/scipy,pizzathief/scipy,endolith/scipy,Eric89GXL/scipy,gertingold/scipy,aeklant/scipy,anntzer/scipy,tylerjereddy/scipy,ilayn/scipy,scipy/scipy,matthew-brett/scipy,jor-/scipy,endolith/scipy,ilayn/scipy,person142/scipy,Eric89GXL/scipy,nmayorov/scipy,lhilt/scipy,arokem/scipy,endolith/scipy,ilayn/scipy,WarrenWeckesser/scipy,gertingold/scipy,e-q/scipy,vigna/scipy,arokem/scipy,perimosocordiae/scipy,Eric89GXL/scipy,jor-/scipy,zerothi/scipy,anntzer/scipy,lhilt/scipy,zerothi/scipy,jor-/scipy,anntzer/scipy,Stefan-Endres/scipy,tylerjereddy/scipy,arokem/scipy,zerothi/scipy,gertingold/scipy,aarchiba/scipy,Eric89GXL/scipy,WarrenWeckesser/scipy,ilayn/scipy,lhilt/scipy,vigna/scipy,e-q/scipy,arokem/scipy,perimosocordiae/scipy,lhilt/scipy,mdhaber/scipy,e-q/scipy,grlee77/scipy,nmayorov/scipy,rgommers/scipy,mdhaber/scipy,person142/scipy,aeklant/scipy,endolith/scipy,anntzer/scipy,Stefan-Endres/scipy,matthew-brett/scipy,WarrenWeckesser/scipy,jor-/scipy,aeklant/scipy,scipy/scipy,tylerjereddy/scipy,Eric89GXL/scipy,andyfaff/scipy,scipy/scipy,perimosocordiae/scipy,aeklant/scipy,mdhaber/scipy,WarrenWeckesser/scipy,scipy/scipy,jamestwebber/scipy,jamestwebber/scipy,Stefan-Endres/scipy,jamestwebber/scipy,aarchiba/scipy,pizzathief/scipy,person142/scipy,mdhaber/scipy,matthew-brett/scipy,lhilt/scipy,rgommers/scipy,e-q/scipy,pizzathief/scipy,zerothi/scipy,rgommers/scipy,andyfaff/scipy,vigna/scipy,rgommers/scipy,anntzer/scipy,matthew-brett/scipy,WarrenWeckesser/scipy,aarchiba/scipy,aarchiba/scipy,Stefan-Endres/scipy,arokem/scipy,rgommers/scipy,tylerjereddy/scipy,jamestwebber/scipy,e-q/scipy,person142/scipy,ilayn/scipy,ilayn/scipy,jamestwebber/scipy,aeklant/scipy,andyfaff/scipy,scipy/scipy,Stefan-Endres/scipy,scipy/scipy,vigna/scipy,Eric89GXL/scipy,grlee77/scipy,pizzathief/scipy,andyfaff/scipy,gertingold/scipy,andyfaff/scipy,anntzer/scipy,vigna/scipy,perimosocordiae/scipy,grlee77/scipy,grlee77/scipy,andyfaff/scipy,WarrenWeckesser/scipy,perimosocordiae/scipy,aarchiba/sc
ipy,endolith/scipy,zerothi/scipy,zerothi/scipy,nmayorov/scipy,gertingold/scipy,mdhaber/scipy,Stefan-Endres/scipy,matthew-brett/scipy,jor-/scipy,pizzathief/scipy,tylerjereddy/scipy,perimosocordiae/scipy,mdhaber/scipy,nmayorov/scipy,nmayorov/scipy,endolith/scipy,person142/scipy |
4c85300c5458053ac08a393b00513c80baf28031 | reqon/deprecated/__init__.py | reqon/deprecated/__init__.py | import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(query['$query'], reql)
def build_terms(reql, query):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
| import rethinkdb as r
from . import coerce, geo, operators, terms
from .coerce import COERSIONS
from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS
from .terms import TERMS
from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError
def query(query):
try:
reql = r.db(query['$db']).table(query['$table'])
except KeyError:
try:
reql = r.table(query['$table'])
except KeyError:
raise ReqonError('The query descriptor requires a $table key.')
return build_terms(reql, query['$query'])
def build_terms(reql, query):
for sequence in query:
term = sequence[0]
try:
reql = TERMS[term](reql, *sequence[1:])
except ReqonError:
raise
except r.ReqlError:
message = 'Invalid values for {0} with args {1}'
raise ReqonError(message.format(term, sequence[1:]))
except Exception:
message = 'Unknown exception, {0}: {1}'
raise ReqonError(message.format(term, sequence[1:]))
return reql
| Fix arguments order of reqon.deprecated.build_terms(). | Fix arguments order of reqon.deprecated.build_terms().
| Python | mit | dmpayton/reqon |
b659db91c0f4230d1c8ed69dd0cd2697fb573b85 | playerAction.py | playerAction.py | import mcpi.minecraft as minecraft
import time
mc = minecraft.Minecraft.create()
while True:
pos = mc.player.getPos()
x = pos.x
y = pos.y
z = pos.z
# Display position
#print x, y, z
time.sleep(2)
if x >= 343.300 and y <= 344.700 and z <= -301.300 and z >= -302.700 and y == 4:
print "Active action"
mc.postToChat("Active action")
| import mcpi.minecraft as minecraft
import time
mc = minecraft.Minecraft.create()
while True:
pos = mc.player.getPos()
x = pos.x
y = pos.y
z = pos.z
# Display position
#print x, y, z
time.sleep(2)
if x >= 343.300 and x <= 344.700 and z <= -301.300 and z >= -302.700 and y == 4:
print "Active action"
mc.postToChat("Active action")
| Update player position action script | Update player position action script
| Python | mit | Nekrofage/MinecraftPython |
a388280d56bb73e64dbea2244e210878d9371984 | NagiosWrapper/NagiosWrapper.py | NagiosWrapper/NagiosWrapper.py | import subprocess
nagiosPluginsCommandLines = [
"/usr/lib64/nagios/plugins/check_sensors",
"/usr/lib64/nagios/plugins/check_mailq -w 10 -c 20 -M postfix",
]
class NagiosWrapper:
def __init__(self, agentConfig, checksLogger, rawConfig):
self.agentConfig = agentConfig
self.checksLogger = checksLogger
self.rawConfig = rawConfig
def run(self):
data = {}
for pluginCommandLine in nagiosPluginsCommandLines:
# subprocess needs a list containing the command and
# its parameters
pluginCommandLineList = pluginCommandLine.split(" ")
# the check command to retrieve it's name
pluginCommand = pluginCommandLineList[0]
p = subprocess.Popen(
pluginCommandLineList,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
out, err = p.communicate()
self.checksLogger.debug('Output of {}: {}'.format(pluginCommand, out))
if err:
self.checksLogger.error('Error executing {}: {}'.format(
pluginCommand, err))
# the check command name = return value:
# 0 - OK
# 1 - WARNING
# 2 - CRITICAL
# 3 - UNKNOWN
data[pluginCommand.split("/")[-1]] = p.returncode
# add performance data if it exists
perfData = out.split("|")
if len(perfData) > 1:
data[perfData[1].split(";")[0].split("=")[0]] = perfData[
1].split(";")[0].split("=")[1]
return data
| import subprocess
nagiosPluginsCommandLines = [
"/usr/lib64/nagios/plugins/check_sensors",
"/usr/lib64/nagios/plugins/check_mailq -w 10 -c 20 -M postfix",
]
class NagiosWrapper:
def __init__(self, agentConfig, checksLogger, rawConfig):
self.agentConfig = agentConfig
self.checksLogger = checksLogger
self.rawConfig = rawConfig
def run(self):
data = {}
for pluginCommandLine in nagiosPluginsCommandLines:
# subprocess needs a list containing the command and
# its parameters
pluginCommandLineList = pluginCommandLine.split(" ")
# the check command to retrieve it's name
pluginCommand = pluginCommandLineList[0]
p = subprocess.Popen(
pluginCommandLineList,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
out, err = p.communicate()
self.checksLogger.debug('Output of {0}: {1}'.format(pluginCommand, out))
if err:
self.checksLogger.error(
'Error executing {0}: {1}'.format(pluginCommand, err)
)
# the check command name = return value:
# 0 - OK
# 1 - WARNING
# 2 - CRITICAL
# 3 - UNKNOWN
data[pluginCommand.split("/")[-1]] = p.returncode
# add performance data if it exists
perfData = out.split("|")
if len(perfData) > 1:
data[perfData[1].split(";")[0].split("=")[0]] = perfData[
1].split(";")[0].split("=")[1]
return data
| Add support for Python 2.6 | Add support for Python 2.6 | Python | bsd-3-clause | bastiendonjon/sd-agent-plugins,shanethehat/sd-agent-plugins,bencer/sd-agent-plugins,shanethehat/sd-agent-plugins,bencer/sd-agent-plugins,bastiendonjon/sd-agent-plugins |
05715aca84152c78cf0b4d5d7b751ecfa3a9f35a | tinyblog/views/__init__.py | tinyblog/views/__init__.py | from datetime import datetime
from django.http import Http404
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.views.generic import (
ArchiveIndexView,
YearArchiveView,
MonthArchiveView,
)
from tinyblog.models import Post
def post(request, year, month, slug):
post = get_object_or_404(Post, created__year=year, created__month=month,
slug=slug)
if post.created > datetime.now():
if not request.user.is_staff:
raise Http404
return render_to_response('tinyblog/post.html',
{'post': post},
context_instance=RequestContext(request))
class TinyBlogIndexView(ArchiveIndexView):
date_field = 'created'
def get_queryset(self):
return Post.published_objects.all()
index_view = TinyBlogIndexView.as_view()
class TinyBlogYearView(YearArchiveView):
date_field = 'created'
make_object_list = True
def get_queryset(self):
return Post.published_objects.all()
year_view = TinyBlogYearView.as_view()
class TinyBlogMonthView(MonthArchiveView):
date_field = 'created'
month_format = '%m'
def get_queryset(self):
return Post.published_objects.all()
month_view = TinyBlogMonthView.as_view()
| from datetime import datetime
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.views.generic import (
ArchiveIndexView,
YearArchiveView,
MonthArchiveView,
DetailView,
)
from tinyblog.models import Post
class TinyBlogPostView(DetailView):
template_name = 'tinyblog/post.html'
def get_object(self):
post = get_object_or_404(
Post,
created__year=int(self.kwargs['year']),
created__month=int(self.kwargs['month']),
slug=self.kwargs['slug']
)
if post.created > datetime.now():
if not self.request.user.is_staff:
raise Http404
return post
post = TinyBlogPostView.as_view()
class TinyBlogIndexView(ArchiveIndexView):
date_field = 'created'
def get_queryset(self):
return Post.published_objects.all()
index_view = TinyBlogIndexView.as_view()
class TinyBlogYearView(YearArchiveView):
date_field = 'created'
make_object_list = True
def get_queryset(self):
return Post.published_objects.all()
year_view = TinyBlogYearView.as_view()
class TinyBlogMonthView(MonthArchiveView):
date_field = 'created'
month_format = '%m'
def get_queryset(self):
return Post.published_objects.all()
month_view = TinyBlogMonthView.as_view()
| Switch the main post detail view to a CBV | Switch the main post detail view to a CBV
| Python | bsd-3-clause | dominicrodger/tinyblog,dominicrodger/tinyblog |
fcf626b6cb898bba294f8f4e2ecd2ff57cd144a0 | scripts/syscalls.py | scripts/syscalls.py | import sim, syscall_strings, platform
if platform.architecture()[0] == '64bit':
__syscall_strings = syscall_strings.syscall_strings_64
else:
__syscall_strings = syscall_strings.syscall_strings_32
def syscall_name(syscall_number):
return '%s[%d]' % (__syscall_strings.get(syscall_number, 'unknown'), syscall_number)
class LogSyscalls:
def hook_syscall_enter(self, threadid, coreid, time, syscall_number, args):
print '[SYSCALL] @%10d ns: %-27s thread(%3d) core(%3d) args%s' % (time/1e6, syscall_name(syscall_number), threadid, coreid, args)
def hook_syscall_exit(self, threadid, coreid, time, ret_val, emulated):
print '[SYSCALL] @%10d ns: exit thread(%3d) core(%3d) ret_val(%d) emulated(%s)' % (time/1e6, threadid, coreid, ret_val, emulated)
sim.util.register(LogSyscalls())
| import sim, syscall_strings, sys
if sys.maxsize == 2**31-1:
__syscall_strings = syscall_strings.syscall_strings_32
else:
__syscall_strings = syscall_strings.syscall_strings_64
def syscall_name(syscall_number):
return '%s[%d]' % (__syscall_strings.get(syscall_number, 'unknown'), syscall_number)
class LogSyscalls:
def hook_syscall_enter(self, threadid, coreid, time, syscall_number, args):
print '[SYSCALL] @%10d ns: %-27s thread(%3d) core(%3d) args%s' % (time/1e6, syscall_name(syscall_number), threadid, coreid, args)
def hook_syscall_exit(self, threadid, coreid, time, ret_val, emulated):
print '[SYSCALL] @%10d ns: exit thread(%3d) core(%3d) ret_val(%d) emulated(%s)' % (time/1e6, threadid, coreid, ret_val, emulated)
sim.util.register(LogSyscalls())
| Use different way to determine 32/64-bit which returns mode of current binary, not of the system | [scripts] Use different way to determine 32/64-bit which returns mode of current binary, not of the system
| Python | mit | abanaiyan/sniper,abanaiyan/sniper,abanaiyan/sniper,abanaiyan/sniper,abanaiyan/sniper |
4d641110454f114d1f179d306fb63166e66fd6cf | src/foremast/slacknotify/slack_notification.py | src/foremast/slacknotify/slack_notification.py | """Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
self.info = {'app': app, 'env': env, 'properties': prop_path}
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info['timestamp'] = timestamp
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
| """Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info = {'app': app,
'env': env,
'properties': prop_path,
'timestamp': timestamp}
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
| Move timestamp before dict for insertion | fix: Move timestamp before dict for insertion
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast |
7e11e57ee4f9fc1dc3c967c9b2d26038a7727f72 | wqflask/wqflask/database.py | wqflask/wqflask/database.py | # Module to initialize sqlalchemy with flask
import os
import sys
from string import Template
from typing import Tuple
from urllib.parse import urlparse
import importlib
import MySQLdb
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
def read_from_pyfile(pyfile, setting):
orig_sys_path = sys.path[:]
sys.path.insert(0, os.path.dirname(pyfile))
module = importlib.import_module(os.path.basename(pyfile).strip(".py"))
sys.path = orig_sys_path[:]
return module.__dict__.get(setting)
def sql_uri():
"""Read the SQL_URI from the environment or settings file."""
return os.environ.get(
"SQL_URI", read_from_pyfile(
os.environ.get(
"GN2_SETTINGS", os.path.abspath("../etc/default_settings.py")),
"SQL_URI"))
def parse_db_url(sql_uri: str) -> Tuple:
"""
Parse SQL_URI env variable from an sql URI
e.g. 'mysql://user:pass@host_name/db_name'
"""
parsed_db = urlparse(sql_uri)
return (
parsed_db.hostname, parsed_db.username, parsed_db.password,
parsed_db.path[1:], parsed_db.port)
def database_connection():
"""Returns a database connection"""
host, user, passwd, db_name, port = parse_db_url(sql_uri())
return MySQLdb.connect(
db=db_name, user=user, passwd=passwd, host=host, port=port)
| # Module to initialize sqlalchemy with flask
import os
import sys
from string import Template
from typing import Tuple
from urllib.parse import urlparse
import importlib
import MySQLdb
def sql_uri():
"""Read the SQL_URI from the environment or settings file."""
return os.environ.get(
"SQL_URI", read_from_pyfile(
os.environ.get(
"GN2_SETTINGS", os.path.abspath("../etc/default_settings.py")),
"SQL_URI"))
def parse_db_url(sql_uri: str) -> Tuple:
"""
Parse SQL_URI env variable from an sql URI
e.g. 'mysql://user:pass@host_name/db_name'
"""
parsed_db = urlparse(sql_uri)
return (
parsed_db.hostname, parsed_db.username, parsed_db.password,
parsed_db.path[1:], parsed_db.port)
def database_connection():
"""Returns a database connection"""
host, user, passwd, db_name, port = parse_db_url(sql_uri())
return MySQLdb.connect(
db=db_name, user=user, passwd=passwd, host=host, port=port)
| Delete unused function and imports. | Delete unused function and imports.
* wqflask/wqflask/database.py: Remove unused sqlalchemy imports.
(read_from_pyfile): Delete it.
| Python | agpl-3.0 | genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2 |
bdcca9f505c185fa0ade4e93a88b8dabc85f9176 | pysearch/urls.py | pysearch/urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pysearch.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^search/', include('search.urls')),
)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pysearch.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^search/', include('search.urls')),
)
| Remove access to admin site | Remove access to admin site
| Python | mit | nh0815/PySearch,nh0815/PySearch |
5c787be025ca99da339aae221b714bd1d8f2d0bd | route/station.py | route/station.py | from flask import request
from flask.ext import restful
from route.base import api
from model.base import db
from model.user import User
import logging
class StationAPI(restful.Resource):
def post(self):
data = request.get_json()
station = Station(data['name'], data['address'], data['address2'], data['town'], data['district'], data['lat'], data['lng'], data['bike_stands'], data['banking'])
db.session.add(station)
db.session.commit()
return Station.query.first()
api.add_resource(StationAPI, "/station")
| from flask import request
from flask.ext import restful
from route.base import api
from model.base import db
from model.user import User
import logging
class StationAPI(restful.Resource):
def post(self):
data = request.get_json()
station = Station(data['name'], data['address'], data['address2'], data['town'], data['district'], data['lat'], data['lng'], data['bike_stands'], data['banking'])
db.session.add(station)
db.session.commit()
return Station.query.first()
def get(self, station_id):
data = request.get
api.add_resource(StationAPI, "/station")
| Add start of get funtion | Add start of get funtion
| Python | mit | hexa4313/velov-companion-server,hexa4313/velov-companion-server |
d64e85f96483e6b212adca38ca5fa89c64508701 | froide_campaign/listeners.py | froide_campaign/listeners.py | from .models import Campaign, InformationObject
def connect_info_object(sender, **kwargs):
reference = kwargs.get('reference')
if reference is None:
return
if 'campaign' not in reference:
return
try:
campaign, slug = reference['campaign'].split('@', 1)
except (ValueError, IndexError):
return
try:
campaign_pk = int(campaign)
except ValueError:
return
try:
campaign = Campaign.objects.get(pk=campaign_pk)
except Campaign.DoesNotExist:
return
try:
iobj = InformationObject.objects.get(campaign=campaign, slug=slug)
except InformationObject.DoesNotExist:
return
if iobj.foirequest is not None:
return
if iobj.publicbody != sender.public_body:
return
if not sender.public:
return
iobj.foirequest = sender
iobj.save()
| from .models import Campaign, InformationObject
def connect_info_object(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
if not reference.startswith('campaign:'):
return
namespace, campaign_value = reference.split(':', 1)
try:
campaign, slug = campaign_value.split('@', 1)
except (ValueError, IndexError):
return
try:
campaign_pk = int(campaign)
except ValueError:
return
try:
campaign = Campaign.objects.get(pk=campaign_pk)
except Campaign.DoesNotExist:
return
try:
iobj = InformationObject.objects.get(campaign=campaign, slug=slug)
except InformationObject.DoesNotExist:
return
if iobj.foirequest is not None:
return
if iobj.publicbody != sender.public_body:
return
if not sender.public:
return
iobj.foirequest = sender
iobj.save()
| Adjust to new reference handling | Adjust to new reference handling | Python | mit | okfde/froide-campaign,okfde/froide-campaign,okfde/froide-campaign |
b5fa4f9eb11575ddd8838bc53817854de831337f | dumpling/views.py | dumpling/views.py | from django.conf import settings
from django.shortcuts import get_object_or_404
from django.views.generic import DetailView
from .models import Page
class PageView(DetailView):
context_object_name = 'page'
def get_queryset(self):
return Page.objects.published().prefetch_related('pagewidget__widget')
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
paths = list(filter(None, self.kwargs.get('path', '/').split('/')))
if not paths:
paths = ['']
paths.reverse()
query = {}
prefix = 'path'
for step in paths:
query[prefix] = step
prefix = 'parent__' + prefix
query[prefix.replace('path', 'isnull')] = True
return get_object_or_404(queryset, **query)
def get_template_names(self):
return self.object.template[len(settings.USER_TEMPLATES_PATH):]
#
# Management Interface
#
| from django.conf import settings
from django.shortcuts import get_object_or_404, render
from django.views.generic import DetailView
from .models import Page
class PageView(DetailView):
context_object_name = 'page'
def get_queryset(self):
return Page.objects.published().prefetch_related('pagewidget_set__widget')
def get_object(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
paths = list(filter(None, self.kwargs.get('path', '/').split('/')))
if not paths:
paths = ['']
paths.reverse()
query = {}
prefix = 'path'
for step in paths:
query[prefix] = step
prefix = 'parent__' + prefix
query[prefix.replace('path', 'isnull')] = True
return get_object_or_404(queryset, **query)
def get_template_names(self):
return self.object.template[len(settings.USER_TEMPLATES_PATH):]
def styles(request, name):
namespace = Namespace()
for tv in ThemeValue.objects.all():
namespace.set_variable('${}-{}'.format(tv.group, tv.name), String(tv.value))
compiler = Compiler(namespace=namespace)
return compiler.compile_string(src)
| Fix prefetch. Add styles view | Fix prefetch. Add styles view
| Python | mit | funkybob/dumpling,funkybob/dumpling |
5cf66e26259f5b4c78e61530822fa19dfc117206 | settings_test.py | settings_test.py | INSTALLED_APPS = (
'oauth_tokens',
'taggit',
'vkontakte_groups',
)
OAUTH_TOKENS_VKONTAKTE_CLIENT_ID = 3430034
OAUTH_TOKENS_VKONTAKTE_CLIENT_SECRET = 'b0FwzyKtO8QiQmgWQMTz'
OAUTH_TOKENS_VKONTAKTE_SCOPE = ['ads,wall,photos,friends,stats']
OAUTH_TOKENS_VKONTAKTE_USERNAME = '+919665223715'
OAUTH_TOKENS_VKONTAKTE_PASSWORD = 'githubovich'
OAUTH_TOKENS_VKONTAKTE_PHONE_END = '96652237' | INSTALLED_APPS = (
'oauth_tokens',
'taggit',
'vkontakte_groups',
)
OAUTH_TOKENS_VKONTAKTE_CLIENT_ID = 3430034
OAUTH_TOKENS_VKONTAKTE_CLIENT_SECRET = 'b0FwzyKtO8QiQmgWQMTz'
OAUTH_TOKENS_VKONTAKTE_SCOPE = ['ads,wall,photos,friends,stats']
OAUTH_TOKENS_VKONTAKTE_USERNAME = '+919665223715'
OAUTH_TOKENS_VKONTAKTE_PASSWORD = 'githubovich'
OAUTH_TOKENS_VKONTAKTE_PHONE_END = '96652237'
# Set VK API Timeout
VKONTAKTE_API_REQUEST_TIMEOUT = 7
| Fix RuntimeError: maximum recursion depth | Fix RuntimeError: maximum recursion depth
| Python | bsd-3-clause | ramusus/django-vkontakte-groups-statistic,ramusus/django-vkontakte-groups-statistic,ramusus/django-vkontakte-groups-statistic |
a81fbdd334dc475554e77bbb71ae00985f2d23c4 | eventlog/stats.py | eventlog/stats.py | from datetime import datetime, timedelta
from django.contrib.auth.models import User
def stats():
return {
"used_site_last_thirty_days": User.objects.filter(log__timestamp__gt=datetime.now() - timedelta(days=30)).distinct().count(),
"used_site_last_seven_days": User.objects.filter(log__timestamp__gt=datetime.now() - timedelta(days=7)).distinct().count()
}
| from datetime import datetime, timedelta
from django.contrib.auth.models import User
def used_active(days):
used = User.objects.filter(
log__timestamp__gt=datetime.now() - timedelta(days=days)
).distinct().count()
active = User.objects.filter(
log__timestamp__gt=datetime.now() - timedelta(days=days)
).exclude(
date_joined__gt=datetime.now() - timedelta(days=days)
).distinct().count()
return used, active
def stats():
used_seven, active_seven = used_active(7)
used_thirty, active_thirty = used_active(30)
return {
"used_seven": used_seven,
"used_thirty": used_thirty,
"active_seven": active_seven,
"active_thirty": active_thirty
}
| Add active_seven and active_thirty users | Add active_seven and active_thirty users
| Python | bsd-3-clause | ConsumerAffairs/django-eventlog-ca,rosscdh/pinax-eventlog,KleeTaurus/pinax-eventlog,jawed123/pinax-eventlog,pinax/pinax-eventlog |
850803d02868e20bc637f777ee201ac778c63606 | lms/djangoapps/edraak_misc/utils.py | lms/djangoapps/edraak_misc/utils.py | from courseware.access import has_access
from django.conf import settings
def is_certificate_allowed(user, course):
return (course.has_ended()
and settings.FEATURES.get('ENABLE_ISSUE_CERTIFICATE')
or has_access(user, 'staff', course.id))
| from courseware.access import has_access
from django.conf import settings
def is_certificate_allowed(user, course):
if not settings.FEATURES.get('ENABLE_ISSUE_CERTIFICATE'):
return False
return course.has_ended() or has_access(user, 'staff', course.id)
| Disable certificate for all if ENABLE_ISSUE_CERTIFICATE == False | Disable certificate for all if ENABLE_ISSUE_CERTIFICATE == False
| Python | agpl-3.0 | Edraak/edx-platform,Edraak/edx-platform,Edraak/circleci-edx-platform,Edraak/circleci-edx-platform,Edraak/circleci-edx-platform,Edraak/edx-platform,Edraak/edx-platform,Edraak/circleci-edx-platform,Edraak/circleci-edx-platform,Edraak/edx-platform |
b3a144e9dfba915d186fd1243515172780611689 | models/waifu_model.py | models/waifu_model.py | from models.base_model import BaseModel
from datetime import datetime
from models.user_model import UserModel
from peewee import CharField, TextField, DateTimeField, IntegerField, ForeignKeyField
WAIFU_SHARING_STATUS_PRIVATE = 1
WAIFU_SHARING_STATUS_PUBLIC_MODERATION = 2
WAIFU_SHARING_STATUS_PUBLIC = 3
class WaifuModel(BaseModel):
class Meta:
db_table = 'waifus'
name = CharField(max_length=128, null=False)
description = TextField(null=False)
pic = CharField(max_length=128, null=False)
created_at = DateTimeField(null=False, default=datetime.now)
updated_at = DateTimeField(null=False, default=datetime.now)
rating = IntegerField(null=False, default=0)
sharing_status = IntegerField(null=False, default=WAIFU_SHARING_STATUS_PRIVATE)
owner = ForeignKeyField(UserModel, related_name='waifus_created_by_me')
| from models.base_model import BaseModel
from datetime import datetime
from models.user_model import UserModel
from peewee import CharField, TextField, DateTimeField, IntegerField, ForeignKeyField
WAIFU_SHARING_STATUS_PRIVATE = 1
WAIFU_SHARING_STATUS_PUBLIC_MODERATION = 2
WAIFU_SHARING_STATUS_PUBLIC = 3
class WaifuModel(BaseModel):
class Meta:
db_table = 'waifus'
name = CharField(max_length=128, null=False)
description = TextField(null=False)
pic = CharField(max_length=128, null=False)
created_at = DateTimeField(null=False, default=datetime.now)
updated_at = DateTimeField(null=False, default=datetime.now)
rating = IntegerField(null=False, default=0)
sharing_status = IntegerField(null=False, default=WAIFU_SHARING_STATUS_PRIVATE)
owner = ForeignKeyField(UserModel, related_name='waifus_created_by_me')
def to_json(self):
json = super(WaifuModel, self).to_json()
json['users_count'] = self.users.count()
return json
| Add users count to json representation. | Add users count to json representation.
| Python | cc0-1.0 | sketchturnerr/WaifuSim-backend,sketchturnerr/WaifuSim-backend |
0474872ea9db994928fa6848b89b847b4fc80986 | smst/__init__.py | smst/__init__.py | __version__ = '0.2.0'
| # _ _
# ___ _ __ ___ ___ | |_ ___ ___ | |___
# / __| '_ ` _ \/ __| | __/ _ \ / _ \| / __|
# \__ \ | | | | \__ \ | || (_) | (_) | \__ \
# |___/_| |_| |_|___/ \__\___/ \___/|_|___/
#
# ~ Spectral Modeling Synthesis Tools ~
#
__version__ = '0.2.0'
| Add a nice banner made using the figlet tool. | Add a nice banner made using the figlet tool.
| Python | agpl-3.0 | bzamecnik/sms-tools,bzamecnik/sms-tools,bzamecnik/sms-tools |
f6841a527bd8b52aa88c4c3b5980a0001387f33e | scoring/models/regressors.py | scoring/models/regressors.py | from sklearn.ensemble import RandomForestRegressor as randomforest
from sklearn.svm import SVR as svm
from sklearn.pls import PLSRegression as pls
from .neuralnetwork import neuralnetwork
__all__ = ['randomforest', 'svm', 'pls', 'neuralnetwork']
| from sklearn.ensemble import RandomForestRegressor
from sklearn.svm import SVR
from sklearn.pls import PLSRegression
from .neuralnetwork import neuralnetwork
__all__ = ['randomforest', 'svm', 'pls', 'neuralnetwork']
class randomforest(RandomForestRegressor):
pass
class svm(SVR):
pass
class svm(PLSRegression):
pass
| Make models inherit from sklearn | Make models inherit from sklearn
| Python | bsd-3-clause | mwojcikowski/opendrugdiscovery |
5b4c9cebd31e81f775d996ec9168b72d07142caa | follower/pid.py | follower/pid.py |
import sys
from time import time
class PID(object):
def __init__(self):
"""initizes value for the PID"""
self.kd = 0
self.ki = 0
self.kp = 1
self.previous_error = 0
self.integral_error = 0
def set_k_values(self, kp, kd, ki):
self.kp = kp
self.ki = ki
self.kd = kd
def pid(self, target, process_var, timestep):
current_error = (target - process_var)
p_error = self.kp * current_error
d_error = self.kd * (current_error - self.previous_error) \
/ timestep
self.integral_error = (
current_error + self.previous_error) / 2 \
+ self.integral_error
i_error = self.ki * self.integral_error
total_error = p_error + d_error + i_error
self.previous_error = current_error
return total_error
|
import sys
from time import time
class PID(object):
def __init__(self):
"""initizes value for the PID"""
self.kd = 0
self.ki = 0
self.kp = 1
self.previous_error = 0
self.integral_error = 0
def set_k_values(self, kp, kd, ki):
self.kp = kp
self.ki = ki
self.kd = kd
def pid(self, target, process_var, timestep):
current_error = (target + process_var)
p_error = self.kp * current_error
d_error = self.kd * (current_error - self.previous_error) \
/ timestep
self.integral_error = (
current_error + self.previous_error) / 2 \
+ self.integral_error
i_error = self.ki * self.integral_error
total_error = p_error + d_error + i_error
self.previous_error = current_error
return total_error
| Update to follower, reduce speed to motors. | Update to follower, reduce speed to motors.
| Python | bsd-2-clause | deepakiam/bot,IEEERobotics/bot,IEEERobotics/bot,IEEERobotics/bot,deepakiam/bot,deepakiam/bot |
740cc8b601eb2cd24bcabef59db9a38d2efde70f | netbox/dcim/fields.py | netbox/dcim/fields.py | from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from netaddr import AddrFormatError, EUI, mac_unix_expanded
class ASNField(models.BigIntegerField):
description = "32-bit ASN field"
default_validators = [
MinValueValidator(1),
MaxValueValidator(4294967295),
]
class mac_unix_expanded_uppercase(mac_unix_expanded):
word_fmt = '%.2X'
class MACAddressField(models.Field):
description = "PostgreSQL MAC Address field"
def python_type(self):
return EUI
def from_db_value(self, value, expression, connection, context):
return self.to_python(value)
def to_python(self, value):
if value is None:
return value
try:
return EUI(value, version=48, dialect=mac_unix_expanded_uppercase)
except AddrFormatError as e:
raise ValidationError("Invalid MAC address format: {}".format(value))
def db_type(self, connection):
return 'macaddr'
def get_prep_value(self, value):
if not value:
return None
return str(self.to_python(value))
| from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from netaddr import AddrFormatError, EUI, mac_unix_expanded
class ASNField(models.BigIntegerField):
description = "32-bit ASN field"
default_validators = [
MinValueValidator(1),
MaxValueValidator(4294967295),
]
class mac_unix_expanded_uppercase(mac_unix_expanded):
word_fmt = '%.2X'
class MACAddressField(models.Field):
description = "PostgreSQL MAC Address field"
def python_type(self):
return EUI
def from_db_value(self, value, expression, connection):
return self.to_python(value)
def to_python(self, value):
if value is None:
return value
try:
return EUI(value, version=48, dialect=mac_unix_expanded_uppercase)
except AddrFormatError as e:
raise ValidationError("Invalid MAC address format: {}".format(value))
def db_type(self, connection):
return 'macaddr'
def get_prep_value(self, value):
if not value:
return None
return str(self.to_python(value))
| Remove deprecated context parameter from from_db_value | Remove deprecated context parameter from from_db_value
| Python | apache-2.0 | digitalocean/netbox,digitalocean/netbox,digitalocean/netbox,digitalocean/netbox |
0855f9b5a9d36817139e61937419553f6ad21f78 | symposion/proposals/urls.py | symposion/proposals/urls.py | from django.conf.urls.defaults import *
urlpatterns = patterns("symposion.proposals.views",
url(r"^submit/$", "proposal_submit", name="proposal_submit"),
url(r"^submit/(\w+)/$", "proposal_submit_kind", name="proposal_submit_kind"),
url(r"^(\d+)/$", "proposal_detail", name="proposal_detail"),
url(r"^(\d+)/edit/$", "proposal_edit", name="proposal_edit"),
url(r"^(\d+)/speakers/$", "proposal_speaker_manage", name="proposal_speaker_manage"),
url(r"^(\d+)/cancel/$", "proposal_cancel", name="proposal_cancel"),
url(r"^(\d+)/leave/$", "proposal_leave", name="proposal_leave"),
url(r"^(\d+)/join/$", "proposal_pending_join", name="proposal_pending_join"),
url(r"^(\d+)/decline/$", "proposal_pending_decline", name="proposal_pending_decline"),
url(r"^(\d+)/document/create/$", "document_create", name="proposal_document_create"),
url(r"^document/(\d+)/delete/$", "document_delete", name="proposal_document_delete"),
url(r"^document/(\d+)/([^/]+)$", "document_download", name="proposal_document_download"),
)
| from django.conf.urls import patterns, url
urlpatterns = patterns("symposion.proposals.views",
url(r"^submit/$", "proposal_submit", name="proposal_submit"),
url(r"^submit/([\w-]+)/$", "proposal_submit_kind", name="proposal_submit_kind"),
url(r"^(\d+)/$", "proposal_detail", name="proposal_detail"),
url(r"^(\d+)/edit/$", "proposal_edit", name="proposal_edit"),
url(r"^(\d+)/speakers/$", "proposal_speaker_manage", name="proposal_speaker_manage"),
url(r"^(\d+)/cancel/$", "proposal_cancel", name="proposal_cancel"),
url(r"^(\d+)/leave/$", "proposal_leave", name="proposal_leave"),
url(r"^(\d+)/join/$", "proposal_pending_join", name="proposal_pending_join"),
url(r"^(\d+)/decline/$", "proposal_pending_decline", name="proposal_pending_decline"),
url(r"^(\d+)/document/create/$", "document_create", name="proposal_document_create"),
url(r"^document/(\d+)/delete/$", "document_delete", name="proposal_document_delete"),
url(r"^document/(\d+)/([^/]+)$", "document_download", name="proposal_document_download"),
)
| Allow dashes in proposal kind slugs | Allow dashes in proposal kind slugs
We can see from the setting PROPOSAL_FORMS that at least one proposal kind,
Sponsor Tutorial, has a slug with a dash in it: sponsor-tutorial. Yet the
URL pattern for submitting a proposal doesn't accept dashes in the slug.
Fix it.
| Python | bsd-3-clause | njl/pycon,pyconjp/pyconjp-website,njl/pycon,Diwahars/pycon,smellman/sotmjp-website,pyconjp/pyconjp-website,pyconjp/pyconjp-website,PyCon/pycon,osmfj/sotmjp-website,njl/pycon,Diwahars/pycon,PyCon/pycon,pyconjp/pyconjp-website,osmfj/sotmjp-website,osmfj/sotmjp-website,PyCon/pycon,osmfj/sotmjp-website,smellman/sotmjp-website,smellman/sotmjp-website,PyCon/pycon,smellman/sotmjp-website,Diwahars/pycon,Diwahars/pycon,njl/pycon |
da9c0743657ecc890c2a8503ea4bbb681ae00178 | tests/chainer_tests/functions_tests/math_tests/test_arctanh.py | tests/chainer_tests/functions_tests/math_tests/test_arctanh.py | import unittest
from chainer import testing
import chainer.functions as F
import numpy
def make_data(shape, dtype):
# Input values close to -1 or 1 would make tests unstable
x = numpy.random.uniform(-0.9, 0.9, shape).astype(dtype, copy=False)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype, copy=False)
ggx = numpy.random.uniform(-1, 1, shape).astype(dtype, copy=False)
return x, gy, ggx
@testing.unary_math_function_unittest(F.arctanh, make_data=make_data)
class TestArctanh(unittest.TestCase):
pass
| import unittest
from chainer import testing
import chainer.functions as F
import numpy
def make_data(shape, dtype):
# Input values close to -1 or 1 would make tests unstable
x = numpy.random.uniform(-0.9, 0.9, shape).astype(dtype, copy=False)
gy = numpy.random.uniform(-1, 1, shape).astype(dtype, copy=False)
ggx = numpy.random.uniform(-1, 1, shape).astype(dtype, copy=False)
return x, gy, ggx
@testing.unary_math_function_unittest(F.arctanh, make_data=make_data)
class TestArctanh(unittest.TestCase):
pass
testing.run_module(__name__, __file__)
| Call testing.run_module at the end of the test | Call testing.run_module at the end of the test
| Python | mit | okuta/chainer,keisuke-umezawa/chainer,wkentaro/chainer,wkentaro/chainer,okuta/chainer,chainer/chainer,niboshi/chainer,okuta/chainer,pfnet/chainer,chainer/chainer,tkerola/chainer,chainer/chainer,niboshi/chainer,keisuke-umezawa/chainer,okuta/chainer,wkentaro/chainer,hvy/chainer,wkentaro/chainer,niboshi/chainer,niboshi/chainer,keisuke-umezawa/chainer,chainer/chainer,hvy/chainer,keisuke-umezawa/chainer,hvy/chainer,hvy/chainer |
584891ce58c3e979a5d6871ba7a6ff0a9e01d780 | routes/student_vote.py | routes/student_vote.py | from aiohttp import web
from db_helper import get_project_id, get_most_recent_group, get_user_id
from permissions import view_only, value_set
@view_only("join_projects")
@value_set("student_choosable")
async def on_submit(request):
session = request.app["session"]
cookies = request.cookies
post = await request.post()
option = int(post["order"]) - 1
attrs = ["first_option_id", "second_option_id", "third_option_id"]
project = get_project_id(session, int(post["choice"]))
if project.group is not get_most_recent_group(session):
return web.Response(status=403, text="Cannot join legacy projects")
user = get_user_id(session, cookies)
setattr(user, attrs[option], project.id)
for attr in set(attrs) - {attrs[option]}:
if getattr(user, attr) == project.id:
setattr(user, attr, None)
session.commit()
return web.Response(status=200, text="set")
| from aiohttp import web
from db_helper import get_project_id, get_user_id, can_choose_project
from permissions import view_only, value_set
@view_only("join_projects")
@value_set("student_choosable")
async def on_submit(request):
session = request.app["session"]
cookies = request.cookies
post = await request.post()
option = int(post["order"]) - 1
attrs = ["first_option_id", "second_option_id", "third_option_id"]
project = get_project_id(session, int(post["choice"]))
if not can_choose_project(session, cookies, project):
return web.Response(status=403, text="You cannot choose this project")
user = get_user_id(session, cookies)
setattr(user, attrs[option], project.id)
for attr in set(attrs) - {attrs[option]}:
if getattr(user, attr) == project.id:
setattr(user, attr, None)
session.commit()
return web.Response(status=200, text="set")
| Check if student can choose a project before allowing them to join it | Check if student can choose a project before allowing them to join it
| Python | agpl-3.0 | wtsi-hgi/CoGS-Webapp,wtsi-hgi/CoGS-Webapp,wtsi-hgi/CoGS-Webapp |
92ab5c0878ba528fb49a42fde64dd4d6474bc1e8 | app/models.py | app/models.py | from app import db
class User(db.Model):
__tablename__ = 'users'
username = db.Column(db.String(64), nullable=False, unique=True, primary_key=True)
password = db.Column(db.String(192), nullable=False)
def __init__(self, username, password):
self.username = username
self.password = password
def __repr__(self):
return 'The users name is: %r' % self.username
class Patient(db.Model):
__tablename__ = 'patients'
# Used to determine which nurse triaged a patient.
# clientname = db.Column(db.String(64), db.ForeignKey('users.username'))
mobile = db.Column(db.Integer, unique=True, primary_key=True)
forename = db.Column(db.String(64), nullable=False)
surname = db.Column(db.String(64), nullable=False)
dob = db.Column(db.Date)
def __init__(self, mobile, forename, surname, dob):
self.mobile = mobile
self.forename = forename
self.surname = surname
self.dob = dob
def __repr__(self):
return 'The mobile number and name are: %r, %r %r' % (self.mobile, self.forename, self.surname)
| from app import db
class User(db.Model):
__tablename__ = 'users'
username = db.Column(db.String(64), nullable=False, unique=True, primary_key=True)
password = db.Column(db.String(192), nullable=False)
def __init__(self, username, password):
self.username = username
self.password = password
def __repr__(self):
return 'The users name is: %r' % self.username
class Patient(db.Model):
__tablename__ = 'patients'
forename = db.Column(db.String(64), nullable=False)
surname = db.Column(db.String(64), nullable=False)
dob = db.Column(db.Date)
mobile = db.Column(db.String(30), nullable=False, unique=True, primary_key=True)
def __init__(self, forename, surname, dob, mobile):
self.forename = forename
self.surname = surname
self.dob = dob
self.mobile = mobile
def __repr__(self):
return 'The patients name & mobile number are: %r %r, %r' % (self.forename, self.surname, self.mobile)
| Update order of patient attributes in model. | Update order of patient attributes in model.
| Python | mit | jawrainey/atc,jawrainey/atc |
eefff91804317f4fb2c518446ab8e2072af4d87f | app/models.py | app/models.py | from django.db import models
import mongoengine
from mongoengine import Document, EmbeddedDocument
from mongoengine.fields import *
# Create your models here.
class Greeting(models.Model):
when = models.DateTimeField('date created', auto_now_add=True)
MONGODB_URI = 'mongodb+srv://fikaadmin:[email protected]/fikanotedb?retryWrites=true&w=majority'
mongoengine.connect('fikanotedb', host=MONGODB_URI)
class Shownote(EmbeddedDocument):
url = URLField()
title = StringField()
date = DateTimeField()
class FikanoteDB(Document):
title = StringField()
number = IntField()
person = ListField(StringField())
agenda = StringField()
date = DateTimeField()
shownotes = ListField(EmbeddedDocumentField(Shownote))
meta = {'collection': 'fikanotedb'}
class AgendaDB(Document):
url = URLField()
title = StringField()
date = DateTimeField()
meta = {'collection': 'agendadb'}
| from django.db import models
import mongoengine
from mongoengine import Document, EmbeddedDocument
from mongoengine.fields import *
import os
# Create your models here.
class Greeting(models.Model):
when = models.DateTimeField('date created', auto_now_add=True)
USER = os.getenv('DATABASE_USER')
PASWORD = os.getenv('DATABASE_PASSWORD')
MONGODB_URI = "mongodb+srv://{}:{}@fikanotedb.ltkpy.mongodb.net/fikanotedb?retryWrites=true&w=majority".format(USER, PASWORD)
mongoengine.connect('fikanotedb', host=MONGODB_URI)
class Shownote(EmbeddedDocument):
url = URLField()
title = StringField()
date = DateTimeField()
class FikanoteDB(Document):
title = StringField()
number = IntField()
person = ListField(StringField())
agenda = StringField()
date = DateTimeField()
shownotes = ListField(EmbeddedDocumentField(Shownote))
meta = {'collection': 'fikanotedb'}
class AgendaDB(Document):
url = URLField()
title = StringField()
date = DateTimeField()
meta = {'collection': 'agendadb'}
| Remove username and password from repository | Remove username and password from repository
| Python | mit | gmkou/FikaNote,gmkou/FikaNote,gmkou/FikaNote |
556cef75198e3a5a8ac3e8f523c54b0b2df6a2c1 | mousestyles/data/tests/test_data.py | mousestyles/data/tests/test_data.py | """Standard test data.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
from numpy.testing import assert_equal
import mousestyles.data as data
def test_all_features_mousedays_11bins():
all_features = data.all_feature_data()
print(all_features.shape)
| """Standard test data.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import numpy as np
from numpy.testing import assert_equal
import mousestyles.data as data
def test_all_features_loader():
all_features = data.load_all_features()
assert_equal(all_features.shape, (21131, 13))
| Test for new data loader | TST: Test for new data loader
Just a start, should probably add a more detailed test later.
| Python | bsd-2-clause | berkeley-stat222/mousestyles,togawa28/mousestyles,changsiyao/mousestyles |
1e10fa30998f63359ddd26d9804bd32a837c2cab | armstrong/esi/tests/_utils.py | armstrong/esi/tests/_utils.py | from django.conf import settings
from django.test import TestCase as DjangoTestCase
import fudge
class TestCase(DjangoTestCase):
def setUp(self):
self._original_settings = settings
def tearDown(self):
settings = self._original_settings
| from django.conf import settings
from django.http import HttpRequest
from django.test import TestCase as DjangoTestCase
import fudge
def with_fake_request(func):
def inner(self, *args, **kwargs):
request = fudge.Fake(HttpRequest)
fudge.clear_calls()
result = func(self, request, *args, **kwargs)
fudge.verify()
fudge.clear_expectations()
return result
return inner
class TestCase(DjangoTestCase):
def setUp(self):
self._original_settings = settings
def tearDown(self):
settings = self._original_settings
| Add in a decorator for generating fake request objects for test cases | Add in a decorator for generating fake request objects for test cases
| Python | bsd-3-clause | armstrong/armstrong.esi |
c8896c3eceb6ef7ffc6eef16af849597a8f7b8e2 | Lib/test/test_sunaudiodev.py | Lib/test/test_sunaudiodev.py | from test_support import verbose, TestFailed
import sunaudiodev
import os
def findfile(file):
if os.path.isabs(file): return file
import sys
for dn in sys.path:
fn = os.path.join(dn, file)
if os.path.exists(fn): return fn
return file
def play_sound_file(path):
fp = open(path, 'r')
data = fp.read()
fp.close()
try:
a = sunaudiodev.open('w')
except sunaudiodev.error, msg:
raise TestFailed, msg
else:
a.write(data)
a.close()
def test():
play_sound_file(findfile('audiotest.au'))
test()
| from test_support import verbose, TestFailed
import sunaudiodev
import os
def findfile(file):
if os.path.isabs(file): return file
import sys
path = sys.path
try:
path = [os.path.dirname(__file__)] + path
except NameError:
pass
for dn in path:
fn = os.path.join(dn, file)
if os.path.exists(fn): return fn
return file
def play_sound_file(path):
fp = open(path, 'r')
data = fp.read()
fp.close()
try:
a = sunaudiodev.open('w')
except sunaudiodev.error, msg:
raise TestFailed, msg
else:
a.write(data)
a.close()
def test():
play_sound_file(findfile('audiotest.au'))
test()
| Make this test work when imported from the interpreter instead of run from regrtest.py (it still works there too, of course). | Make this test work when imported from the interpreter instead of run
from regrtest.py (it still works there too, of course).
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator |
675e0a29f780d6053d942dce4f80c6d934f3785a | Python/tigre/utilities/Ax.py | Python/tigre/utilities/Ax.py | from _Ax import _Ax_ext
import numpy as np
import copy
def Ax(img, geo, angles, projection_type="Siddon"):
if img.dtype != np.float32:
raise TypeError("Input data should be float32, not "+ str(img.dtype))
if not np.isreal(img).all():
raise ValueError("Complex types not compatible for projection.")
geox = copy.deepcopy(geo)
geox.check_geo(angles)
"""
Here we cast all values in geo to single point precision float. This way we
know what behaviour to expect from pytigre to Cuda and can change
single parameters accordingly.
"""
geox.cast_to_single()
#geox.checknans()
if abs(img.shape - geox.nVoxel).max()>1e-8:
raise ValueError("Input data should be of shape geo.nVoxel: "+ str(geox.nVoxel) +
" not:" + str(img.shape))
return _Ax_ext(img, geox, geox.angles, projection_type, geox.mode)
| from _Ax import _Ax_ext
import numpy as np
import copy
def Ax(img, geo, angles, projection_type="Siddon"):
if img.dtype != np.float32:
raise TypeError("Input data should be float32, not "+ str(img.dtype))
if not np.isreal(img).all():
raise ValueError("Complex types not compatible for projection.")
if any(img.shape != geo.nVoxel):
raise ValueError("Input data should be of shape geo.nVoxel: "+ str(geo.nVoxel) +
" not:" + str(img.shape))
geox = copy.deepcopy(geo)
geox.check_geo(angles)
"""
Here we cast all values in geo to single point precision float. This way we
know what behaviour to expect from pytigre to Cuda and can change
single parameters accordingly.
"""
geox.cast_to_single()
#geox.checknans()
return _Ax_ext(img, geox, geox.angles, projection_type, geox.mode)
| Check the shape of input data earlier | Check the shape of input data earlier
Using geo.nVoxel to check the input img shape earlier, before geo is casted to float32 (geox). We should use any() instead of all(), since "!=" is used? | Python | bsd-3-clause | CERN/TIGRE,CERN/TIGRE,CERN/TIGRE,CERN/TIGRE |
08d6c4414d72b5431d5a50013058f325f38d7b1c | txdbus/test/test_message.py | txdbus/test/test_message.py | import os
import unittest
from txdbus import error, message
class MessageTester(unittest.TestCase):
def test_too_long(self):
class E(message.ErrorMessage):
_maxMsgLen = 1
def c():
E('foo.bar', 5)
self.assertRaises(error.MarshallingError, c)
def test_reserved_path(self):
def c():
message.MethodCallMessage('/org/freedesktop/DBus/Local', 'foo')
self.assertRaises(error.MarshallingError, c)
def test_invalid_message_type(self):
class E(message.ErrorMessage):
_messageType=99
try:
message.parseMessage(E('foo.bar', 5).rawMessage)
self.assertTrue(False)
except Exception as e:
self.assertEquals(str(e), 'Unknown Message Type: 99')
| import os
import unittest
from txdbus import error, message
class MessageTester(unittest.TestCase):
def test_too_long(self):
class E(message.ErrorMessage):
_maxMsgLen = 1
def c():
E('foo.bar', 5)
self.assertRaises(error.MarshallingError, c)
def test_reserved_path(self):
def c():
message.MethodCallMessage('/org/freedesktop/DBus/Local', 'foo')
self.assertRaises(error.MarshallingError, c)
def test_invalid_message_type(self):
class E(message.ErrorMessage):
_messageType=99
try:
message.parseMessage(E('foo.bar', 5).rawMessage, oobFDs=[])
self.assertTrue(False)
except Exception as e:
self.assertEquals(str(e), 'Unknown Message Type: 99')
| Fix message tests after in message.parseMessage args three commits ago | Fix message tests after in message.parseMessage args three commits ago
(three commits ago is 08a6c170daa79e74ba538c928e183f441a0fb441)
| Python | mit | cocagne/txdbus |
84c2c987151451180281f1aecb0483321462340c | influxalchemy/__init__.py | influxalchemy/__init__.py | """ InfluxDB Alchemy. """
from .client import InfluxAlchemy
from .measurement import Measurement
__version__ = "0.1.0"
| """ InfluxDB Alchemy. """
import pkg_resources
from .client import InfluxAlchemy
from .measurement import Measurement
try:
__version__ = pkg_resources.get_distribution(__package__).version
except pkg_resources.DistributionNotFound: # pragma: no cover
__version__ = None # pragma: no cover
| Use package version for __version__ | Use package version for __version__
| Python | mit | amancevice/influxalchemy |
fc6e3c276ee638fbb4409fa00d470817205f2028 | lib/awsflow/test/workflow_testing_context.py | lib/awsflow/test/workflow_testing_context.py | from awsflow.core import AsyncEventLoop
from awsflow.context import ContextBase
class WorkflowTestingContext(ContextBase):
def __init__(self):
self._event_loop = AsyncEventLoop()
def __enter__(self):
self._context = self.get_context()
self.set_context(self)
self._event_loop.__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._event_loop.execute_all_tasks()
self._event_loop.__exit__(exc_type, exc_val, exc_tb)
| from awsflow.core import AsyncEventLoop
from awsflow.context import ContextBase
class WorkflowTestingContext(ContextBase):
def __init__(self):
self._event_loop = AsyncEventLoop()
def __enter__(self):
try:
self._context = self.get_context()
except AttributeError:
self._context = None
self.set_context(self)
self._event_loop.__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is None:
self._event_loop.execute_all_tasks()
self._event_loop.__exit__(exc_type, exc_val, exc_tb)
| Fix context setting on the test context | Fix context setting on the test context
| Python | apache-2.0 | darjus/botoflow,boto/botoflow |
b3fb2ba913a836a1e198795019870e318879d5f7 | dictionary/forms.py | dictionary/forms.py | from django import forms
from django.forms.models import BaseModelFormSet
from django.utils.translation import ugettext_lazy as _
class BaseWordFormSet(BaseModelFormSet):
def add_fields(self, form, index):
super(BaseWordFormSet, self).add_fields(form, index)
form.fields["isLocal"] = forms.BooleanField(label=_("Local"))
| from django import forms
from django.forms.models import BaseModelFormSet
from django.utils.translation import ugettext_lazy as _
class BaseWordFormSet(BaseModelFormSet):
def add_fields(self, form, index):
super(BaseWordFormSet, self).add_fields(form, index)
form.fields["isLocal"] = forms.BooleanField(label=_("Local"), required=False)
| Make sure the isLocal BooleanField is not required | Make sure the isLocal BooleanField is not required
| Python | agpl-3.0 | sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer |
7db27a3629e442c99abd24503f08d982b6a30e33 | packages/Python/lldbsuite/test/lang/objc/modules-cache/TestClangModulesCache.py | packages/Python/lldbsuite/test/lang/objc/modules-cache/TestClangModulesCache.py | """Test that the clang modules cache directory can be controlled."""
from __future__ import print_function
import unittest2
import os
import time
import platform
import shutil
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ObjCModulesTestCase(TestBase):
NO_DEBUG_INFO_TESTCASE = True
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
def test_expr(self):
self.build()
self.main_source_file = lldb.SBFileSpec("main.m")
self.runCmd("settings set target.auto-import-clang-modules true")
mod_cache = self.getBuildArtifact("my-clang-modules-cache")
if os.path.isdir(mod_cache):
shutil.rmtree(mod_cache)
self.assertFalse(os.path.isdir(mod_cache),
"module cache should not exist")
self.runCmd('settings set symbols.clang-modules-cache-path "%s"' % mod_cache)
self.runCmd('settings set target.clang-module-search-paths "%s"'
% self.getSourceDir())
(target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(
self, "Set breakpoint here", self.main_source_file)
self.runCmd("expr @import Darwin")
self.assertTrue(os.path.isdir(mod_cache), "module cache exists")
| """Test that the clang modules cache directory can be controlled."""
from __future__ import print_function
import unittest2
import os
import time
import platform
import shutil
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ObjCModulesTestCase(TestBase):
    # Built without debug info on purpose: the test exercises the clang
    # module import machinery, not debug-symbol handling.
    NO_DEBUG_INFO_TESTCASE = True
    mydir = TestBase.compute_mydir(__file__)

    def setUp(self):
        # Call my parent's setUp to set up the debugger test fixture.
        TestBase.setUp(self)

    @skipUnlessDarwin
    def test_expr(self):
        """Check that `expr @import Foo` populates the configured clang
        modules cache directory."""
        self.build()
        self.main_source_file = lldb.SBFileSpec("main.m")
        self.runCmd("settings set target.auto-import-clang-modules true")
        mod_cache = self.getBuildArtifact("my-clang-modules-cache")
        # Start from a clean slate so the final isdir() check proves the
        # cache was created by this run, not left over from a prior one.
        if os.path.isdir(mod_cache):
            shutil.rmtree(mod_cache)
        self.assertFalse(os.path.isdir(mod_cache),
                         "module cache should not exist")
        self.runCmd('settings set symbols.clang-modules-cache-path "%s"' % mod_cache)
        self.runCmd('settings set target.clang-module-search-paths "%s"'
                    % self.getSourceDir())
        (target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(
            self, "Set breakpoint here", self.main_source_file)
        # Importing the module should write into the cache directory that
        # the settings above pointed the compiler at.
        self.runCmd("expr @import Foo")
        self.assertTrue(os.path.isdir(mod_cache), "module cache exists")
| Mark ObjC testcase as skipUnlessDarwin and fix a typo in test function. | Mark ObjC testcase as skipUnlessDarwin and fix a typo in test function.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@326640 91177308-0d34-0410-b5e6-96231b3b80d8
(cherry picked from commit cb9b1a2163f960e34721f74bad30622fda71e43b)
| Python | apache-2.0 | apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb |
d805f9f90558416f69223c72c25357345aae44c7 | filer/__init__.py | filer/__init__.py | #-*- coding: utf-8 -*-
# version string following pep-0396 and pep-0386
__version__ = '0.9pbs.9' # pragma: nocover
| #-*- coding: utf-8 -*-
# version string following pep-0396 and pep-0386
__version__ = '0.9pbs.10' # pragma: nocover
| Bump the version since there are commits on top of last tag. | Bump the version since there are commits on top of last tag. | Python | bsd-3-clause | pbs/django-filer,pbs/django-filer,pbs/django-filer,pbs/django-filer,pbs/django-filer |
e34b7c8d9e869ac1be10e8ae3d71cea794044e13 | docs/blender-sphinx-build.py | docs/blender-sphinx-build.py | import os
import site # get site-packages into sys.path
import sys
# add local addons folder to sys.path so blender finds it
sys.path = (
[os.path.join(os.path.dirname(__file__), '..', 'scripts', 'addons')]
+ sys.path
)
# run sphinx builder
# this assumes that the builder is called as
# "blender --background --factory-startup --python blender-sphinx-build.py -- ..."
# pass the correct arguments by dropping the arguments prior to --
import sphinx
argv = ['blender-sphinx-build'] + sys.argv[6:]
sphinx.main(argv=argv)
| import os
import site # get site-packages into sys.path
import sys
# add local addons folder to sys.path so blender finds it
sys.path = (
[os.path.join(os.path.dirname(__file__), '..')]
+ sys.path
)
# run sphinx builder
# this assumes that the builder is called as
# "blender --background --factory-startup --python blender-sphinx-build.py -- ..."
# pass the correct arguments by dropping the arguments prior to --
import sphinx
argv = ['blender-sphinx-build'] + sys.argv[6:]
sphinx.main(argv=argv)
| Correct sys.path when generating docs. | Correct sys.path when generating docs.
| Python | bsd-3-clause | nightstrike/blender_nif_plugin,amorilia/blender_nif_plugin,amorilia/blender_nif_plugin,nightstrike/blender_nif_plugin |
2f60d4665a960578ab97bdaf313893ec366c24f1 | kdb/default_config.py | kdb/default_config.py | # Module: defaults
# Date: 14th May 2008
# Author: James Mills, prologic at shortcircuit dot net dot au
"""defaults - System Defaults
This module contains default configuration and sane defaults for various
parts of the system. These defaults are used by the environment initially
when no environment has been created.
"""
CONFIG = {
"server": {
"host": "irc.freenode.net",
"port": 6667
},
"bot": {
"nick": "kdb",
"ident": "kdb",
"name": "Knowledge Database Bot",
"channels": "#circuits",
},
"plugins": {
"broadcast.*": "enabled",
"channels.*": "enabled",
"core.*": "enabled",
"ctcp.*": "enabled",
"dnstools.*": "enabled",
"eval.*": "enabled",
"google.*": "enabled",
"greeting.*": "enabled",
"help.*": "enabled",
"irc.*": "enabled",
"stats.*": "enabled",
"swatch.*": "enabled",
"timers.*": "enabled",
},
}
| # Module: defaults
# Date: 14th May 2008
# Author: James Mills, prologic at shortcircuit dot net dot au
"""defaults - System Defaults
This module contains default configuration and sane defaults for various
parts of the system. These defaults are used by the environment initially
when no environment has been created.
"""
CONFIG = {
"server": {
"host": "irc.freenode.net",
"port": 6667
},
"bot": {
"nick": "kdb",
"ident": "kdb",
"name": "Knowledge Database Bot",
"channels": "#circuits",
},
"plugins": {
"broadcast.*": "enabled",
"channels.*": "enabled",
"core.*": "enabled",
"ctcp.*": "enabled",
"dnstools.*": "enabled",
"eval.*": "enabled",
"google.*": "enabled",
"greeting.*": "enabled",
"help.*": "enabled",
"irc.*": "enabled",
"remote.*": "enabled",
"rmessage.*": "enabled",
"rnotify.*": "enabled",
"stats.*": "enabled",
"swatch.*": "enabled",
"timers.*": "enabled",
},
}
| Enable remote, rmessage and rnotify plugins by default | Enable remote, rmessage and rnotify plugins by default
| Python | mit | prologic/kdb,prologic/kdb,prologic/kdb |
6eca222d0bc36b2573a09c1345d940239f8e9d4d | documents/models.py | documents/models.py | from django.db import models
from django.urls import reverse
class Document(models.Model):
FILE_TYPES = ('md', 'txt')
repo = models.ForeignKey('interface.Repo', related_name='documents')
path = models.TextField()
filename = models.TextField()
body = models.TextField(blank=True)
commit_date = models.DateTimeField()
def __str__(self):
return '{}/{}'.format(self.path, self.filename)
@property
def github_view_link(self):
return 'https://github.com/{0}/blob/{1}{2}'.format(self.repo.full_name, self.repo.wiki_branch, str(self))
@property
def github_edit_link(self):
return 'https://github.com/{0}/edit/{1}{2}'.format(self.repo.full_name, self.repo.wiki_branch, str(self))
def get_absolute_url(self):
return reverse('repo_detail', kwargs={'full_name': self.repo.full_name, 'path': str(self)})
class Meta:
unique_together = ('repo', 'path', 'filename')
| from django.db import models
from django.urls import reverse
class Document(models.Model):
    """A single documentation file fetched from a GitHub repository."""

    # File extensions treated as renderable documents.
    FILE_TYPES = ('md', 'txt')

    repo = models.ForeignKey('interface.Repo', related_name='documents')
    path = models.TextField()       # directory portion of the file's location
    filename = models.TextField()   # basename, e.g. "index.md"
    body = models.TextField(blank=True)   # raw file contents
    commit_date = models.DateTimeField()  # when the file last changed upstream

    def __str__(self):
        return self.full_path

    @property
    def full_path(self):
        # Joined with "/"; the GitHub URLs below suggest `path` carries its
        # own leading slash — TODO confirm against whatever populates it.
        return '{}/{}'.format(self.path, self.filename)

    @property
    def github_view_link(self):
        """URL of this file rendered on GitHub (blob view)."""
        return 'https://github.com/{0}/blob/{1}{2}'.format(self.repo.full_name, self.repo.wiki_branch, self.full_path)

    @property
    def github_edit_link(self):
        """URL of GitHub's in-browser editor for this file."""
        return 'https://github.com/{0}/edit/{1}{2}'.format(self.repo.full_name, self.repo.wiki_branch, self.full_path)

    def get_absolute_url(self):
        return reverse('repo_detail', kwargs={'full_name': self.repo.full_name, 'path': self.full_path})

    class Meta:
        # A repo cannot contain two documents with the same directory + name.
        unique_together = ('repo', 'path', 'filename')
| Move Document.__str__ to named method | Move Document.__str__ to named method
| Python | mit | ZeroCater/Eyrie,ZeroCater/Eyrie,ZeroCater/Eyrie |
89d9987f742fa74fc3646ccc163610d0c9400d75 | dewbrick/utils.py | dewbrick/utils.py | import tldextract
import pyphen
from random import choice
TITLES = ('Mister', 'Little Miss')
SUFFIXES = ('Destroyer of Worlds', 'the Monkey Botherer', 'PhD')
def generate_name(domain):
title = choice(TITLES)
_parts = tldextract.extract(domain)
_parts = [_parts.subdomain, _parts.domain]
parts = []
for i, part in enumerate(_parts):
if part and part != 'www':
parts.append('{}{}'.format(part[0].upper(), part[1:]))
name = '-'.join(parts)
dic = pyphen.Pyphen(lang='en_US')
name = '{} {}'.format(title, dic.inserted(name))
if choice((True, False)):
name = '{} {}'.format(name, choice(SUFFIXES))
return name
| import tldextract
import pyphen
from random import choice
TITLES = ('Mister', 'Little Miss', 'Señor', 'Queen')
SUFFIXES = ('Destroyer of Worlds', 'the Monkey Botherer', 'PhD',
'Ah-gowan-gowan-gowan')
def generate_name(domain):
    """Turn *domain* into a whimsical personified name.

    e.g. "example.com" might yield "Mister Ex-am-ple PhD".
    """
    title = choice(TITLES)
    extracted = tldextract.extract(domain)
    # Keep subdomain + domain, dropping empty parts and a bare "www",
    # and upper-case the first character of each surviving part.
    capitalised = [
        part[0].upper() + part[1:]
        for part in (extracted.subdomain, extracted.domain)
        if part and part != 'www'
    ]
    hyphenator = pyphen.Pyphen(lang='en_US')
    name = '{} {}'.format(title, hyphenator.inserted('-'.join(capitalised)))
    # Half the time, tack on a random comedy suffix.
    if choice((True, False)):
        name = '{} {}'.format(name, choice(SUFFIXES))
    return name
| Python | apache-2.0 | ohmygourd/dewbrick,ohmygourd/dewbrick,ohmygourd/dewbrick |
e9814c857bdbf3d163352abddade1d12f0e30810 | mbaas/settings_jenkins.py | mbaas/settings_jenkins.py | from mbaas.settings import *
INSTALLED_APPS += ('django_nose',)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
'--cover-erase',
'--with-xunit',
'--with-coverage',
'--cover-xml',
'--cover-html',
'--cover-package=accounts,push',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test.db'),
}
}
| from mbaas.settings import *
INSTALLED_APPS += ('django_nose',)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
'--with-xunit',
'--with-coverage',
'--cover-xml',
'--cover-html',
'--cover-package=accounts,push',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'test.db'),
}
}
| Remove clear before test results | Remove clear before test results | Python | apache-2.0 | nnsnodnb/django-mbaas,nnsnodnb/django-mbaas,nnsnodnb/django-mbaas |
4b4ed18f01c13c321285463628bb0a3b70a75ac5 | test/conftest.py | test/conftest.py | import functools
import os.path
import shutil
import sys
import tempfile
import pytest
@pytest.fixture(scope="function")
def HOME(tmpdir):
home = os.path.join(tmpdir, 'john')
os.mkdir(home)
# NOTE: homely._utils makes use of os.environ['HOME'], so we need to
# destroy any homely modules that may have imported things based on this.
# Essentially we blast away the entire module and reload it from scratch.
for name in list(sys.modules.keys()):
if name.startswith('homely.'):
sys.modules.pop(name, None)
os.environ['HOME'] = home
return home
@pytest.fixture(scope="function")
def tmpdir(request):
path = tempfile.mkdtemp()
destructor = shutil.rmtree
def destructor(path):
print("rm -rf %s" % path)
shutil.rmtree(path)
request.addfinalizer(functools.partial(destructor, path))
return os.path.realpath(path)
| import functools
import os.path
import shutil
import sys
import tempfile
import pytest
@pytest.fixture(scope="function")
def HOME(tmpdir):
    """Point $HOME at a fresh per-test directory, restoring it afterwards.

    Yields the path of the fake home directory.
    """
    old_home = os.environ['HOME']
    try:
        home = os.path.join(tmpdir, 'john')
        os.mkdir(home)
        # NOTE: homely._utils makes use of os.environ['HOME'], so we need to
        # destroy any homely modules that may have imported things based on this.
        # Essentially we blast away the entire module and reload it from scratch.
        for name in list(sys.modules.keys()):
            if name.startswith('homely.'):
                sys.modules.pop(name, None)
        os.environ['HOME'] = home
        yield home
    finally:
        # Always restore the real $HOME so later tests are unaffected,
        # even if the test body raises.
        os.environ['HOME'] = old_home
@pytest.fixture(scope="function")
def tmpdir(request):
    """Create a temporary directory that is removed when the test finishes.

    Returns the resolved (symlink-free) path of the directory.
    """
    path = tempfile.mkdtemp()

    # Fix: the original also assigned ``destructor = shutil.rmtree`` here,
    # which was dead code — immediately shadowed by the def below.
    def destructor(path):
        print("rm -rf %s" % path)
        shutil.rmtree(path)

    # functools.partial pins the path created above for the finalizer.
    request.addfinalizer(functools.partial(destructor, path))
    # realpath: on some platforms mkdtemp returns a symlinked path
    # (e.g. /var -> /private/var on macOS).
    return os.path.realpath(path)
| Rework HOME fixture so it doesn't leave os.environ corrupted | Rework HOME fixture so it doesn't leave os.environ corrupted
| Python | mit | phodge/homely,phodge/homely |
4be8c3f8164fe0973d6277ea0d827b777cd4a988 | locations/pipelines.py | locations/pipelines.py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.exceptions import DropItem
class DuplicatesPipeline(object):
def __init__(self):
self.ids_seen = set()
def process_item(self, item, spider):
ref = item['ref']
if ref in self.ids_seen:
raise DropItem("Duplicate item found: %s" % item)
else:
self.ids_seen.add(ref)
return item
class ApplySpiderNamePipeline(object):
def process_item(self, item, spider):
existing_extras = item.get('extras', {})
existing_extras['@spider'] = spider.name
item['extras'] = existing_extras
return item
| # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.exceptions import DropItem
class DuplicatesPipeline(object):
    """Drop any item whose (spider, ref) pair has already been seen."""

    def __init__(self):
        # Keys are (spider name, item ref) tuples, so two different
        # spiders may emit the same ref without colliding.
        self.ids_seen = set()

    def process_item(self, item, spider):
        """Pass *item* through the first time; raise DropItem on a repeat."""
        key = (spider.name, item['ref'])
        if key not in self.ids_seen:
            self.ids_seen.add(key)
            return item
        raise DropItem("Duplicate item found: %s" % item)
class ApplySpiderNamePipeline(object):
    """Stamp every item with the name of the spider that produced it."""

    def process_item(self, item, spider):
        """Record the spider's name under item['extras']['@spider']."""
        item.setdefault('extras', {})['@spider'] = spider.name
        return item
| Include spider name in item dedupe pipeline | Include spider name in item dedupe pipeline
| Python | mit | iandees/all-the-places,iandees/all-the-places,iandees/all-the-places |
edd5adc9be2a700421bd8e98af825322796b8714 | dns/models.py | dns/models.py | from google.appengine.ext import db
TOP_LEVEL_DOMAINS = 'com net org biz info'.split()
class Lookup(db.Model):
"""
The datastore key name is the domain name, without top level.
IP address fields use 0 (zero) for NXDOMAIN because None is
returned for missing properties.
Updates since 2010-01-01 use negative numbers for 60 bit hashes of
the SOA server name, see tools/update_dns.py.
"""
backwards = db.StringProperty(required=True) # For suffix matching.
timestamp = db.DateTimeProperty(required=True) # Created or updated.
com = db.IntegerProperty(indexed=False)
net = db.IntegerProperty(indexed=False)
org = db.IntegerProperty(indexed=False)
biz = db.IntegerProperty(indexed=False)
info = db.IntegerProperty(indexed=False)
| from google.appengine.ext import db
TOP_LEVEL_DOMAINS = """
com net org biz info
ag am at
be by
ch ck
de
es eu
fm
in io is it
la li ly
me mobi ms
name
ru
se sh sy
tel th to travel tv
us
""".split()
# Omitting nu, ph, st, ws because they don't seem to have NXDOMAIN.
class UpgradeStringProperty(db.IntegerProperty):
def validate(self, value):
return unicode(value) if value else u''
class Lookup(db.Expando):
"""
The datastore key name is the domain name, without top level.
IP address fields use 0 (zero) for NXDOMAIN because None is
returned for missing properties.
Some updates on 2010-01-01 use negative numbers for 60 bit hashes of
the SOA server name.
Since 2010-01-02, this model inherits from Expando to flexibly add
more top level domains. Each property stores the authority name
server as string backwards, e.g. com.1and1.ns1 for better sorting.
"""
backwards = db.StringProperty(required=True) # For suffix matching.
timestamp = db.DateTimeProperty(required=True) # Created or updated.
com = UpgradeStringProperty()
net = UpgradeStringProperty()
org = UpgradeStringProperty()
biz = UpgradeStringProperty()
info = UpgradeStringProperty()
| Upgrade Lookup model to Expando and DNS result properties from integer to string. | Upgrade Lookup model to Expando and DNS result properties from integer to string.
| Python | mit | jcrocholl/nxdom,jcrocholl/nxdom |
00cbac852e83eb1f3ddc03ed70ad32494f16fdbf | caslogging.py | caslogging.py | """
file: caslogging.py
author: Ben Grawi <[email protected]>
date: October 2013
description: Sets up the logging information for the CAS Reader
"""
from config import config
import logging as root_logging
# Set up the logger
logger = root_logging.getLogger()
logger.setLevel(root_logging.INFO)
logger_format = root_logging.Formatter('%(asctime)s %(levelname)s: %(message)s')
logging_file_handler = root_logging.FileHandler(config['logging']['filename'])
logging_file_handler.setLevel(root_logging.INFO)
logging_file_handler.setFormatter(logger_format)
logger.addHandler(logging_file_handler)
logging_stream_handler = root_logging.StreamHandler()
logging_stream_handler.setLevel(root_logging.INFO)
logging_stream_handler.setFormatter(logger_format)
logger.addHandler(logging_stream_handler)
logging = root_logging | """
file: caslogging.py
author: Ben Grawi <[email protected]>
date: October 2013
description: Sets up the logging information for the CAS Reader
"""
from config import config
import logging as root_logging
# Set up the logger
logger = root_logging.getLogger()
logger.setLevel(root_logging.INFO)
logger_format = root_logging.Formatter('%(asctime)s %(levelname)s: %(message)s', '%Y-%m-%d %H:%M:%S')
logging_file_handler = root_logging.FileHandler(config['logging_system']['filename'])
logging_file_handler.setLevel(root_logging.INFO)
logging_file_handler.setFormatter(logger_format)
logger.addHandler(logging_file_handler)
logging_stream_handler = root_logging.StreamHandler()
logging_stream_handler.setLevel(root_logging.INFO)
logging_stream_handler.setFormatter(logger_format)
logger.addHandler(logging_stream_handler)
logging = root_logging | Fix of the logging system exception | Fix of the logging system exception
Added a format to the date for the logging system. '%Y-%m-%d %H:%M:%S’.
Fixed an exception opening the logging file because the variable name
was not written correctly.
| Python | mit | bumper-app/bumper-bianca,bumper-app/bumper-bianca |
8bacd0f657a931754d8c03e2de86c5e00ac5f791 | modoboa/lib/cryptutils.py | modoboa/lib/cryptutils.py | # coding: utf-8
from Crypto.Cipher import AES
import base64
import random
import string
from modoboa.lib import parameters
def random_key(l=16):
"""Generate a random key
:param integer l: the key's length
:return: a string
"""
char_set = string.digits + string.letters + string.punctuation
return ''.join(random.sample(char_set * l, l))
def encrypt(clear):
key = parameters.get_admin("SECRET_KEY", app="core")
obj = AES.new(key, AES.MODE_ECB)
if type(clear) is unicode:
clear = clear.encode("utf-8")
if len(clear) % AES.block_size:
clear += " " * (AES.block_size - len(clear) % AES.block_size)
ciph = obj.encrypt(clear)
ciph = base64.b64encode(ciph)
return ciph
def decrypt(ciph):
obj = AES.new(
parameters.get_admin("SECRET_KEY", app="core"), AES.MODE_ECB
)
ciph = base64.b64decode(ciph)
clear = obj.decrypt(ciph)
return clear.rstrip(' ')
def get_password(request):
return decrypt(request.session["password"])
| # coding: utf-8
"""Crypto related utilities."""
import base64
import random
import string
from Crypto.Cipher import AES
from modoboa.lib import parameters
def random_key(l=16):
    """Generate a random key.

    The result is used as an encryption key elsewhere in this module, so
    it is drawn from the OS entropy pool via ``random.SystemRandom``
    rather than the predictable Mersenne-Twister ``random`` functions.

    :param integer l: the key's length
    :return: a string of exactly ``l`` characters
    """
    # string.ascii_letters exists on both Python 2 and 3, unlike the
    # Python-2-only ``string.letters`` the original relied on.
    population = string.digits + string.ascii_letters + string.punctuation
    rng = random.SystemRandom()
    # Drawing l independent characters always yields length l, so no
    # retry loop is needed.
    return "".join(rng.choice(population) for _ in range(l))
def encrypt(clear):
key = parameters.get_admin("SECRET_KEY", app="core")
obj = AES.new(key, AES.MODE_ECB)
if type(clear) is unicode:
clear = clear.encode("utf-8")
if len(clear) % AES.block_size:
clear += " " * (AES.block_size - len(clear) % AES.block_size)
ciph = obj.encrypt(clear)
ciph = base64.b64encode(ciph)
return ciph
def decrypt(ciph):
obj = AES.new(
parameters.get_admin("SECRET_KEY", app="core"), AES.MODE_ECB
)
ciph = base64.b64decode(ciph)
clear = obj.decrypt(ciph)
return clear.rstrip(' ')
def get_password(request):
return decrypt(request.session["password"])
| Make sure key has the required size. | Make sure key has the required size.
see #867
| Python | isc | tonioo/modoboa,modoboa/modoboa,bearstech/modoboa,carragom/modoboa,tonioo/modoboa,modoboa/modoboa,bearstech/modoboa,carragom/modoboa,bearstech/modoboa,bearstech/modoboa,modoboa/modoboa,carragom/modoboa,modoboa/modoboa,tonioo/modoboa |
e419deba7b1081d1ece70a1840c770d9faad51f0 | astral/api/tests/test_nodes.py | astral/api/tests/test_nodes.py | from nose.tools import eq_, ok_
from tornado.httpclient import HTTPRequest
import json
from astral.api.tests import BaseTest
from astral.models.node import Node
from astral.models.tests.factories import NodeFactory
class NodesHandlerTest(BaseTest):
def test_get_nodes(self):
[NodeFactory() for _ in range(3)]
response = self.fetch('/nodes')
eq_(response.code, 200)
result = json.loads(response.body)
ok_('nodes' in result)
for node in result['nodes']:
ok_(Node.get_by(uuid=node['uuid']))
def test_register_node(self):
data = {'uuid': "a-unique-id", 'port': 8000}
eq_(Node.get_by(uuid=data['uuid']), None)
self.http_client.fetch(HTTPRequest(
self.get_url('/nodes'), 'POST', body=json.dumps(data)), self.stop)
response = self.wait()
eq_(response.code, 200)
ok_(Node.get_by(uuid=data['uuid']))
| from nose.tools import eq_, ok_
from tornado.httpclient import HTTPRequest
import json
from astral.api.tests import BaseTest
from astral.models.node import Node
from astral.models.tests.factories import NodeFactory
class NodesHandlerTest(BaseTest):
def test_get_nodes(self):
[NodeFactory() for _ in range(3)]
response = self.fetch('/nodes')
eq_(response.code, 200)
result = json.loads(response.body)
ok_('nodes' in result)
for node in result['nodes']:
ok_(Node.get_by(uuid=node['uuid']))
def test_register_node(self):
data = {'uuid': "a-unique-id", 'port': 8001}
eq_(Node.get_by(uuid=data['uuid']), None)
self.http_client.fetch(HTTPRequest(
self.get_url('/nodes'), 'POST', body=json.dumps(data)), self.stop)
response = self.wait()
eq_(response.code, 200)
ok_(Node.get_by(uuid=data['uuid']))
| Update test for actually checking existence of nodes before creating. | Update test for actually checking existence of nodes before creating.
| Python | mit | peplin/astral |
61cef22952451df6345355ad596b38cb92697256 | flocker/test/test_flocker.py | flocker/test/test_flocker.py | # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for top-level ``flocker`` package.
"""
from sys import executable
from subprocess import check_output, STDOUT
from twisted.trial.unittest import SynchronousTestCase
class WarningsTests(SynchronousTestCase):
"""
Tests for warning suppression.
"""
def test_warnings_suppressed(self):
"""
Warnings are suppressed for processes that import flocker.
"""
result = check_output(
[executable, b"-c", (b"import flocker; import warnings; " +
b"warnings.warn('ohno')")],
stderr=STDOUT)
self.assertEqual(result, b"")
| # Copyright Hybrid Logic Ltd. See LICENSE file for details.
"""
Tests for top-level ``flocker`` package.
"""
from sys import executable
from subprocess import check_output, STDOUT
from twisted.trial.unittest import SynchronousTestCase
from twisted.python.filepath import FilePath
import flocker
class WarningsTests(SynchronousTestCase):
    """
    Tests for warning suppression.
    """
    def test_warnings_suppressed(self):
        """
        Warnings are suppressed for processes that import flocker.
        """
        root = FilePath(flocker.__file__)
        # Spawn a fresh interpreter: warning filters are process-global, so
        # an in-process check would be polluted by the test runner's own
        # warning configuration.
        result = check_output(
            [executable, b"-c", (b"import flocker; import warnings; " +
                                 b"warnings.warn('ohno')")],
            stderr=STDOUT,
            # Make sure we can import flocker package:
            cwd=root.parent().parent().path)
        # Empty combined stdout/stderr means the warning was suppressed.
        self.assertEqual(result, b"")
| Make sure flocker package can be imported even if it's not installed. | Make sure flocker package can be imported even if it's not installed.
| Python | apache-2.0 | beni55/flocker,hackday-profilers/flocker,achanda/flocker,adamtheturtle/flocker,mbrukman/flocker,Azulinho/flocker,w4ngyi/flocker,agonzalezro/flocker,agonzalezro/flocker,1d4Nf6/flocker,moypray/flocker,AndyHuu/flocker,lukemarsden/flocker,wallnerryan/flocker-profiles,mbrukman/flocker,w4ngyi/flocker,Azulinho/flocker,LaynePeng/flocker,lukemarsden/flocker,mbrukman/flocker,moypray/flocker,LaynePeng/flocker,runcom/flocker,AndyHuu/flocker,runcom/flocker,wallnerryan/flocker-profiles,AndyHuu/flocker,agonzalezro/flocker,w4ngyi/flocker,achanda/flocker,hackday-profilers/flocker,adamtheturtle/flocker,lukemarsden/flocker,1d4Nf6/flocker,jml/flocker,runcom/flocker,LaynePeng/flocker,beni55/flocker,adamtheturtle/flocker,moypray/flocker,achanda/flocker,hackday-profilers/flocker,wallnerryan/flocker-profiles,Azulinho/flocker,beni55/flocker,1d4Nf6/flocker,jml/flocker,jml/flocker |
879b093f29135750906f5287e132991de42ea1fe | mqtt/tests/test_client.py | mqtt/tests/test_client.py | import time
from django.test import TestCase
from django.contrib.auth.models import User
from django.conf import settings
from rest_framework.renderers import JSONRenderer
from rest_framework.parsers import JSONParser
from io import BytesIO
import json
from login.models import Profile, AmbulancePermission, HospitalPermission
from login.serializers import ExtendedProfileSerializer
from ambulance.models import Ambulance, \
AmbulanceStatus, AmbulanceCapability
from ambulance.serializers import AmbulanceSerializer
from hospital.models import Hospital, \
Equipment, HospitalEquipment, EquipmentType
from hospital.serializers import EquipmentSerializer, \
HospitalSerializer, HospitalEquipmentSerializer
from django.test import Client
from .client import MQTTTestCase, MQTTTestClient
from ..client import MQTTException
from ..subscribe import SubscribeClient
class TestMQTT1(MQTTTestCase):
def test(self):
self.assertEqual(True, True)
class TestMQTT2(MQTTTestCase):
def test(self):
self.assertEqual(True, True)
| import time
from django.test import TestCase
from django.contrib.auth.models import User
from django.conf import settings
from rest_framework.renderers import JSONRenderer
from rest_framework.parsers import JSONParser
from io import BytesIO
import json
from login.models import Profile, AmbulancePermission, HospitalPermission
from login.serializers import ExtendedProfileSerializer
from ambulance.models import Ambulance, \
AmbulanceStatus, AmbulanceCapability
from ambulance.serializers import AmbulanceSerializer
from hospital.models import Hospital, \
Equipment, HospitalEquipment, EquipmentType
from hospital.serializers import EquipmentSerializer, \
HospitalSerializer, HospitalEquipmentSerializer
from django.test import Client
from .client import MQTTTestCase, MQTTTestClient
from ..client import MQTTException
from ..subscribe import SubscribeClient
class TestMQTT1(MQTTTestCase):
def test(self):
import sys
from django.core.management.base import OutputWrapper
from django.core.management.color import color_style, no_style
# seed
from django.core import management
management.call_command('mqttseed',
verbosity=1)
print('>> Processing messages...')
self.assertEqual(True, True)
class TestMQTT2(MQTTTestCase):
def test(self):
self.assertEqual(True, True)
| Add more time to mqtt.test.client | Add more time to mqtt.test.client
| Python | bsd-3-clause | EMSTrack/WebServerAndClient,EMSTrack/WebServerAndClient,EMSTrack/WebServerAndClient |
384822f44d0731f425698cc67115d179d8d13e4c | examples/mastery.py | examples/mastery.py | import cassiopeia as cass
from cassiopeia.core import Summoner
def test_cass():
name = "Kalturi"
masteries = cass.get_masteries()
for mastery in masteries:
print(mastery.name)
if __name__ == "__main__":
test_cass()
| import cassiopeia as cass
def print_masteries():
for mastery in cass.get_masteries():
print(mastery.name)
if __name__ == "__main__":
print_masteries()
| Remove redundant import, change function name. | Remove redundant import, change function name.
| Python | mit | 10se1ucgo/cassiopeia,meraki-analytics/cassiopeia,robrua/cassiopeia |
e49638c1b2f844e3fa74e00b0d0a96b7c9774c24 | test/test_box.py | test/test_box.py | from nex import box
def test_glue_flex():
h_box = box.HBox(contents=[box.Glue(dimen=100, stretch=50, shrink=20),
box.Glue(dimen=10, stretch=350, shrink=21)],
set_glue=False)
assert h_box.stretch == [50 + 350]
assert h_box.shrink == [20 + 21]
def test_glue_flex_set():
h_box = box.HBox(contents=[box.Glue(dimen=100, stretch=50, shrink=20),
box.Glue(dimen=10, stretch=350, shrink=21)],
set_glue=True)
assert h_box.stretch == [0]
assert h_box.shrink == [0]
| from nex.dampf.dvi_document import DVIDocument
from nex import box, box_writer
def test_glue_flex():
h_box = box.HBox(contents=[box.Glue(dimen=100, stretch=50, shrink=20),
box.Glue(dimen=10, stretch=350, shrink=21)],
set_glue=False)
assert h_box.stretch == [50 + 350]
assert h_box.shrink == [20 + 21]
def test_glue_flex_set():
h_box = box.HBox(contents=[box.Glue(dimen=100, stretch=50, shrink=20),
box.Glue(dimen=10, stretch=350, shrink=21)],
set_glue=True)
assert h_box.stretch == [0]
assert h_box.shrink == [0]
def test_box_writer():
doc = DVIDocument(magnification=1000)
lay_list = [
box.Rule(1, 1, 1),
box.Glue(1, 2, 3),
box.HBox([
box.Glue(3, 2, 1),
box.Rule(3, 3, 3),
]),
]
box_writer.write_box_to_doc(doc, lay_list)
| Add basic test for box writer | Add basic test for box writer
| Python | mit | eddiejessup/nex |
76f1ae6bfc6ad22cc06c012a6b96cbf6b12b8d8a | registration/admin.py | registration/admin.py | from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
| from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'activation_key_expired')
raw_id_fields = ['user']
search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
| Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users. | Use raw_id_fields for the relation from RegistrationProfile to User, for sites which have huge numbers of users.
| Python | bsd-3-clause | stefankoegl/django-registration-couchdb,ogirardot/django-registration,andresdouglas/django-registration,bruth/django-registration2,stefankoegl/django-couchdb-utils,ogirardot/django-registration,ratio/django-registration,danielsokolowski/django-registration,wuyuntao/django-registration,stefankoegl/django-couchdb-utils,stefankoegl/django-registration-couchdb,schmidsi/django-registration,danielsokolowski/django-registration,wuyuntao/django-registration,stefankoegl/django-registration-couchdb,stefankoegl/django-couchdb-utils |
384b2f1725e01d7f130670598821f03df64d32fe | hello_flask/hello_flask_web.py | hello_flask/hello_flask_web.py | from flask import Flask
import logging
app = Flask(__name__)
app.debug = True
stream_handler = logging.StreamHandler()
app.logger.addHandler(stream_handler)
app.logger.setLevel(logging.INFO)
@app.route('/')
def root_url():
""" Just return hello world """
return 'Hello world!'
if __name__ == '__main__':
app.run(debug=True, port=8080) # pragma: no cover
| from flask import Flask, json
from time import sleep
import logging
import os
app = Flask(__name__)
app.debug = True
stream_handler = logging.StreamHandler()
app.logger.addHandler(stream_handler)
app.logger.setLevel(logging.INFO)
@app.route('/')
@app.route('/delay=<int:delay>')
def root_url(delay=0):
    """Return a greeting, optionally sleeping *delay* seconds first.

    The /delay=<n> variant is useful for exercising client timeout and
    latency handling against this demo server.
    """
    if delay > 0:
        sleep(delay)
    return 'Hello world!'
@app.route('/env/')
@app.route('/env')
def env_url():
    """Return all environment variables of the server process as JSON.

    NOTE(review): this exposes the full environment (potentially including
    secrets) to any client — acceptable only for a demo/debug app.
    """
    return json.jsonify(os.environ.items())
if __name__ == '__main__':
app.run(debug=True, port=8080) # pragma: no cover
| Add a new url endpoint for listing all env vars and a delay to root | Add a new url endpoint for listing all env vars and a delay to root
| Python | apache-2.0 | CiprianAlt/hello_flask,CiprianAlt/hello_flask,ciprianc/hello_flask,ciprianc/hello_flask |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.