commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
1e931e9aac18f393de786894d9e26ecccc251135 | server/models/_generate_superpixels.py | server/models/_generate_superpixels.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import os
import sys
# Worker-defined inputs
originalFile = globals()['originalFile']
segmentation_helpersPath = globals()['segmentation_helpersPath']
segmentation_helpersDirPath = os.path.dirname(segmentation_helpersPath)
if segmentation_helpersDirPath not in sys.path:
sys.path.append(segmentation_helpersDirPath)
from segmentation_helpers.scikit import ScikitSegmentationHelper # noqa
with open(originalFile, 'rb') as originalFileStream:
# Scikit-Image is ~70ms faster at decoding image data
originalImageData = ScikitSegmentationHelper.loadImage(originalFileStream)
superpixelsData = ScikitSegmentationHelper.superpixels(originalImageData)
superpixelsEncodedStream = ScikitSegmentationHelper.writeImage(
superpixelsData, 'png')
superpixelsEncodedBytes = superpixelsEncodedStream.getvalue()
| ###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import os
import sys
# Worker-defined inputs
originalFile = globals()['originalFile']
segmentation_helpersPath = globals()['segmentation_helpersPath']
segmentation_helpersDirPath = os.path.dirname(segmentation_helpersPath)
if segmentation_helpersDirPath not in sys.path:
sys.path.append(segmentation_helpersDirPath)
from segmentation_helpers.scikit import ScikitSegmentationHelper # noqa
with open(originalFile, 'rb') as originalFileStream:
# Scikit-Image is ~70ms faster at decoding image data
originalImageData = ScikitSegmentationHelper.loadImage(originalFileStream)
superpixelsData = ScikitSegmentationHelper.superpixels(originalImageData)
superpixelsEncodedStream = ScikitSegmentationHelper.writeImage(
superpixelsData, 'png')
superpixelsEncodedBytes = superpixelsEncodedStream.getvalue()
| Fix girder_work script bug: PEP 263 is not compatible with exec | Fix girder_work script bug: PEP 263 is not compatible with exec
| Python | apache-2.0 | ImageMarkup/isic-archive,ImageMarkup/isic-archive,ImageMarkup/isic-archive,ImageMarkup/isic-archive |
a79a3f7c42c858ae42c618479654cd7589de05b9 | zeeko/utils/tests/test_hmap.py | zeeko/utils/tests/test_hmap.py | # -*- coding: utf-8 -*-
import pytest
from ..hmap import HashMap
@pytest.fixture(params=[0,1,5,9])
def n(request):
"""Number of items"""
return request.param
@pytest.fixture
def items(n):
"""A list of strings."""
return ["item{0:d}".format(i) for i in range(n)]
@pytest.mark.skip
def test_hmap(items):
"""docstring for test"""
h = HashMap(10)
if len(items):
with pytest.raises(KeyError):
h[items[0]]
for item in items:
h.add(item)
assert len(h) == len(items)
for i, item in enumerate(items):
assert h[item] == i
assert repr(h) == "HashMap({0!r})".format(items)
if len(items):
item = items[0]
del h[item]
assert len(h) == len(items) - 1
assert item not in h
| # -*- coding: utf-8 -*-
import pytest
from ..hmap import HashMap
@pytest.fixture(params=[0,1,5,9])
def n(request):
"""Number of items"""
return request.param
@pytest.fixture
def items(n):
"""A list of strings."""
return ["item{0:d}".format(i) for i in range(n)]
| Remove unused tests for hash map | Remove unused tests for hash map
| Python | bsd-3-clause | alexrudy/Zeeko,alexrudy/Zeeko |
311a858ecbe7d34f9f68a18a3735db9da8b0e692 | tests/utils.py | tests/utils.py | import atexit
import tempfile
import sys
import mock
from selenium import webdriver
import os
def build_mock_mapping(name):
mock_driver = mock.Mock()
browser_mapping = {name: mock_driver}
mock_driver.return_value.name = name
return browser_mapping
test_driver = None
def get_driver():
global test_driver
if not test_driver:
options = webdriver.ChromeOptions()
options.add_argument('headless')
chrome = webdriver.Chrome(chrome_options=options)
atexit.register(chrome.quit)
chrome.delete_all_cookies()
chrome.switch_to.default_content()
return chrome
def make_temp_page(src):
f = tempfile.mktemp(".html")
fh = open(f, "w")
fh.write(src.replace("\n", ""))
fh.close()
atexit.register(lambda: os.remove(f))
return "file://%s" % f
def mock_open():
if sys.version_info >= (3, 0, 0):
return mock.patch("builtins.open")
return mock.patch("__builtin__.open")
| import atexit
import tempfile
import sys
import mock
from selenium import webdriver
import os
def build_mock_mapping(name):
mock_driver = mock.Mock()
browser_mapping = {name: mock_driver}
mock_driver.return_value.name = name
return browser_mapping
test_driver = None
def get_driver():
global test_driver
if not test_driver:
options = webdriver.ChromeOptions()
options.add_argument('headless')
test_driver = webdriver.Chrome(chrome_options=options)
atexit.register(test_driver.quit)
test_driver.delete_all_cookies()
test_driver.switch_to.default_content()
return test_driver
def make_temp_page(src):
f = tempfile.mktemp(".html")
fh = open(f, "w")
fh.write(src.replace("\n", ""))
fh.close()
atexit.register(lambda: os.remove(f))
return "file://%s" % f
def mock_open():
if sys.version_info >= (3, 0, 0):
return mock.patch("builtins.open")
return mock.patch("__builtin__.open")
| Fix global test driver initialization | Fix global test driver initialization
| Python | mit | alisaifee/holmium.core,alisaifee/holmium.core,alisaifee/holmium.core,alisaifee/holmium.core |
e8da41193238a7c677ec7ff8339095ec3e71be3b | track_count.py | track_count.py | from report import *
def show_track_count(S):
print "Track Count\t\tSubmission Count"
for (track, count) in S.track_count().items():
if track:
print "%s\t\t%s" % (track.ljust(20), count)
if __name__ == "__main__":
# S = ALL.standard().vote_cutoff(4.0)
S = ALL.standard().filter(lambda s: s.accepted)
show_track_count(S)
| from report import *
def show_track_count(S):
print "Track Count".ljust(40) + "\t\tSubmission Count"
items = S.track_count().items()
total = sum([count for (track, count) in items])
for (track, count) in sorted(items, cmp=lambda (a_track, a_count), (b_track, b_count): cmp(b_count, a_count)):
if track:
print "%s\t\t%s" % (track.ljust(40), count)
print "Total".ljust(40) + "\t\t%s" % total
if __name__ == "__main__":
# S = ALL.standard().vote_cutoff(4.0)
S = ALL.standard() #.filter(lambda s: s.accepted)
show_track_count(S)
| Fix formatting, show total and sort | Fix formatting, show total and sort | Python | epl-1.0 | tracymiranda/pc-scripts,tracymiranda/pc-scripts |
17015ecf48ec37909de6de2c299454fc89b592e9 | tests/test_gmaps.py | tests/test_gmaps.py | # -*- coding: UTF-8 -*-
from base import TestCase
from jinja2_maps.gmaps import gmaps_url
class TestGmaps(TestCase):
def test_url_dict(self):
url = "https://www.google.com/maps/place/12.34,56.78/@12.34,56.78,42z"
self.assertEquals(url,
gmaps_url(dict(latitude=12.34, longitude=56.78), zoom=42))
| # -*- coding: UTF-8 -*-
from base import TestCase
from jinja2_maps.gmaps import gmaps_url
class TestGmaps(TestCase):
def test_url_dict(self):
url = "https://www.google.com/maps/place/12.34,56.78/@12.34,56.78,42z"
self.assertEquals(url,
gmaps_url(dict(latitude=12.34, longitude=56.78), zoom=42))
def test_url_dict_no_zoom(self):
url = "https://www.google.com/maps/place/12.34,56.78/@12.34,56.78,16z"
self.assertEquals(url,
gmaps_url(dict(latitude=12.34, longitude=56.78)))
| Add failing test for URL without zoom | Add failing test for URL without zoom
| Python | mit | bfontaine/jinja2_maps |
73673598e1998252b16b48d31b800ab0fb441392 | pml/cs.py | pml/cs.py | """
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device
It uses channel access to comunicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
raise NotImplementedError()
def put(self, pv, value):
raise NotImplementedError()
class NullControlSystem(ControlSystem):
def __init__(self):
pass
def get(self, pv):
pass
def put(self, pv, value):
pass
| """
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device.
It uses channel access to comunicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
raise NotImplementedError()
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
raise NotImplementedError()
class NullControlSystem(ControlSystem):
""" Dummy control system to set the value of a pv."""
def __init__(self):
pass
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
pass
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
pass
| Add documentation for the control system | Add documentation for the control system
| Python | apache-2.0 | willrogers/pml,willrogers/pml |
3c735d18bdcff28bbdd765b131649ba57fb612b0 | hy/models/string.py | hy/models/string.py | # Copyright (c) 2013 Paul Tagliamonte <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from hy.models import HyObject
import sys
if sys.version_info[0] >= 3:
str_type = str
else:
str_type = unicode
class HyString(HyObject, str_type):
"""
Generic Hy String object. Helpful to store string literals from Hy
scripts. It's either a ``str`` or a ``unicode``, depending on the
Python version.
"""
def __new__(cls, value):
obj = str_type.__new__(cls, value)
return obj
| # Copyright (c) 2013 Paul Tagliamonte <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from hy.models import HyObject
import sys
if sys.version_info[0] >= 3:
str_type = str
else:
str_type = unicode
class HyString(HyObject, str_type):
"""
Generic Hy String object. Helpful to store string literals from Hy
scripts. It's either a ``str`` or a ``unicode``, depending on the
Python version.
"""
pass
| Revert "Revert "Remove useless code"" | Revert "Revert "Remove useless code""
This reverts commit 262da59c7790cdadd60ea9612bc9e3c1616863fd.
Conflicts:
hy/models/string.py
| Python | mit | ALSchwalm/hy,aisk/hy,paultag/hy,tianon/hy,hcarvalhoalves/hy,Foxboron/hy,Tritlo/hy,mtmiller/hy,michel-slm/hy,farhaven/hy,freezas/hy,zackmdavis/hy,tianon/hy,gilch/hy,aisk/hy,larme/hy,tuturto/hy,timmartin/hy,farhaven/hy,kirbyfan64/hy,farhaven/hy,algernon/hy,Foxboron/hy,hcarvalhoalves/hy,kirbyfan64/hy,adamfeuer/hy,jakirkham/hy,tianon/hy,larme/hy,larme/hy,kartikm/hy,aisk/hy |
60870a3e471637d44da32f3aac74064e4ca60208 | pyplot.py | pyplot.py | #!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
"""Module to bundle plotting scripts
`activate-global-python-argcomplete` must be run to enable auto completion """
import argparse
import argcomplete
import plotter
def parse_arguments():
"""Argument Parser, providing available scripts"""
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(
title = 'plotter',
description = 'available plotting scripts'
)
module_subparser = {}
for module_str in plotter.__all__:
module = __import__('.'.join(('plotter', module_str)), fromlist=module_str)
module_subparser[module_str] = subparsers.add_parser(
module_str, parents=[module.get_parser(add_help=False)],
help=module.__doc__.split('\n', 1)[0]
)
configure = subparsers.add_parser('configure', help='configure this script.')
argcomplete.autocomplete(parser)
args = parser.parse_args()
return args
if __name__ == '__main__':
args = parse_arguments()
from plotter.plotn import main
main(args)
| #!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
"""Module to bundle plotting scripts
`activate-global-python-argcomplete` must be run to enable auto completion """
import argparse
import argcomplete
import plotter
def parse_arguments():
"""Argument Parser, providing available scripts"""
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(
title = 'plotter',
description = 'available plotting scripts',
dest='used_subparser',
)
module_subparser = {}
for module_str in plotter.__all__:
module = __import__('plotter.' + module_str, fromlist=module_str)
module_subparser[module_str] = subparsers.add_parser(
module_str, parents=[module.get_parser(add_help=False)],
help=module.__doc__.split('\n', 1)[0]
)
module_subparser[module_str].set_defaults(run=module.main)
configure = subparsers.add_parser('configure', help='configure this script.')
argcomplete.autocomplete(parser)
args = parser.parse_args()
return args
if __name__ == '__main__':
args = parse_arguments()
args.run(args)
| Use `set_defaults` of subparser to launch scripts | Use `set_defaults` of subparser to launch scripts
| Python | mit | DerWeh/pyplot |
aac598d64fc0fa50cc068fc50173068e5d89b3fd | segpy/ext/numpyext.py | segpy/ext/numpyext.py | """Optional interoperability with Numpy."""
import numpy
NUMPY_DTYPES = {'ibm': numpy.dtype('f4'),
'l': numpy.dtype('i4'),
'h': numpy.dtype('i2'),
'f': numpy.dtype('f4'),
'b': numpy.dtype('i1')}
def make_dtype(data_sample_format):
"""Convert a SEG Y data sample format to a compatible numpy dtype.
Note :
IBM float data sample formats ('ibm') will correspond to IEEE float data types.
Args:
data_sample_format: A data sample format string.
Returns:
A numpy.dtype instance.
Raises:
ValueError: For unrecognised data sample format strings.
"""
try:
return NUMPY_DTYPES[data_sample_format]
except KeyError:
raise ValueError("Unknown data sample format string {!r}".format(data_sample_format))
| """Optional interoperability with Numpy."""
import numpy
NUMPY_DTYPES = {'ibm': numpy.dtype('f4'),
'int32': numpy.dtype('i4'),
'int16': numpy.dtype('i2'),
'float32': numpy.dtype('f4'),
'int8': numpy.dtype('i1')}
def make_dtype(data_sample_format):
"""Convert a SEG Y data sample format to a compatible numpy dtype.
Note :
IBM float data sample formats ('ibm') will correspond to IEEE float data types.
Args:
data_sample_format: A data sample format string.
Returns:
A numpy.dtype instance.
Raises:
ValueError: For unrecognised data sample format strings.
"""
try:
return NUMPY_DTYPES[data_sample_format]
except KeyError:
raise ValueError("Unknown data sample format string {!r}".format(data_sample_format))
| Update numpy dtypes extension for correct type codes. | Update numpy dtypes extension for correct type codes.
| Python | agpl-3.0 | hohogpb/segpy,stevejpurves/segpy,abingham/segpy,asbjorn/segpy,kjellkongsvik/segpy,Kramer477/segpy,kwinkunks/segpy |
2ba28c83de33ebc75f386d127d0c55e17248a94b | mapclientplugins/meshgeneratorstep/__init__.py | mapclientplugins/meshgeneratorstep/__init__.py |
"""
MAP Client Plugin
"""
__version__ = '0.2.0'
__author__ = 'Richard Christie'
__stepname__ = 'Mesh Generator'
__location__ = ''
# import class that derives itself from the step mountpoint.
from mapclientplugins.meshgeneratorstep import step
# Import the resource file when the module is loaded,
# this enables the framework to use the step icon.
from . import resources_rc |
"""
MAP Client Plugin
"""
__version__ = '0.2.0'
__author__ = 'Richard Christie'
__stepname__ = 'Mesh Generator'
__location__ = 'https://github.com/ABI-Software/mapclientplugins.meshgeneratorstep'
# import class that derives itself from the step mountpoint.
from mapclientplugins.meshgeneratorstep import step
# Import the resource file when the module is loaded,
# this enables the framework to use the step icon.
from . import resources_rc
| Add location to step metadata. | Add location to step metadata.
| Python | apache-2.0 | rchristie/mapclientplugins.meshgeneratorstep |
fb8c2fb065449a436dd8ffa11b469bb2f22a9ad1 | spider.py | spider.py | from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
| from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
| Add web crawling rule and dataset url regex exp | Add web crawling rule and dataset url regex exp
| Python | mit | MaxLikelihood/CODE |
7c69dc5bddc7136c274f039223686a92cffd693a | tomviz/python/setup.py | tomviz/python/setup.py | from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='[email protected]',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch@https://github.com/cjh1/python-json-patch/archive/tomviz.zip', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
| from setuptools import setup, find_packages
jsonpatch_uri \
= 'jsonpatch@https://github.com/cjh1/python-json-patch/archive/tomviz.zip'
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='[email protected]',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': [
jsonpatch_uri, 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
| Fix flake8 line length issue | Fix flake8 line length issue
Signed-off-by: Chris Harris <[email protected]>
| Python | bsd-3-clause | OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz |
a8283f5d2c1d970b7b676d491ad8c9472abfe667 | boardinghouse/tests/test_template_tag.py | boardinghouse/tests/test_template_tag.py | from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import *
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo')) | from django.test import TestCase
from .models import AwareModel, NaiveModel
from ..templatetags.boardinghouse import schema_name, is_schema_aware, is_shared_model
from ..models import Schema
class TestTemplateTags(TestCase):
def test_is_schema_aware_filter(self):
self.assertTrue(is_schema_aware(AwareModel()))
self.assertFalse(is_schema_aware(NaiveModel()))
def test_is_shared_model_filter(self):
self.assertFalse(is_shared_model(AwareModel()))
self.assertTrue(is_shared_model(NaiveModel()))
def test_schema_name_filter(self):
Schema.objects.create(name='Schema Name', schema='foo')
self.assertEquals('Schema Name', schema_name('foo'))
self.assertEquals('no schema', schema_name(None))
self.assertEquals('no schema', schema_name(''))
self.assertEquals('no schema', schema_name(False))
self.assertEquals('no schema', schema_name('foobar'))
self.assertEquals('no schema', schema_name('foo_'))
self.assertEquals('no schema', schema_name('foofoo')) | Fix tests since we changed imports. | Fix tests since we changed imports.
--HG--
branch : schema-invitations
| Python | bsd-3-clause | schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse |
7ce419de1f39050940b8399401a77b2096b74ca2 | dthm4kaiako/config/__init__.py | dthm4kaiako/config/__init__.py | """Configuration for Django system."""
__version__ = "0.10.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| """Configuration for Django system."""
__version__ = "0.11.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| Increment version number to 0.11.0 | Increment version number to 0.11.0
| Python | mit | uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers |
0f49230309ac115ff78eddd36bcd153d7f3b75ea | data_aggregator/threads.py | data_aggregator/threads.py | import queue
import threading
from multiprocessing import Queue
class ThreadPool():
def __init__(self, processes=20):
self.processes = processes
self.threads = [Thread() for _ in range(0, processes)]
self.mp_queue = Queue()
def yield_dead_threads(self):
for thread in self.threads:
if not thread.is_alive():
yield thread
def map(self, func, values):
completed_count = 0
values_iter = iter(values)
while completed_count < len(values):
try:
self.mp_queue.get_nowait()
completed_count += 1
except queue.Empty:
pass
for thread in self.yield_dead_threads():
try:
# run next job
job = next(values_iter)
thread.run(func, job, self.mp_queue)
except StopIteration:
break
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
pass
class Thread():
def __init__(self):
self.thread = None
def run(self, target, *args, **kwargs):
self.thread = threading.Thread(target=target,
args=args,
kwargs=kwargs)
self.thread.start()
def is_alive(self):
if self.thread:
return self.thread.is_alive()
else:
return False
| import queue
import threading
from multiprocessing import Queue
class ThreadPool():
def __init__(self, processes=20):
self.processes = processes
self.threads = [Thread() for _ in range(0, processes)]
self.mp_queue = Queue()
def yield_dead_threads(self):
for thread in self.threads:
if not thread.is_alive():
yield thread
def map(self, func, values):
completed_count = 0
values_iter = iter(values)
while completed_count < len(values):
try:
self.mp_queue.get_nowait()
completed_count += 1
except queue.Empty:
pass
for thread in self.yield_dead_threads():
try:
# run thread with the next value
value = next(values_iter)
thread.run(func, value, self.mp_queue)
except StopIteration:
break
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
pass
class Thread():
def __init__(self):
self.thread = None
def run(self, target, *args, **kwargs):
self.thread = threading.Thread(target=target,
args=args,
kwargs=kwargs)
self.thread.start()
def is_alive(self):
if self.thread:
return self.thread.is_alive()
else:
return False
| Remove reference to "job" from ThreadPool | Remove reference to "job" from ThreadPool
| Python | apache-2.0 | uw-it-aca/canvas-analytics,uw-it-aca/canvas-analytics,uw-it-aca/canvas-analytics,uw-it-aca/canvas-analytics |
f9ca473abf7aea3cc146badf2d45ae715f635aac | kqueen_ui/server.py | kqueen_ui/server.py | from .config import current_config
from flask import Flask
from flask import redirect
from flask import url_for
from flask.ext.babel import Babel
from kqueen_ui.blueprints.registration.views import registration
from kqueen_ui.blueprints.ui.views import ui
from werkzeug.contrib.cache import SimpleCache
import logging
import os
logger = logging.getLogger(__name__)
cache = SimpleCache()
def create_app(config_file=None):
app = Flask(__name__, static_folder='./asset/static')
app.register_blueprint(ui, url_prefix='/ui')
app.register_blueprint(registration, url_prefix='/registration')
# load configuration
config = current_config(config_file)
app.config.from_mapping(config.to_dict())
app.logger.setLevel(getattr(logging, app.config.get('LOG_LEVEL')))
app.logger.info('Loading configuration from {}'.format(config.source_file))
Babel(app)
return app
app = create_app()
@app.route('/')
def root():
return redirect(url_for('ui.index'), code=302)
def run():
logger.debug('kqueen_ui starting')
app.run(
host=app.config.get('KQUEEN_UI_HOST'),
port=int(app.config.get('KQUEEN_UI_PORT'))
)
| from .config import current_config
from flask import Flask
from flask import redirect
from flask import url_for
from flask.ext.babel import Babel
from kqueen_ui.blueprints.registration.views import registration
from kqueen_ui.blueprints.ui.views import ui
from werkzeug.contrib.cache import SimpleCache
import logging
import os
logger = logging.getLogger(__name__)
cache = SimpleCache()
def create_app(config_file=None):
app = Flask(__name__, static_folder='./asset/static')
app.register_blueprint(ui, url_prefix='/ui')
app.register_blueprint(registration, url_prefix='/registration')
# load configuration
config = current_config(config_file)
app.config.from_mapping(config.to_dict())
app.logger.setLevel(getattr(logging, app.config.get('LOG_LEVEL')))
app.logger.info('Loading configuration from {}'.format(config.source_file))
Babel(app)
return app
app = create_app()
@app.route('/')
def root():
return redirect(url_for('ui.index'), code=302)
def run():
logger.debug('kqueen_ui starting')
app.run(
host=app.config.get('HOST'),
port=int(app.config.get('PORT'))
)
| Use correct parameter for HOST and PORT | Use correct parameter for HOST and PORT
| Python | mit | atengler/kqueen-ui,atengler/kqueen-ui,atengler/kqueen-ui,atengler/kqueen-ui |
de962f504db139500573457264a3dd1e257e8cc0 | wagtail_mvc/decorators.py | wagtail_mvc/decorators.py | # -*- coding: utf-8 -*-
"""
wagtail_mvc decorators
"""
from __future__ import unicode_literals
def wagtail_mvc_url(func):
"""
Decorates an existing method responsible for generating a url
prepends the parent url to the generated url to account for
:param func: The method to decorate
:return: Full url
"""
def outer(self, *args, **kwargs):
parts = self.get_parent().url.split('/')
parts += func(self, *args, **kwargs).split('/')
return '/{0}/'.format('/'.join([part for part in parts if part]))
return outer
| # -*- coding: utf-8 -*-
"""
wagtail_mvc decorators
"""
from __future__ import unicode_literals
def wagtail_mvc_url(*decorator_args, **decorator_kwargs):
"""
Decorates an existing method responsible for generating a url
prepends the parent url to the generated url to account for
:param func: The method to decorate
:return: Full url
"""
def decorator(func):
def outer(self, *args, **kwargs):
parent_attr = decorator_kwargs.get('parent_attr')
if parent_attr:
parent = getattr(self, parent_attr, None)
else:
parent = self.get_parent()
parts = parent.url.split('/')
parts += func(self, *args, **kwargs).split('/')
return '/{0}/'.format('/'.join([part for part in parts if part]))
return outer
if len(decorator_args) == 1 and callable(decorator_args[0]):
# We assume the decorator function has not been called
# or passed any arguments and return the result of calling
# the decorator function
return decorator(decorator_args[0])
return decorator
| Allow decorator to be called with optional args | Allow decorator to be called with optional args
| Python | mit | fatboystring/Wagtail-MVC,fatboystring/Wagtail-MVC |
232bc2bb83190482c1125ca5879ffb6f11d67b40 | puzzlehunt_server/settings/travis_settings.py | puzzlehunt_server/settings/travis_settings.py | from .base_settings import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
SECRET_KEY = '$1B&VUf$OdUEfMJXd40qdakA36@%2NE_41Dz9tFs6l=z4v_3P-'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'puzzlehunt_db',
'HOST': '127.0.0.1',
'USER': 'root',
'PASSWORD': '',
'OPTIONS': {'charset': 'utf8mb4'},
}
}
INTERNAL_IPS = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
ALLOWED_HOSTS = ['*']
| from .base_settings import *
import os
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
SECRET_KEY = '$1B&VUf$OdUEfMJXd40qdakA36@%2NE_41Dz9tFs6l=z4v_3P-'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'puzzlehunt_db',
'HOST': '127.0.0.1',
'USER': 'root',
'PASSWORD': '',
'OPTIONS': {'charset': 'utf8mb4'},
}
}
INTERNAL_IPS = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
ALLOWED_HOSTS = ['*']
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
},
},
} | Fix logging for testing environment | Fix logging for testing environment
| Python | mit | dlareau/puzzlehunt_server,dlareau/puzzlehunt_server,dlareau/puzzlehunt_server,dlareau/puzzlehunt_server |
fe676a041b793f55d33bfd27eb2b4fdfe7d93bb6 | twilio/rest/resources/pricing/__init__.py | twilio/rest/resources/pricing/__init__.py | from .voice import (
Voice,
VoiceCountry,
VoiceCountries,
VoiceNumber,
VoiceNumbers,
)
from .phone_numbers import (
PhoneNumberCountries,
PhoneNumberCountry,
PhoneNumbers,
)
| from twilio.rest.pricing.voice import (
Voice,
VoiceCountry,
VoiceCountries,
VoiceNumber,
VoiceNumbers,
)
from twilio.rest.pricing.phone_number import (
PhoneNumberCountries,
PhoneNumberCountry,
PhoneNumber,
)
| Change import path for pricing | Change import path for pricing
| Python | mit | tysonholub/twilio-python,twilio/twilio-python |
0e779581be648ca80eea6b97f9963606d85659b9 | opensfm/commands/__init__.py | opensfm/commands/__init__.py |
import extract_metadata
import detect_features
import match_features
import create_tracks
import reconstruct
import mesh
import undistort
import compute_depthmaps
import export_ply
import export_openmvs
opensfm_commands = [
extract_metadata,
detect_features,
match_features,
create_tracks,
reconstruct,
mesh,
undistort,
compute_depthmaps,
export_ply,
export_openmvs,
]
|
import extract_metadata
import detect_features
import match_features
import create_tracks
import reconstruct
import mesh
import undistort
import compute_depthmaps
import export_ply
import export_openmvs
import export_visualsfm
opensfm_commands = [
extract_metadata,
detect_features,
match_features,
create_tracks,
reconstruct,
mesh,
undistort,
compute_depthmaps,
export_ply,
export_openmvs,
export_visualsfm,
]
| Add exporter to VisualSfM format | Add exporter to VisualSfM format
| Python | bsd-2-clause | BrookRoberts/OpenSfM,mapillary/OpenSfM,sunbingfengPI/OpenSFM_Test,BrookRoberts/OpenSfM,sunbingfengPI/OpenSFM_Test,sunbingfengPI/OpenSFM_Test,sunbingfengPI/OpenSFM_Test,oscarlorentzon/OpenSfM,BrookRoberts/OpenSfM,oscarlorentzon/OpenSfM,oscarlorentzon/OpenSfM,oscarlorentzon/OpenSfM,mapillary/OpenSfM,mapillary/OpenSfM,BrookRoberts/OpenSfM,BrookRoberts/OpenSfM,mapillary/OpenSfM,mapillary/OpenSfM,sunbingfengPI/OpenSFM_Test,oscarlorentzon/OpenSfM |
416575ca3cc684925be0391b43b98a9fa1d9f909 | ObjectTracking/testTrack.py | ObjectTracking/testTrack.py |
from SimpleCV import ColorSegmentation, Image, Camera, VirtualCamera, Display
# Open reference video
cam=VirtualCamera('/media/bat/DATA/Baptiste/Nautilab/kite_project/zenith-wind-power-read-only/KiteControl-Qt/videos/kiteFlying.avi','video')
# Select reference image
img=cam.getFrame(50)
modelImage = img.crop(255, 180, 70, 20)
modelImage = Image('kite_detail.jpg')
ts = []
disp=Display()
for i in range(0,50):
img = cam.getImage()
while (disp.isNotDone()):
img = cam.getImage()
bb = (255, 180, 70, 20)
ts = img.track("camshift",ts,modelImage,bb, num_frames = 1)
# now here in first loop iteration since ts is empty,
# img0 and bb will be considered.
# New tracking object will be created and added in ts (TrackSet)
# After first iteration, ts is not empty and hence the previous
# image frames and bounding box will be taken from ts and img0
# and bb will be ignored.
ts.drawPath()
img.show()
|
from SimpleCV import ColorSegmentation, Image, Camera, VirtualCamera, Display, Color
# Open reference video
cam=VirtualCamera('/media/bat/DATA/Baptiste/Nautilab/kite_project/zenith-wind-power-read-only/KiteControl-Qt/videos/kiteFlying.avi','video')
# Select reference image
img=cam.getFrame(50)
modelImage = img.crop(255, 180, 70, 20)
modelImage = Image('kite_detail.jpg')
ts = []
disp=Display()
for i in range(0,50):
img = cam.getImage()
while (disp.isNotDone()):
img = cam.getImage()
bb = (255, 180, 70, 20)
ts = img.track("camshift",ts,modelImage,bb, num_frames = 1)
modelImage = Image('kite_detail.jpg')
# now here in first loop iteration since ts is empty,
# img0 and bb will be considered.
# New tracking object will be created and added in ts (TrackSet)
# After first iteration, ts is not empty and hence the previous
# image frames and bounding box will be taken from ts and img0
# and bb will be ignored.
ts.draw()
ts.drawBB()
ts.showCoordinates()
img.show()
| Save the image of the selection (to be able to reinitialise later) | Save the image of the selection (to be able to reinitialise later)
| Python | mit | baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite |
a0ac251bec891a6c511ea1c0b11faa6525b81545 | bfg9000/languages.py | bfg9000/languages.py | ext2lang = {
'.cpp': 'c++',
'.c': 'c',
}
| ext2lang = {
'.c' : 'c',
'.cpp': 'c++',
'.cc' : 'c++',
'.cp' : 'c++',
'.cxx': 'c++',
'.CPP': 'c++',
'.c++': 'c++',
'.C' : 'c++',
}
| Support more C++ extensions by default | Support more C++ extensions by default
| Python | bsd-3-clause | jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000 |
a41a76b7e4cdf4a8cbc533550963921839dcd998 | mopidy_pandora/rpc.py | mopidy_pandora/rpc.py | import json
import requests
class RPCClient(object):
def __init__(self, hostname, port):
self.url = 'http://' + str(hostname) + ':' + str(port) + '/mopidy/rpc'
self.id = 0
def _do_rpc(self, method, params=None):
self.id += 1
data = { 'method': method, 'jsonrpc': '2.0', 'id': self.id }
if params is not None:
data['params'] = params
return requests.request('POST', self.url, data=json.dumps(data), headers={'Content-Type': 'application/json'})
def set_repeat(self):
self._do_rpc('core.tracklist.set_repeat', {'value': True})
def get_current_track_uri(self):
response = self._do_rpc('core.playback.get_current_tl_track')
return response.json()['result']['track']['uri']
| import json
import requests
class RPCClient(object):
def __init__(self, hostname, port):
self.url = 'http://' + str(hostname) + ':' + str(port) + '/mopidy/rpc'
self.id = 0
def _do_rpc(self, method, params=None):
self.id += 1
data = {'method': method, 'jsonrpc': '2.0', 'id': self.id}
if params is not None:
data['params'] = params
return requests.request('POST', self.url, data=json.dumps(data), headers={'Content-Type': 'application/json'})
def set_repeat(self):
self._do_rpc('core.tracklist.set_repeat', {'value': True})
def get_current_track_uri(self):
response = self._do_rpc('core.playback.get_current_tl_track')
return response.json()['result']['track']['uri']
| Fix formatting errors reported by flake8. | Fix formatting errors reported by flake8.
| Python | apache-2.0 | rectalogic/mopidy-pandora,jcass77/mopidy-pandora |
0788aaf316a2b200c5283fe9f5f902a8da701403 | calexicon/internal/tests/test_julian.py | calexicon/internal/tests/test_julian.py | import unittest
from calexicon.internal.julian import distant_julian_to_gregorian
class TestJulian(unittest.TestCase):
def test_distant_julian_to_gregorian(self):
self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))
| import unittest
from calexicon.internal.julian import distant_julian_to_gregorian, julian_to_gregorian
class TestJulian(unittest.TestCase):
def test_distant_julian_to_gregorian(self):
self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))
def test_julian_to_gregorian(self):
self.assertEqual(julian_to_gregorian(1984, 2, 29), (1984, 3, 13))
| Add a test for julian_to_gregorian. | Add a test for julian_to_gregorian.
| Python | apache-2.0 | jwg4/qual,jwg4/calexicon |
7f1542bc52438e6c9796e776603553d7f5a9df7f | pySpatialTools/utils/util_classes/__init__.py | pySpatialTools/utils/util_classes/__init__.py |
"""
Util classes
------------
Classes which represent data types useful for the package pySpatialTools.
"""
from spdesc_mapper import Sp_DescriptorMapper
from spatialelements import SpatialElementsCollection, Locations
from Membership import Membership
from general_mapper import General1_1Mapper
from mapper_vals_i import Map_Vals_i, create_mapper_vals_i
|
"""
Util classes
------------
Classes which represent data types useful for the package pySpatialTools.
"""
from spdesc_mapper import Sp_DescriptorMapper
from spatialelements import SpatialElementsCollection, Locations
from Membership import Membership
from mapper_vals_i import Map_Vals_i, create_mapper_vals_i
| Debug in importing deleted module. | Debug in importing deleted module.
| Python | mit | tgquintela/pySpatialTools,tgquintela/pySpatialTools |
62a76827ecf7c148101b62925dea04f63709012a | sublime/User/update_user_settings.py | sublime/User/update_user_settings.py | import json
import urllib2
import sublime
import sublime_plugin
GIST_URL = u'https://raw.githubusercontent.com/RomuloOliveira/dot-files/master/sublime/User/Preferences.sublime-settings' # noqa
class UpdateUserSettingsCommand(sublime_plugin.TextCommand):
def run(self, edit):
gist_settings = self._get_settings_from_gist(GIST_URL)
sublime_settings = sublime.load_settings(
'Preferences.sublime-settings'
)
self._update_settings(gist_settings, sublime_settings)
@staticmethod
def _get_settings_from_gist(url):
try:
response = urllib2.urlopen(url)
settings = json.loads(response.read())
except (urllib2.URLError, ValueError) as e:
sublime.error_message('Could not retrieve settings: {}'.format(e))
raise
return settings
@staticmethod
def _update_settings(settings_dict, sublime_settings):
for key, value in settings_dict.items():
sublime_settings.set(key, value)
sublime.save_settings('Preferences.sublime-settings')
sublime.status_message('Settings updated')
| import json
import urllib
import sublime
import sublime_plugin
GIST_URL = 'https://raw.githubusercontent.com/RomuloOliveira/dot-files/master/sublime/User/Preferences.sublime-settings' # noqa
class UpdateUserSettingsCommand(sublime_plugin.TextCommand):
def run(self, edit):
gist_settings = self._get_settings_from_gist(GIST_URL)
sublime_settings = sublime.load_settings(
'Preferences.sublime-settings'
)
self._update_settings(gist_settings, sublime_settings)
@staticmethod
def _get_settings_from_gist(url):
try:
response = urllib.request.urlopen(url)
settings = json.loads(response.read().decode('utf-8'))
except (urllib.error.URLError, ValueError) as e:
sublime.error_message('Could not retrieve settings: {}'.format(e))
raise
return settings
@staticmethod
def _update_settings(settings_dict, sublime_settings):
for key, value in settings_dict.items():
sublime_settings.set(key, value)
sublime.save_settings('Preferences.sublime-settings')
sublime.status_message('Settings updated')
| Update command to work with sublime 3 | Update command to work with sublime 3
| Python | apache-2.0 | RomuloOliveira/dot-files,RomuloOliveira/unix-files,RomuloOliveira/dot-files |
48fab607b1152b8b93cdb0cc0dc5c300dafecf4c | common/hil_slurm_settings.py | common/hil_slurm_settings.py | """
MassOpenCloud / Hardware Isolation Layer (HIL)
Slurm / HIL Control Settings
May 2017, Tim Donahue [email protected]
"""
DEBUG = True
SLURM_INSTALL_DIR = '/usr/local/bin/'
HIL_SLURMCTLD_PROLOG_LOGFILE = '/var/log/moc_hil_ulsr/hil_prolog.log'
HIL_MONITOR_LOGFILE = '/var/log/moc_hil_ulsr/hil_monitor.log'
HIL_ENDPOINT = "http://128.31.28.156:80"
HIL_USER = 'admin'
HIL_PW = 'NavedIsSleepy'
HIL_SLURM_PROJECT = 'slurm'
HIL_PARTITION_PREFIX = 'HIL_partition'
HIL_PARTITION_PREFIX = 'debug'
HIL_RESERVATION_DEFAULT_DURATION = 24 * 60 * 60 # Seconds
HIL_RESERVATION_GRACE_PERIOD = 4 * 60 * 60 # Seconds
# Partition validation controls
RES_CHECK_DEFAULT_PARTITION = False
RES_CHECK_EXCLUSIVE_PARTITION = False
RES_CHECK_SHARED_PARTITION = False
RES_CHECK_PARTITION_STATE = True
# EOF
| """
MassOpenCloud / Hardware Isolation Layer (HIL)
Slurm / HIL Control Settings
May 2017, Tim Donahue [email protected]
"""
DEBUG = True
SLURM_INSTALL_DIR = '/usr/bin/'
HIL_SLURMCTLD_PROLOG_LOGFILE = '/var/log/moc_hil_ulsr/hil_prolog.log'
HIL_MONITOR_LOGFILE = '/var/log/moc_hil_ulsr/hil_monitor.log'
HIL_ENDPOINT = "http://128.31.28.156:80"
HIL_USER = 'admin'
HIL_PW = 'NavedIsSleepy'
HIL_SLURM_PROJECT = 'slurm'
HIL_PARTITION_PREFIX = 'HIL_partition'
HIL_RESERVATION_DEFAULT_DURATION = 24 * 60 * 60 # Seconds
HIL_RESERVATION_GRACE_PERIOD = 4 * 60 * 60 # Seconds
# Partition validation controls
RES_CHECK_DEFAULT_PARTITION = False
RES_CHECK_EXCLUSIVE_PARTITION = False
RES_CHECK_SHARED_PARTITION = False
RES_CHECK_PARTITION_STATE = True
# EOF
| Update default settings to match Slurm 17.X / CentOS installation | Update default settings to match Slurm 17.X / CentOS installation
| Python | mit | mghpcc-projects/user_level_slurm_reservations,mghpcc-projects/user_level_slurm_reservations |
0ba9fa847a8b605363b298ecad40cb2fc5870cbb | build_modules.py | build_modules.py | import os, sys, subprocess, shutil
def check_for_module_builder():
if os.path.exists("voxel_native/scripts/"):
return
print("Downloading P3DModuleBuilder...")
cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
try:
output = subprocess.check_output(cmd, stderr=sys.stderr)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Couldn't download P3DModuleBuilder.")
sys.exit(-1)
def build_modules():
print("Building native modules...")
check_for_module_builder()
cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
try:
output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Error building the native modules.")
sys.exit(-1)
shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
if __name__ == "__main__":
build_modules()
| import os, sys, subprocess, shutil
def check_for_module_builder():
if os.path.exists("voxel_native/scripts/"):
return
print("Downloading P3DModuleBuilder...")
cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
try:
output = subprocess.check_output(cmd, stderr=sys.stderr)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Couldn't download P3DModuleBuilder.")
sys.exit(-1)
def build_modules():
print("Building native modules...")
check_for_module_builder()
cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
try:
output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Error building the native modules.")
sys.exit(-1)
from voxel_native.scripts.common import is_macos, is_windows, is_linux
if is_windows():
shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
elif is_macos() or is_linux():
shutil.move("voxel_native/voxel_native.so", "voxel/voxel_native.so")
if __name__ == "__main__":
build_modules()
| Update build script to work correctly on macOS and linux. | Update build script to work correctly on macOS and linux.
| Python | mit | treamology/panda3d-voxels,treamology/panda3d-voxels,treamology/panda3d-voxels |
115a71995f2ceae667c05114da8e8ba21c25c402 | syncplay/__init__.py | syncplay/__init__.py | version = '1.6.5'
revision = ' release'
milestone = 'Yoitsu'
release_number = '86'
projectURL = 'https://syncplay.pl/'
| version = '1.6.6'
revision = ' development'
milestone = 'Yoitsu'
release_number = '87'
projectURL = 'https://syncplay.pl/'
| Move to 1.6.6 dev for further development | Move to 1.6.6 dev for further development | Python | apache-2.0 | alby128/syncplay,alby128/syncplay,Syncplay/syncplay,Syncplay/syncplay |
d6b1f7c03ec2b32823fe2c4214e6521e8074cd9f | commands/join.py | commands/join.py | from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['join']
helptext = "Makes me join another channel, if I'm allowed to at least"
def execute(self, message):
"""
:type message: IrcMessage
"""
replytext = u""
if message.messagePartsLength < 1:
replytext = u"Please provide a channel for me to join"
else:
allowedChannels = message.bot.factory.settings.get('connection', 'allowedChannels').split(',')
channel = message.messageParts[0]
if channel.startswith('#'):
channel = channel[1:]
if channel not in allowedChannels and not message.bot.factory.isUserAdmin(message.user):
replytext = u"I'm sorry, I'm not allowed to go there. Please ask my admin(s) for permission"
else:
channel = '#' + channel
replytext = u"All right, I'll go to {}. See you there!".format(channel)
message.bot.join(channel)
message.bot.say(message.source, replytext) | from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['join']
helptext = "Makes me join another channel, if I'm allowed to at least"
def execute(self, message):
"""
:type message: IrcMessage
"""
replytext = u""
if message.messagePartsLength < 1:
replytext = u"Please provide a channel for me to join"
else:
allowedChannels = message.bot.factory.settings.get('connection', 'allowedChannels').split(',')
channel = message.messageParts[0].encode('utf8') #Make sure it's a str and not unicode, otherwise Twisted chokes on it
if channel.startswith('#'):
channel = channel[1:]
if channel not in allowedChannels and not message.bot.factory.isUserAdmin(message.user):
replytext = u"I'm sorry, I'm not allowed to go there. Please ask my admin(s) for permission"
else:
channel = '#' + channel
replytext = u"All right, I'll go to {}. See you there!".format(channel)
message.bot.join(channel)
message.bot.say(message.source, replytext) | Make sure the 'channel' argument is not Unicode when we send it, because Twisted doesn't like that | [Join] Make sure the 'channel' argument is not Unicode when we send it, because Twisted doesn't like that
| Python | mit | Didero/DideRobot |
521e24fa115e69bca39d7cca89ce42e8efa3b077 | tools/perf_expectations/PRESUBMIT.py | tools/perf_expectations/PRESUBMIT.py | #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for perf_expectations.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
UNIT_TESTS = [
'tests.perf_expectations_unittest',
]
PERF_EXPECTATIONS = 'perf_expectations.json'
def CheckChangeOnUpload(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == input_api.os_path.basename(path):
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
return output
def CheckChangeOnCommit(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == input_api.os_path.basename(path):
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
output.extend(input_api.canned_checks.CheckDoNotSubmit(input_api,
output_api))
return output
| #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for perf_expectations.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
details on the presubmit API built into gcl.
"""
UNIT_TESTS = [
'tests.perf_expectations_unittest',
]
PERF_EXPECTATIONS = 'tools/perf_expectations/perf_expectations.json'
def CheckChangeOnUpload(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == path:
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
return output
def CheckChangeOnCommit(input_api, output_api):
run_tests = False
for path in input_api.LocalPaths():
if PERF_EXPECTATIONS == path:
run_tests = True
output = []
if run_tests:
output.extend(input_api.canned_checks.RunPythonUnitTests(input_api,
output_api,
UNIT_TESTS))
output.extend(input_api.canned_checks.CheckDoNotSubmit(input_api,
output_api))
return output
| Use full pathname to perf_expectations in test. | Use full pathname to perf_expectations in test.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/266055
git-svn-id: http://src.chromium.org/svn/trunk/src@28770 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
Former-commit-id: f9d8e0a8dae19e482d3c435a76b4e38403e646b5 | Python | bsd-3-clause | meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser,meego-tablet-ux/meego-app-browser |
9dd71fe94b57d56a422e784c10c463c22add90c3 | configuration/development.py | configuration/development.py | import pathlib
_basedir = pathlib.Path(__file__).parents[1]
SQLALCHEMY_DATABASE_URI = (
'sqlite:///' + str(_basedir.joinpath(pathlib.PurePath('app.db')).resolve())
)
SQLALCHEMY_TRACK_MODIFICATIONS = True
SECRET_KEY = 'INSECURE'
MAIL_SERVER = 'localhost'
MAIL_PORT = '25'
MAIL_DEFAULT_SENDER = '[email protected]'
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"verbose": {
"format": '%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'
},
},
"handlers": {
"file": {
"level": "DEBUG",
"formatter": "verbose",
"class": "iis.log.LockingFileHandler",
"filename": "/home/max/Projects/iis/iis.log"
},
},
"loggers": {
"iis": {
"level": "DEBUG",
"handlers": ["file"]
},
}
}
LOGGER_NAME = "iis"
| import pathlib
_basedir = pathlib.Path(__file__).parents[1]
SQLALCHEMY_DATABASE_URI = (
'sqlite:///' + str(_basedir.joinpath(pathlib.PurePath('app.db')).resolve())
)
SQLALCHEMY_TRACK_MODIFICATIONS = True
SECRET_KEY = 'INSECURE'
MAIL_SERVER = 'localhost'
MAIL_PORT = '25'
MAIL_DEFAULT_SENDER = '[email protected]'
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"verbose": {
"format": '%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'
},
},
"handlers": {
"file": {
"level": "DEBUG",
"formatter": "verbose",
"class": "iis.log.LockingFileHandler",
"filename": "./iis.log"
},
},
"loggers": {
"iis": {
"level": "DEBUG",
"handlers": ["file"]
},
}
}
LOGGER_NAME = "iis"
| Fix absolute reference to logfile location | Fix absolute reference to logfile location
| Python | agpl-3.0 | interactomix/iis,interactomix/iis |
b326d43a94058390a559c4c9f55e9cd88dcac747 | adhocracy4/emails/mixins.py | adhocracy4/emails/mixins.py | from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
with open(filename, 'rb') as f:
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
| from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
from .base import EmailBase
class PlatformEmailMixin:
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = (
finders.find('images/email_logo.png')
or finders.find('images/email_logo.svg')
)
if filename:
if filename.endswith('.png'):
imagetype = 'png'
else:
imagetype = 'svg+xml'
with open(filename, 'rb') as f:
logo = MIMEImage(f.read(), imagetype)
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
class SyncEmailMixin(EmailBase):
"""Send Emails synchronously."""
@classmethod
def send(cls, object, *args, **kwargs):
"""Call dispatch immediately"""
return cls().dispatch(object, *args, **kwargs)
| Set propper mimetype for image attachment | Set propper mimetype for image attachment
| Python | agpl-3.0 | liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4 |
07a04f63da897ae687fd90039d379482a13372e2 | txircd/modules/rfc/response_error.py | txircd/modules/rfc/response_error.py | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ErrorResponse(ModuleData):
implements(IPlugin, IModuleData)
name = "errorResponse"
core = True
def actions(self):
return [ ("quit", 10, self.sendError) ]
def sendError(self, user, reason):
user.sendMessage("ERROR", ":Closing Link: {}@{} [{}]".format(user.ident, user.host, reason), to=None, prefix=None)
errorResponse = ErrorResponse() | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ErrorResponse(ModuleData):
implements(IPlugin, IModuleData)
name = "ErrorResponse"
core = True
def actions(self):
return [ ("quit", 10, self.sendError) ]
def sendError(self, user, reason):
user.sendMessage("ERROR", ":Closing Link: {}@{} [{}]".format(user.ident, user.host, reason), to=None, prefix=None)
errorResponse = ErrorResponse() | Standardize module names on leading capital letters | Standardize module names on leading capital letters
| Python | bsd-3-clause | ElementalAlchemist/txircd,Heufneutje/txircd |
cb7aeb60fcff7f8fa6ac9e12282bf7dcd71617d8 | heat/tests/clients/test_ceilometer_client.py | heat/tests/clients/test_ceilometer_client.py | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ceilometerclient.v2 import client as cc
from heat.tests import common
from heat.tests import utils
class CeilometerClientPluginTest(common.HeatTestCase):
def test_create(self):
self.patchobject(cc.Client, '_get_alarm_client')
context = utils.dummy_context()
plugin = context.clients.client_plugin('ceilometer')
client = plugin.client()
self.assertIsNotNone(client.alarms)
| #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ceilometerclient.v2 import client as cc
from heat.tests import common
from heat.tests import utils
class CeilometerClientPluginTest(common.HeatTestCase):
def test_create(self):
self.patchobject(cc.Client, '_get_redirect_client')
context = utils.dummy_context()
plugin = context.clients.client_plugin('ceilometer')
client = plugin.client()
self.assertIsNotNone(client.alarms)
| Fix ceilometerclient mocks for 2.8.0 release | Fix ceilometerclient mocks for 2.8.0 release
The function name changed in Iae7d60e1cf139b79e74caf81ed7bdbd0bf2bc473.
Change-Id: I1bbe3f32090b9b1fd7508b1b26665bceeea21f49
| Python | apache-2.0 | openstack/heat,noironetworks/heat,noironetworks/heat,openstack/heat |
12cf7d220408971509b57cb3a60f2d87b4a37477 | facebook_auth/models.py | facebook_auth/models.py | from uuid import uuid1
from django.conf import settings
from django.contrib.auth import models as auth_models
from django.db import models
import facepy
import simplejson
from facebook_auth import utils
class FacebookUser(auth_models.User):
user_id = models.BigIntegerField(unique=True)
access_token = models.TextField(blank=True, null=True)
app_friends = models.ManyToManyField('self')
@property
def graph(self):
return facepy.GraphAPI(self.access_token)
@property
def js_session(self):
return simplejson.dumps({
'access_token': self.access_token,
'uid': self.user_id
})
@property
def friends(self):
return utils.get_from_graph_api(self.graph, "me/friends")['data']
def update_app_friends(self):
friends = self.friends
friends_ids = [f['id'] for f in friends]
self.app_friends.clear()
self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))
def get_auth_address(request, redirect_to, scope=''):
state = unicode(uuid1())
request.session['state'] = state
return 'https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=%s&state=%s' % (
settings.FACEBOOK_APP_ID, redirect_to, scope, state
)
| from django.contrib.auth import models as auth_models
from django.db import models
import facepy
import simplejson
from facebook_auth import utils
class FacebookUser(auth_models.User):
user_id = models.BigIntegerField(unique=True)
access_token = models.TextField(blank=True, null=True)
app_friends = models.ManyToManyField('self')
@property
def graph(self):
return facepy.GraphAPI(self.access_token)
@property
def js_session(self):
return simplejson.dumps({
'access_token': self.access_token,
'uid': self.user_id
})
@property
def friends(self):
return utils.get_from_graph_api(self.graph, "me/friends")['data']
def update_app_friends(self):
friends = self.friends
friends_ids = [f['id'] for f in friends]
self.app_friends.clear()
self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))
| Revert "Add support for server side authentication." | Revert "Add support for server side authentication."
This reverts commit 10ae930f6f14c2840d0b87cbec17054b4cc318d2.
Change-Id: Ied52c31f6f28ad635a6e5dae2171df22dc91e42c
Reviewed-on: http://review.pozytywnie.pl:8080/5153
Reviewed-by: Tomasz Wysocki <[email protected]>
Tested-by: Tomasz Wysocki <[email protected]>
| Python | mit | jgoclawski/django-facebook-auth,pozytywnie/django-facebook-auth,pozytywnie/django-facebook-auth,jgoclawski/django-facebook-auth |
4378aef47a7e2b80a4a22af2bbe69ce4b780ab6d | pokr/views/login.py | pokr/views/login.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
from flask import g, render_template, redirect, request, url_for
from flask.ext.login import current_user, login_required, logout_user
from social.apps.flask_app.template_filters import backends
def register(app):
@login_required
@app.route('/done/')
def done():
return redirect(request.referrer or url_for('main'))
@app.route('/logout')
def logout():
logout_user()
return redirect(request.referrer or url_for('main'))
@app.before_request
def global_user():
g.user = current_user
@app.context_processor
def inject_user():
user = getattr(g, 'user')
return {
'user': user,
'is_logged': user and not user.is_anonymous()
}
app.context_processor(backends)
| #!/usr/bin/env python
# -*- encoding: utf-8 -*-
from flask import g, render_template, redirect, request, url_for
from flask.ext.login import current_user, login_required, logout_user
from social.apps.flask_app.template_filters import backends
def register(app):
@login_required
@app.route('/done/')
def done():
return redirect(request.referrer or url_for('main'))
@app.route('/logout')
def logout():
logout_user()
return redirect(request.referrer or url_for('main'))
@app.before_request
def global_user():
g.user = current_user
@app.context_processor
def inject_user():
user = getattr(g, 'user')
return {
'user': user,
'is_logged': user and user.is_authenticated
}
app.context_processor(backends)
| Fix due to Flask-Login version up | Fix due to Flask-Login version up
| Python | apache-2.0 | teampopong/pokr.kr,teampopong/pokr.kr,teampopong/pokr.kr,teampopong/pokr.kr |
f935eb48517627df679605aaee834165380d74db | django_db_geventpool/backends/postgresql_psycopg2/creation.py | django_db_geventpool/backends/postgresql_psycopg2/creation.py | # coding=utf-8
import django
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as OriginalDatabaseCreation
class DatabaseCreationMixin16(object):
def _create_test_db(self, verbosity, autoclobber):
self.connection.closeall()
return super(DatabaseCreationMixin16, self)._create_test_db(verbosity, autoclobber)
def _destroy_test_db(self, test_database_name, verbosity):
self.connection.closeall()
return super(DatabaseCreationMixin16, self)._destroy_test_db(test_database_name, verbosity)
class DatabaseCreationMixin17(object):
def _create_test_db(self, verbosity, autoclobber, keepdb=False):
self.connection.closeall()
return super(DatabaseCreationMixin17, self)._create_test_db(verbosity, autoclobber, keepdb)
def _destroy_test_db(self, test_database_name, verbosity, keepdb=False):
self.connection.closeall()
return super(DatabaseCreationMixin17, self)._destroy_test_db(test_database_name, verbosity, keepdb)
if django.VERSION >= (1, 7):
class DatabaseCreationMixin(DatabaseCreationMixin17):
pass
else:
class DatabaseCreationMixin(DatabaseCreationMixin16):
pass
class DatabaseCreation(DatabaseCreationMixin, OriginalDatabaseCreation):
pass
| # coding=utf-8
import django
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation as OriginalDatabaseCreation
class DatabaseCreationMixin16(object):
def _create_test_db(self, verbosity, autoclobber):
self.connection.closeall()
return super(DatabaseCreationMixin16, self)._create_test_db(verbosity, autoclobber)
def _destroy_test_db(self, test_database_name, verbosity):
self.connection.closeall()
return super(DatabaseCreationMixin16, self)._destroy_test_db(test_database_name, verbosity)
class DatabaseCreationMixin17(object):
def _create_test_db(self, verbosity, autoclobber):
self.connection.closeall()
return super(DatabaseCreationMixin17, self)._create_test_db(verbosity, autoclobber)
def _destroy_test_db(self, test_database_name, verbosity):
self.connection.closeall()
return super(DatabaseCreationMixin17, self)._destroy_test_db(test_database_name, verbosity)
if django.VERSION >= (1, 7):
class DatabaseCreationMixin(DatabaseCreationMixin17):
pass
else:
class DatabaseCreationMixin(DatabaseCreationMixin16):
pass
class DatabaseCreation(DatabaseCreationMixin, OriginalDatabaseCreation):
pass
| Fix DatabaseCreation from django 1.7 | Fix DatabaseCreation from django 1.7
| Python | apache-2.0 | jneight/django-db-geventpool,PreppyLLC-opensource/django-db-geventpool |
385655debed235fcb32e70a3f506a6885f3e4e67 | c2cgeoportal/views/echo.py | c2cgeoportal/views/echo.py | from base64 import b64encode
import os.path
import re
from pyramid.httpexceptions import HTTPBadRequest
from pyramid.response import Response
from pyramid.view import view_config
def json_base64_encode_chunks(file, chunk_size=65536):
"""
Generate a JSON-wrapped base64-encoded string.
See http://en.wikipedia.org/wiki/Base64
"""
yield '{"data":"'
while True:
line = file.read(chunk_size)
if not line:
break
yield b64encode(line)
yield '"}'
@view_config(route_name='echo')
def echo(request):
"""
Echo an uploaded file back to the client as an text/html document so it can be handled by Ext.
The response is JSON-wrapped and base64-encoded to ensure that there are no special HTML characters or charset problems and so that braindead ext doesn't barf on it.
We use an iterator to avoid loading the whole file into memory.
See http://docs.sencha.com/ext-js/3-4/#!/api/Ext.form.BasicForm-cfg-fileUpload
"""
if request.method != 'POST':
raise HTTPBadRequest()
try:
file = request.POST['file']
except KeyError:
raise HTTPBadRequest()
response = Response()
response.app_iter = json_base64_encode_chunks(file.file)
response.content_type = 'text/html'
return response
| from base64 import b64encode
import os.path
import re
from pyramid.httpexceptions import HTTPBadRequest
from pyramid.response import Response
from pyramid.view import view_config
def json_base64_encode_chunks(file, chunk_size=65536):
"""
Generate a JSON-wrapped base64-encoded string.
See http://en.wikipedia.org/wiki/Base64
"""
yield '{"data":"'
while True:
line = file.read(chunk_size)
if not line:
break
yield b64encode(line)
yield '","success":true}'
@view_config(route_name='echo')
def echo(request):
"""
Echo an uploaded file back to the client as an text/html document so it can be handled by Ext.
The response is JSON-wrapped and base64-encoded to ensure that there are no special HTML characters or charset problems and so that braindead ext doesn't barf on it.
We use an iterator to avoid loading the whole file into memory.
See http://docs.sencha.com/ext-js/3-4/#!/api/Ext.form.BasicForm-cfg-fileUpload
"""
if request.method != 'POST':
raise HTTPBadRequest()
try:
file = request.POST['file']
except KeyError:
raise HTTPBadRequest()
response = Response()
response.app_iter = json_base64_encode_chunks(file.file)
response.content_type = 'text/html'
return response
| Add success=true to satisfy Ext | Add success=true to satisfy Ext
| Python | bsd-2-clause | tsauerwein/c2cgeoportal,tsauerwein/c2cgeoportal,tsauerwein/c2cgeoportal,tsauerwein/c2cgeoportal |
90f4c06cd4186b08758ff599691d1ae1af522590 | cloudkitty/cli/processor.py | cloudkitty/cli/processor.py | # -*- coding: utf-8 -*-
# Copyright 2014 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Stéphane Albert
#
from cloudkitty import service
def main():
service.prepare_service()
# NOTE(mc): This import is done here to ensure that the prepare_service()
# fonction is called before any cfg option. By importing the orchestrator
# file, the utils one is imported too, and then some cfg option are read
# before the prepare_service(), making cfg.CONF returning default values
# systematically.
from cloudkitty import orchestrator
processor = orchestrator.Orchestrator()
try:
processor.process()
except KeyboardInterrupt:
processor.terminate()
if __name__ == '__main__':
main()
| # -*- coding: utf-8 -*-
# Copyright 2014 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Stéphane Albert
#
from cloudkitty import service
def main():
service.prepare_service()
# NOTE(mc): This import is done here to ensure that the prepare_service()
# function is called before any cfg option. By importing the orchestrator
# file, the utils one is imported too, and then some cfg options are read
# before the prepare_service(), making cfg.CONF returning default values
# systematically.
from cloudkitty import orchestrator
processor = orchestrator.Orchestrator()
try:
processor.process()
except KeyboardInterrupt:
processor.terminate()
if __name__ == '__main__':
main()
| Fix two mistakes of method description | Fix two mistakes of method description
Fix two mistakes of method description in processor.py
Change-Id: I3434665b6d458937295b0563ea0cd0ee6aebaca1
| Python | apache-2.0 | openstack/cloudkitty,stackforge/cloudkitty,stackforge/cloudkitty,openstack/cloudkitty |
b7e781eed46503edee25547e8de8831ee6b0cf96 | src/data/download/BN_disease.py | src/data/download/BN_disease.py | # This script downloads weekly dengue statistics from data.gov.bn
import os
import sys
import logging
DIRECTORY = '../../Data/raw/disease_BN'
OUTFILE = "Trend of Notifiable Diseases (2008 - 2012).xlsx"
URL = "https://www.data.gov.bn/Lists/dataset/Attachments/460/Trend%20of%20Notifiable%20Diseases%20(2008%20-%202012).xlsx"
logger = logging.getLogger(__name__)
def download():
if sys.version_info < (3, 0):
try:
os.makedirs(DIRECTORY)
except OSError as e:
pass
import urllib as downloader
from urllib2 import URLError, HTTPError
else:
os.makedirs(DIRECTORY, exist_ok=True)
import urllib.request as downloader
from urllib.error import URLError, HTTPError
output_path = os.path.join(DIRECTORY, OUTFILE)
try:
downloader.urlretrieve(URL, output_path)
logger.info('Downloaded successfully to %s', os.path.abspath(output_path))
except (HTTPError, URLError) as e:
logger.error('Failed to download: %s', e.reason)
if __name__ == "__main__":
DIRECTORY = '../../../Data/raw/disease_BN'
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
download() | # This script downloads weekly dengue statistics from data.gov.bn
import os
import sys
import logging
DIRECTORY = '../../data/raw/disease_BN'
OUTFILE = "Trend of Notifiable Diseases (2008 - 2012).xlsx"
URL = "https://www.data.gov.bn/Lists/dataset/Attachments/460/Trend%20of%20Notifiable%20Diseases%20(2008%20-%202012).xlsx"
logger = logging.getLogger(__name__)
def download():
""" Download disease data from data.gov.bn """
logging.info('Downloading raw data of Notifiable Diseases between 2008 and 2012')
if sys.version_info < (3, 0):
try:
os.makedirs(DIRECTORY)
except OSError as e:
pass
import urllib as downloader
from urllib2 import URLError, HTTPError
else:
os.makedirs(DIRECTORY, exist_ok=True)
import urllib.request as downloader
from urllib.error import URLError, HTTPError
output_path = os.path.join(DIRECTORY, OUTFILE)
try:
downloader.urlretrieve(URL, output_path)
logger.info('Downloaded successfully to %s', os.path.abspath(output_path))
except (HTTPError, URLError) as e:
logger.error('Failed to download: %s', e.reason)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
download() | Add doc str and change logger config | Add doc str and change logger config
| Python | mit | DataKind-SG/healthcare_ASEAN |
c1d889f637d6d2a931f81332a9eef3974dfa18e0 | code/marv/marv/__init__.py | code/marv/marv/__init__.py | # Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
import sys
from pkg_resources import iter_entry_points
from marv_node.io import Abort
from marv_node.io import create_group
from marv_node.io import create_stream
from marv_node.io import fork
from marv_node.io import get_logger
from marv_node.io import get_requested
from marv_node.io import get_stream
from marv_node.io import make_file
from marv_node.io import pull
from marv_node.io import pull_all
from marv_node.io import push
from marv_node.io import set_header
from marv_node.node import input, node
from marv_node.tools import select
from marv_webapi.tooling import api_endpoint
from marv_webapi.tooling import api_group
__all__ = [
'Abort',
'api_endpoint',
'api_group',
'create_group',
'create_stream',
'fork',
'get_logger',
'get_requested',
'get_stream',
'input',
'make_file',
'node',
'pull',
'pull_all',
'push',
'select',
'set_header',
]
MODULE = sys.modules[__name__]
for ep in iter_entry_points(group='marv_deco'):
assert not hasattr(MODULE, ep.name)
setattr(MODULE, ep.name, ep.load())
del MODULE
| # Copyright 2016 - 2018 Ternaris.
# SPDX-License-Identifier: AGPL-3.0-only
from marv_node.io import Abort
from marv_node.io import create_group
from marv_node.io import create_stream
from marv_node.io import fork
from marv_node.io import get_logger
from marv_node.io import get_requested
from marv_node.io import get_stream
from marv_node.io import make_file
from marv_node.io import pull
from marv_node.io import pull_all
from marv_node.io import push
from marv_node.io import set_header
from marv_node.node import input, node
from marv_node.tools import select
from marv_webapi.tooling import api_endpoint
from marv_webapi.tooling import api_group
__all__ = [
'Abort',
'api_endpoint',
'api_group',
'create_group',
'create_stream',
'fork',
'get_logger',
'get_requested',
'get_stream',
'input',
'make_file',
'node',
'pull',
'pull_all',
'push',
'select',
'set_header',
]
| Drop unused support to add decorators via entry points | Drop unused support to add decorators via entry points
| Python | agpl-3.0 | ternaris/marv-robotics,ternaris/marv-robotics |
0a6e82485d4c4657efae629501f14c28c9287f48 | collectd_haproxy/compat.py | collectd_haproxy/compat.py | import sys
PY3 = sys.version_info >= (3,)
def iteritems(dictionary):
if PY3:
return dictionary.items()
return dictionary.iteritems()
def coerce_long(string):
if not PY3:
return long(string)
return int(string)
| import sys
PY3 = sys.version_info >= (3,)
def iteritems(dictionary):
if PY3:
return dictionary.items()
return dictionary.iteritems()
def coerce_long(string):
if not PY3:
return long(string) # noqa
return int(string)
| Fix flake8 test for the coersion function | Fix flake8 test for the coersion function
| Python | mit | wglass/collectd-haproxy |
4c52b8f63fea11278536ec6800305b01d9bd02a8 | blazar/plugins/dummy_vm_plugin.py | blazar/plugins/dummy_vm_plugin.py | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.plugins import base
class DummyVMPlugin(base.BasePlugin):
"""Plugin for VM resource that does nothing."""
resource_type = 'virtual:instance'
title = 'Dummy VM Plugin'
description = 'This plugin does nothing.'
def reserve_resource(self, reservation_id, values):
return None
def on_start(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be waked up this moment.' % resource_id
def on_end(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be deleted this moment.' % resource_id
| # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.plugins import base
class DummyVMPlugin(base.BasePlugin):
"""Plugin for VM resource that does nothing."""
resource_type = 'virtual:instance'
title = 'Dummy VM Plugin'
description = 'This plugin does nothing.'
def reserve_resource(self, reservation_id, values):
return None
def update_reservation(self, reservation_id, values):
return None
def on_start(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be waked up this moment.' % resource_id
def on_end(self, resource_id):
"""Dummy VM plugin does nothing."""
return 'VM %s should be deleted this moment.' % resource_id
| Add update_reservation to dummy plugin | Add update_reservation to dummy plugin
update_reservation is now an abstract method. It needs to be added to
all plugins.
Change-Id: I921878bd5233613b804b17813af1aac5bdfed9e7
(cherry picked from commit 1dbc30202bddfd4f03bdc9a8005de3c363d2ac1d)
| Python | apache-2.0 | ChameleonCloud/blazar,ChameleonCloud/blazar |
15b4f0c587bdd5772718d9d75ff5654d9b835ae5 | righteous/config.py | righteous/config.py | # coding: utf-8
"""
righteous.config
Settings object, lifted from https://github.com/kennethreitz/requests
"""
from requests.config import Settings
class RighteousSettings(Settings):
pass
settings = RighteousSettings()
settings.debug = False
settings.cookies = None
settings.username = None
settings.password = None
settings.account_id = None
| # coding: utf-8
"""
righteous.config
Settings object, lifted from https://github.com/kennethreitz/requests
"""
class Settings(object):
_singleton = {}
# attributes with defaults
__attrs__ = []
def __init__(self, **kwargs):
super(Settings, self).__init__()
self.__dict__ = self._singleton
def __call__(self, *args, **kwargs):
# new instance of class to call
r = self.__class__()
# cache previous settings for __exit__
r.__cache = self.__dict__.copy()
map(self.__cache.setdefault, self.__attrs__)
# set new settings
self.__dict__.update(*args, **kwargs)
return r
def __enter__(self):
pass
def __exit__(self, *args):
# restore cached copy
self.__dict__.update(self.__cache.copy())
del self.__cache
def __getattribute__(self, key):
if key in object.__getattribute__(self, '__attrs__'):
try:
return object.__getattribute__(self, key)
except AttributeError:
return None
return object.__getattribute__(self, key)
settings = Settings()
settings.debug = False
settings.cookies = None
settings.username = None
settings.password = None
settings.account_id = None
| Copy the settings class from an old requests version | Copy the settings class from an old requests version
| Python | unlicense | michaeljoseph/righteous,michaeljoseph/righteous |
157197b330360ccfeaa0bbf54453702ee17d0106 | Code/Python/Kamaelia/Kamaelia/Device/__init__.py | Code/Python/Kamaelia/Kamaelia/Device/__init__.py | # Needed to allow import
#
# Copyright (C) 2006 British Broadcasting Corporation and Kamaelia Contributors(1)
# All Rights Reserved.
#
# You may only modify and redistribute this under the terms of any of the
# following licenses(2): Mozilla Public License, V1.1, GNU General
# Public License, V2.0, GNU Lesser General Public License, V2.1
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://kamaelia.sourceforge.net/AUTHORS - please extend this file,
# not this notice.
# (2) Reproduced in the COPYING file, and at:
# http://kamaelia.sourceforge.net/COPYING
# Under section 3.5 of the MPL, we are using this text since we deem the MPL
# notice inappropriate for this file. As per MPL/GPL/LGPL removal of this
# notice is prohibited.
#
# Please contact us via: [email protected]
# to discuss alternative licensing.
# -------------------------------------------------------------------------
"""
This is a doc string, will it be of use?
"""
# RELEASE: MH, MPS | # -*- coding: utf-8 -*-
# Needed to allow import
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
"""
This is a doc string, will it be of use?
"""
# RELEASE: MH, MPS | Change license to Apache 2 | Change license to Apache 2 | Python | apache-2.0 | sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia |
1d09e197c02899bf33f4e30aef04e91cfe0dcbca | dbmigrator/commands/rollback.py | dbmigrator/commands/rollback.py | # -*- coding: utf-8 -*-
# ###
# Copyright (c) 2015, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
"""Rollback a migration."""
from .. import utils
__all__ = ('cli_loader',)
@utils.with_cursor
def cli_command(cursor, migrations_directory='', steps=1,
db_connection_string='', **kwargs):
migrated_versions = list(utils.get_schema_versions(
cursor, include_deferred=False))
print('migrated_versions: {}'.format(migrated_versions))
if not migrated_versions:
print('No migrations to roll back.')
return
migrations = utils.get_migrations(
migrations_directory, import_modules=True, reverse=True)
rolled_back = 0
for version, migration_name, migration in migrations:
if not migrated_versions:
break
last_version = migrated_versions[-1]
if version == last_version:
utils.compare_schema(db_connection_string,
utils.rollback_migration,
cursor,
version,
migration_name,
migration)
rolled_back += 1
migrated_versions.pop()
if rolled_back >= steps:
break
if not rolled_back:
print('No migrations to roll back.')
def cli_loader(parser):
parser.add_argument('--steps', metavar='N', default=1, type=int,
help='Roll back the last N migrations, default 1')
return cli_command
| # -*- coding: utf-8 -*-
# ###
# Copyright (c) 2015, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
"""Rollback a migration."""
from .. import logger, utils
__all__ = ('cli_loader',)
@utils.with_cursor
def cli_command(cursor, migrations_directory='', steps=1,
db_connection_string='', **kwargs):
migrated_versions = list(utils.get_schema_versions(
cursor, include_deferred=False))
logger.debug('migrated_versions: {}'.format(migrated_versions))
if not migrated_versions:
print('No migrations to roll back.')
return
migrations = utils.get_migrations(
migrations_directory, import_modules=True, reverse=True)
rolled_back = 0
for version, migration_name, migration in migrations:
if not migrated_versions:
break
last_version = migrated_versions[-1]
if version == last_version:
utils.compare_schema(db_connection_string,
utils.rollback_migration,
cursor,
version,
migration_name,
migration)
rolled_back += 1
migrated_versions.pop()
if rolled_back >= steps:
break
if not rolled_back:
print('No migrations to roll back.')
def cli_loader(parser):
parser.add_argument('--steps', metavar='N', default=1, type=int,
help='Roll back the last N migrations, default 1')
return cli_command
| Change print statement to logger.debug | Change print statement to logger.debug
| Python | agpl-3.0 | karenc/db-migrator |
6f60f6257cbcd0328fcdb0873d88d55772731ba4 | api/app.py | api/app.py | from flask import Flask
from flask import request
from flask import jsonify
from y_text_recommender_system.recommender import recommend
app = Flask(__name__)
class InvalidUsage(Exception):
status_code = 400
def __init__(self, message, payload=None):
Exception.__init__(self)
self.message = message
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
@app.route('/recommender/', methods=['POST'])
def recommender():
content = request.get_json()
if content is not None:
doc = content.get('doc', {})
docs = content.get('docs', [])
if doc == {}:
msg = 'The parameter `doc` is missing or empty'
raise InvalidUsage(msg)
if len(docs) == 0:
msg = 'The parameter `docs` is missing or empty'
raise InvalidUsage(msg)
result = recommend(doc, docs)
return jsonify(result)
else:
msg = 'You need to send the parameters: doc and docs'
raise InvalidUsage(msg)
| from flask import Flask
from flask import request
from flask import jsonify
from y_text_recommender_system.recommender import recommend
app = Flask(__name__)
class InvalidUsage(Exception):
status_code = 400
def __init__(self, message, payload=None):
Exception.__init__(self)
self.message = message
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['message'] = self.message
return rv
@app.errorhandler(InvalidUsage)
def handle_invalid_usage(error):
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
@app.route('/recommender/', methods=['POST'])
def recommender():
content = request.get_json()
if content is not None:
doc = content.get('doc', {})
docs = content.get('docs', [])
_verify_parameters(doc, docs)
result = recommend(doc, docs)
return jsonify(result)
else:
msg = 'You need to send the parameters: doc and docs'
raise InvalidUsage(msg)
def _verify_parameters(doc, docs):
if doc == {}:
msg = 'The parameter `doc` is missing or empty'
raise InvalidUsage(msg)
if len(docs) == 0:
msg = 'The parameter `docs` is missing or empty'
raise InvalidUsage(msg)
| Refactor to separate the function to clean the data | Refactor to separate the function to clean the data
| Python | mit | joaojunior/y_text_recommender_system |
2cc51dd426f53b699a544ef34984dc9efdfe03cc | debugger/urls.py | debugger/urls.py | from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('debugger.views',
url(r'^$', 'index', name='index'),
url(r'^settings$', 'settings', name='settings'),
url(r'^scenario/(?P<scenario_id>[^/]+)$', 'show_scenario', name='scenario'),
url(r'^resource/(?P<package>[^/]+)/(?P<resource>.*)/?', 'package_resource', name='package_resource'),
url(r'^handler/(?P<usage_id>[^/]+)/(?P<handler>[^/]*)', 'handler', name='handler'),
# Examples:
# url(r'^$', 'debugger.views.home', name='home'),
# url(r'^debugger/', include('debugger.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('debugger.views',
url(r'^$', 'index', name='index'),
url(r'^settings/$', 'settings', name='settings'),
url(r'^scenario/(?P<scenario_id>[^/]+)/$', 'show_scenario', name='scenario'),
url(r'^resource/(?P<package>[^/]+)/(?P<resource>[^/]*)/$', 'package_resource', name='package_resource'),
url(r'^handler/(?P<usage_id>[^/]+)/(?P<handler>[^/]*)/$', 'handler', name='handler'),
# Examples:
# url(r'^$', 'debugger.views.home', name='home'),
# url(r'^debugger/', include('debugger.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += staticfiles_urlpatterns()
| Fix trailing slashes in URLconfs | Fix trailing slashes in URLconfs
| Python | apache-2.0 | lovehhf/xblock-sdk,lovehhf/XBlock,stvstnfrd/xblock-sdk,edx-solutions/XBlock,Lyla-Fischer/xblock-sdk,edx/xblock-sdk,lovehhf/xblock-sdk,Pilou81715/hackathon_edX,EDUlib/XBlock,cpennington/XBlock,nagyistoce/edx-XBlock,edx-solutions/XBlock,edx-solutions/xblock-sdk,edx/xblock-sdk,nagyistoce/edx-xblock-sdk,Pilou81715/hackathon_edX,edx-solutions/xblock-sdk,nagyistoce/edx-xblock-sdk,nagyistoce/edx-XBlock,jamiefolsom/xblock-sdk,Lyla-Fischer/xblock-sdk,edx-solutions/xblock-sdk,edx/xblock-sdk,Lyla-Fischer/xblock-sdk,dcadams/xblock-sdk,edx/XBlock,edx-solutions/xblock-sdk,open-craft/XBlock,Pilou81715/hackathon_edX,Pilou81715/hackathon_edX,mitodl/XBlock,4eek/XBlock,dcadams/xblock-sdk,lovehhf/xblock-sdk,EDUlib/XBlock,jamiefolsom/xblock-sdk,open-craft/XBlock,stvstnfrd/xblock-sdk,nagyistoce/edx-xblock-sdk,lovehhf/xblock-sdk,4eek/XBlock,mitodl/XBlock,edx/XBlock,lovehhf/XBlock,jamiefolsom/xblock-sdk,stvstnfrd/xblock-sdk,cpennington/XBlock,nagyistoce/edx-xblock-sdk,jamiefolsom/xblock-sdk,cpennington/XBlock,dcadams/xblock-sdk |
7785d3129d089ce99aee340b3a72fd78d7e8f556 | send.py | send.py | import os
sender = str(raw_input("Your Username: "))
target = str(raw_input("Target's Username: "))
message = str(raw_input("Message: "))
#Messages are encoded like so "senderProgramVx.x##target##sender##message"
#Example: "linuxV1.8##person87##NickGeek##Hey mate! What do you think of this WiN thing?"
formattedMessage = "linuxVpre.release##"+target+"##"+sender+"##"+message
#Write to file
messageFile = open('msg.txt', 'w+')
messageFile.write(formattedMessage)
messageFile.close()
os.system("python server.py") | import os
if os.path.exists("account.conf") is False:
sender = str(raw_input("Your Username: "))
accountFile = open('account.conf', 'w+')
accountFile.write(sender)
accountFile.close()
else:
accountFile = open('account.conf', 'r')
sender = accountFile.read()
accountFile.close()
target = str(raw_input("Target's Username: "))
message = str(raw_input("Message: "))
#Messages are encoded like so "senderProgramVx.x##target##sender##message"
#Example: "linuxV1.8##person87##NickGeek##Hey mate! What do you think of this WiN thing?"
formattedMessage = "linuxVpre.release##"+target+"##"+sender+"##"+message
#Write to file
messageFile = open('msg.txt', 'w+')
messageFile.write(formattedMessage)
messageFile.close()
os.system("python server.py") | Store your username in a file | Store your username in a file
| Python | mit | NickGeek/WiN,NickGeek/WiN,NickGeek/WiN |
42e4f42901872433f90dd84d5acf04fec76ab7f3 | curious/commands/exc.py | curious/commands/exc.py | class CommandsError(Exception):
pass
class CheckFailureError(Exception):
def __init__(self, ctx, check):
self.ctx = ctx
self.check = check
def __repr__(self):
if isinstance(self.check, list):
return "The checks for {.name} failed.".format(self.ctx)
return "The check {.__name__} for {.name} failed.".format(self.check, self.ctx)
__str__ = __repr__
class MissingArgumentError(Exception):
def __init__(self, ctx, arg):
self.ctx = ctx
self.arg = arg
def __repr__(self):
return "Missing required argument {} in {.name}.".format(self.arg, self.ctx)
__str__ = __repr__
class CommandInvokeError(Exception):
def __init__(self, ctx):
self.ctx = ctx
def __repr__(self):
return "Command {.name} failed to invoke with error {}".format(self.ctx, self.__cause__)
__str__ = __repr__
class ConversionFailedError(Exception):
def __init__(self, ctx, arg: str, to_type: type):
self.ctx = ctx
self.arg = arg
self.to_type = to_type
def __repr__(self):
return "Cannot convert {} to type {.__name__}".format(self.arg, self.to_type)
__str__ = __repr__
| class CommandsError(Exception):
pass
class CheckFailureError(Exception):
def __init__(self, ctx, check):
self.ctx = ctx
self.check = check
def __repr__(self):
if isinstance(self.check, list):
return "The checks for `{.name}` failed.".format(self.ctx)
return "The check `{.__name__}` for `{.name}` failed.".format(self.check, self.ctx)
__str__ = __repr__
class MissingArgumentError(Exception):
def __init__(self, ctx, arg):
self.ctx = ctx
self.arg = arg
def __repr__(self):
return "Missing required argument `{}` in `{.name}`.".format(self.arg, self.ctx)
__str__ = __repr__
class CommandInvokeError(Exception):
def __init__(self, ctx):
self.ctx = ctx
def __repr__(self):
return "Command {.name} failed to invoke with error `{}`.".format(self.ctx, self.__cause__)
__str__ = __repr__
class ConversionFailedError(Exception):
def __init__(self, ctx, arg: str, to_type: type):
self.ctx = ctx
self.arg = arg
self.to_type = to_type
def __repr__(self):
return "Cannot convert `{}` to type `{.__name__}`.".format(self.arg, self.to_type)
__str__ = __repr__
| Add better __repr__s for commands errors. | Add better __repr__s for commands errors.
| Python | mit | SunDwarf/curious |
d8179c0006fb5b9983898e4cd93ffacfe3fdd54f | caminae/mapentity/templatetags/convert_tags.py | caminae/mapentity/templatetags/convert_tags.py | import urllib
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
| import urllib
from mimetypes import types_map
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
if '/' not in format:
extension = '.' + format if not format.startswith('.') else format
format = types_map[extension]
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
| Support conversion format as extension, instead of mimetype | Support conversion format as extension, instead of mimetype
| Python | bsd-2-clause | makinacorpus/Geotrek,Anaethelion/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,mabhub/Geotrek,GeotrekCE/Geotrek-admin,Anaethelion/Geotrek,johan--/Geotrek,camillemonchicourt/Geotrek,johan--/Geotrek,mabhub/Geotrek,Anaethelion/Geotrek,camillemonchicourt/Geotrek,GeotrekCE/Geotrek-admin,camillemonchicourt/Geotrek,mabhub/Geotrek,mabhub/Geotrek,makinacorpus/Geotrek,Anaethelion/Geotrek,johan--/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek |
e853e0137e59314f5101c178c74fd626984c34ac | pygraphc/similarity/LogTextSimilarity.py | pygraphc/similarity/LogTextSimilarity.py | from pygraphc.preprocess.PreprocessLog import PreprocessLog
from pygraphc.similarity.StringSimilarity import StringSimilarity
from itertools import combinations
class LogTextSimilarity(object):
"""A class for calculating cosine similarity between a log pair. This class is intended for
non-graph based clustering method.
"""
def __init__(self, logtype, logfile):
"""The constructor of class LogTextSimilarity.
Parameters
----------
logtype : str
Type for event log, e.g., auth, syslog, etc.
logfile : str
Log filename.
"""
self.logtype = logtype
self.logfile = logfile
def get_cosine_similarity(self):
"""Get cosine similarity from a pair of log lines in a file.
Returns
-------
cosine_similarity : dict
Dictionary of cosine similarity in non-graph clustering. Key: (log_id1, log_id2),
value: cosine similarity distance.
"""
preprocess = PreprocessLog(self.logtype, self.logfile)
preprocess.preprocess_text()
events = preprocess.events_text
# calculate cosine similarity
cosines_similarity = {}
for log_pair in combinations(preprocess.loglength, 2):
cosines_similarity[log_pair] = StringSimilarity.get_cosine_similarity(events[log_pair[0]]['tf-idf'],
events[log_pair[1]]['tf-idf'],
events[log_pair[0]]['length'],
events[log_pair[1]]['length'])
return cosines_similarity
| from pygraphc.preprocess.PreprocessLog import PreprocessLog
from pygraphc.similarity.StringSimilarity import StringSimilarity
from itertools import combinations
class LogTextSimilarity(object):
"""A class for calculating cosine similarity between a log pair. This class is intended for
non-graph based clustering method.
"""
def __init__(self, logtype, logs):
"""The constructor of class LogTextSimilarity.
Parameters
----------
logtype : str
Type for event log, e.g., auth, syslog, etc.
logs : list
List of every line of original logs.
"""
self.logtype = logtype
self.logs = logs
def get_cosine_similarity(self):
"""Get cosine similarity from a pair of log lines in a file.
Returns
-------
cosine_similarity : dict
Dictionary of cosine similarity in non-graph clustering. Key: (log_id1, log_id2),
value: cosine similarity distance.
"""
preprocess = PreprocessLog(self.logtype)
preprocess.preprocess_text(self.logs)
events = preprocess.events_text
# calculate cosine similarity
cosines_similarity = {}
for log_pair in combinations(range(preprocess.loglength), 2):
cosines_similarity[log_pair] = StringSimilarity.get_cosine_similarity(events[log_pair[0]]['tf-idf'],
events[log_pair[1]]['tf-idf'],
events[log_pair[0]]['length'],
events[log_pair[1]]['length'])
return cosines_similarity
| Change input from previous processing not from a file | Change input from previous processing not from a file
| Python | mit | studiawan/pygraphc |
921ce44cd766540d74b8496029e871d5aceb5cbb | urls.py | urls.py | from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from filebrowser.sites import site
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url('', include('uqam.cat.urls')),
url(r'^admin/filebrowser/', include(site.urls)),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^dashboard/', include('uqam.dashboard.urls')),
url(r'^grappelli/', include('grappelli.urls')),
url(r'^collection/', include('uqamcollections.urls')),
(r'^search/', include('haystack.urls')),
url(r'^accounts/login/$',
'django.contrib.auth.views.login',
name='auth_login'),
url(r'^accounts/logout/$',
'django.contrib.auth.views.logout',
{'next_page': '/', 'redirect_field_name': 'next'},
name='auth_logout'),
url(r'^report/', include('reports.urls')),
url(r'^', include('common.urls')),
url(r'^place/', include('location.urls')),
url(r'^mediaman/', include('mediaman.urls')),
)
if settings.DEBUG:
from django.conf.urls.static import static
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
| from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url('', include('uqam.cat.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^dashboard/', include('uqam.dashboard.urls')),
url(r'^grappelli/', include('grappelli.urls')),
url(r'^collection/', include('uqamcollections.urls')),
(r'^search/', include('haystack.urls')),
url(r'^accounts/login/$',
'django.contrib.auth.views.login',
name='auth_login'),
url(r'^accounts/logout/$',
'django.contrib.auth.views.logout',
{'next_page': '/', 'redirect_field_name': 'next'},
name='auth_logout'),
url(r'^report/', include('reports.urls')),
url(r'^', include('common.urls')),
url(r'^place/', include('location.urls')),
url(r'^mediaman/', include('mediaman.urls')),
)
if settings.DEBUG:
from django.conf.urls.static import static
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
| Remove last bits of file browser stuff | Remove last bits of file browser stuff | Python | bsd-3-clause | uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam |
86a5fff9add5980892c90a3e34476802dec7a6dc | jsonschema/tests/_helpers.py | jsonschema/tests/_helpers.py | def bug(issue=None):
message = "A known bug."
if issue is not None:
message += " See issue #{issue}.".format(issue=issue)
return message
| from urllib.parse import urljoin
def issues_url(organization, repository):
return urljoin(
urljoin(
urljoin("https://github.com", organization),
repository,
),
"issues",
)
ISSUES_URL = issues_url("python-jsonschema", "jsonschema")
TEST_SUITE_ISSUES_URL = issues_url("json-schema-org", "JSON-Schema-Test-Suite")
def bug(issue=None):
message = "A known bug."
if issue is not None:
message += f" See {urljoin(ISSUES_URL, str(issue))}."
return message
def test_suite_bug(issue):
return (
"A known test suite bug. "
f"See {urljoin(TEST_SUITE_ISSUES_URL, str(issue))}."
)
| Improve the internal skipped-test helper messages. | Improve the internal skipped-test helper messages.
| Python | mit | python-jsonschema/jsonschema |
bf96bf9d71f432f2db75b0c62b49098235d75661 | cryptography/bindings/openssl/pkcs12.py | cryptography/bindings/openssl/pkcs12.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
INCLUDES = """
#include <openssl/pkcs12.h>
"""
TYPES = """
typedef ... PKCS12;
"""
FUNCTIONS = """
int PKCS12_parse(PKCS12 *, const char *, EVP_PKEY **, X509 **,
struct stack_st_X509 **);
PKCS12 *PKCS12_create(char *, char *, EVP_PKEY *, X509 *,
struct stack_st_X509 *, int, int, int, int, int);
void PKCS12_free(PKCS12 *);
PKCS12 *d2i_PKCS12_bio(BIO *, PKCS12 **);
int i2d_PKCS12_bio(BIO *, PKCS12 *);
"""
MACROS = """
"""
| # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
INCLUDES = """
#include <openssl/pkcs12.h>
"""
TYPES = """
typedef ... PKCS12;
"""
FUNCTIONS = """
void PKCS12_free(PKCS12 *);
PKCS12 *d2i_PKCS12_bio(BIO *, PKCS12 **);
int i2d_PKCS12_bio(BIO *, PKCS12 *);
"""
MACROS = """
int PKCS12_parse(PKCS12 *, const char *, EVP_PKEY **, X509 **,
struct stack_st_X509 **);
PKCS12 *PKCS12_create(char *, char *, EVP_PKEY *, X509 *,
struct stack_st_X509 *, int, int, int, int, int);
"""
| Move these to macros, the exact type of these functions changes by deifne | Move these to macros, the exact type of these functions changes by deifne
| Python | bsd-3-clause | Lukasa/cryptography,kimvais/cryptography,skeuomorf/cryptography,sholsapp/cryptography,dstufft/cryptography,bwhmather/cryptography,glyph/cryptography,dstufft/cryptography,bwhmather/cryptography,kimvais/cryptography,dstufft/cryptography,Ayrx/cryptography,Lukasa/cryptography,skeuomorf/cryptography,sholsapp/cryptography,Hasimir/cryptography,kimvais/cryptography,skeuomorf/cryptography,Ayrx/cryptography,skeuomorf/cryptography,bwhmather/cryptography,Hasimir/cryptography,bwhmather/cryptography,sholsapp/cryptography,Hasimir/cryptography,Lukasa/cryptography,dstufft/cryptography,Ayrx/cryptography,Ayrx/cryptography,sholsapp/cryptography,Hasimir/cryptography,glyph/cryptography,kimvais/cryptography,dstufft/cryptography |
bcaf887ccad40adf2cb09627c12f2a3e1b4b006d | redis_cache/client/__init__.py | redis_cache/client/__init__.py | # -*- coding: utf-8 -*-
from .default import DefaultClient
from .sharded import ShardClient
from .herd import HerdClient
from .experimental import SimpleFailoverClient
from .sentinel import SentinelClient
__all__ = ['DefaultClient', 'ShardClient',
'HerdClient', 'SimpleFailoverClient',
'SentinelClient']
| # -*- coding: utf-8 -*-
import warnings
from .default import DefaultClient
from .sharded import ShardClient
from .herd import HerdClient
from .experimental import SimpleFailoverClient
__all__ = ['DefaultClient', 'ShardClient',
'HerdClient', 'SimpleFailoverClient',]
try:
from .sentinel import SentinelClient
__all__.append("SentinelClient")
except ImportError:
warnings.warn("sentinel client is unsuported with redis-py<2.9",
RuntimeWarning)
| Disable Sentinel client with redis-py < 2.9 | Disable Sentinel client with redis-py < 2.9
| Python | bsd-3-clause | zl352773277/django-redis,smahs/django-redis,yanheng/django-redis,lucius-feng/django-redis,GetAmbassador/django-redis |
a99dd63f357548cc4eef5121e3a2da9dfd6b7a01 | education/test/test_absenteeism_form.py | education/test/test_absenteeism_form.py | # vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
from unittest import TestCase
from education.views import AbsenteeismForm
from datetime import datetime
class TestAbsenteeismForm(TestCase):
def test_should_invalidate_empty_form(self):
absenteeism_form = AbsenteeismForm(data={})
self.assertFalse(absenteeism_form.is_valid())
def test_should_validate_if_to_date_is_greater_than_from_date(self):
absenteeism_form = AbsenteeismForm(data={'to_date':'12/12/2013', 'from_date':'12/14/2013'})
self.assertFalse(absenteeism_form.is_valid())
def test_should_get_cleaned_data_after_validation(self):
absenteeism_form = AbsenteeismForm(data={'to_date':'12/21/2013', 'from_date':'12/14/2013', 'indicator':'all'})
self.assertTrue(absenteeism_form.is_valid())
self.assertEqual(datetime(2013,12,21), absenteeism_form.cleaned_data['to_date'])
self.assertEqual(datetime(2013,12,14), absenteeism_form.cleaned_data['from_date'])
self.assertEqual('all', absenteeism_form.cleaned_data['indicator'])
| # vim: ai ts=4 sts=4 et sw=4 encoding=utf-8
from unittest import TestCase
from education.views import AbsenteeismForm
from datetime import datetime
class TestAbsenteeismForm(TestCase):
def test_should_invalidate_empty_form(self):
absenteeism_form = AbsenteeismForm(data={})
self.assertFalse(absenteeism_form.is_valid())
def test_should_validate_if_to_date_is_greater_than_from_date(self):
absenteeism_form = AbsenteeismForm(data={'to_date':'12/12/2012', 'from_date':'12/14/2012'})
self.assertFalse(absenteeism_form.is_valid())
def test_should_get_cleaned_data_after_validation(self):
absenteeism_form = AbsenteeismForm(data={'to_date':'12/21/2012', 'from_date':'12/14/2012', 'indicator':'all'})
self.assertTrue(absenteeism_form.is_valid())
self.assertEqual(datetime(2012,12,21), absenteeism_form.cleaned_data['to_date'])
self.assertEqual(datetime(2012,12,14), absenteeism_form.cleaned_data['from_date'])
self.assertEqual('all', absenteeism_form.cleaned_data['indicator'])
| Put dates in the past so that we have no chance of having them spontaneously fail. | Put dates in the past so that we have no chance of having them spontaneously fail.
| Python | bsd-3-clause | unicefuganda/edtrac,unicefuganda/edtrac,unicefuganda/edtrac |
684ac5e6e6011581d5abcb42a7c0e54742f20606 | Arduino/IMUstream_WifiUDP_iot33/read_UDP_JSON_IMU.py | Arduino/IMUstream_WifiUDP_iot33/read_UDP_JSON_IMU.py | # -------------------------------------------------------
import socket, traceback
import time
import json
host = ''
port = 2390
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.bind((host, port))
filein = open('saveUDP.txt', 'w')
t0 = time.time()
while time.time()-t0 < 200:
try:
message, address = s.recvfrom(4096)
print(message)
json.loads(message.decode("utf-8"))
filein.write('%s\n' % (message))
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
filein.close()
# -------------------------------------------------------
| # -------------------------------------------------------
import socket, traceback
import time
import json
import numpy as np
from scipy.spatial.transform import Rotation as R
host = ''
port = 2390
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.bind((host, port))
filein = open('saveUDP.txt', 'w')
t0 = time.time()
# Place IMU x-axis into wind going direction when launching script
is_init_done = False
wind_yaw = 0
while time.time()-t0 < 200:
try:
message, address = s.recvfrom(4096)
#print(message)
msg = json.loads(message.decode("utf-8"))
if is_init_done==False:
wind_yaw = msg["Yaw"]
is_init_done = True
msg['Yaw'] = msg['Yaw']-wind_yaw
print(msg)
ypr = [msg['Yaw'], msg['Pitch'], msg['Roll']]
seq = 'ZYX' # small letters from intrinsic rotations
r = R.from_euler(seq, ypr, degrees=True)
# Compute coordinates in NED (could be useful to compare position with GPS position for example)
line_length = 10
base_to_kite = [0, 0, line_length]
base_to_kite_in_NED = r.apply(base_to_kite)
# Express kite coordinates as great roll, great pitch and small yaw angles
grpy=r.as_euler(seq="XYZ")
print(grpy*180/np.pi)
filein.write('%s\n' % (message))
except (KeyboardInterrupt, SystemExit):
raise
except:
traceback.print_exc()
filein.close()
# -------------------------------------------------------
| Add computations of great roll, pitch and small yaw angle (kite angles) | Add computations of great roll, pitch and small yaw angle (kite angles)
| Python | mit | baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite |
0254ad22680d32a451d1faf4b21809394a399311 | packages/pegasus-python/src/Pegasus/cli/startup-validation.py | packages/pegasus-python/src/Pegasus/cli/startup-validation.py | #!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write("Pegasus requires Python 3.5 or above\n")
sys.exit(1)
try:
pass
except:
sys.stderr.write("Pegasus requires the Python3 YAML module to be installed\n")
sys.exit(1)
| #!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write("Pegasus requires Python 3.5 or above\n")
sys.exit(1)
try:
import yaml # noqa
except:
sys.stderr.write("Pegasus requires the Python3 YAML module to be installed\n")
sys.exit(1)
| Add noqa comment so unused import does not get removed by code lint steps | Add noqa comment so unused import does not get removed by code lint steps
| Python | apache-2.0 | pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus |
6d18ff715a5fa3059ddb609c1abdbbb06b15ad63 | fuel/downloaders/celeba.py | fuel/downloaders/celeba.py | from fuel.downloaders.base import default_downloader
def fill_subparser(subparser):
"""Sets up a subparser to download the CelebA dataset file.
Parameters
----------
subparser : :class:`argparse.ArgumentParser`
Subparser handling the `celeba` command.
"""
urls = ['https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AAB7G69NLjRNqv_tyiULHSVUa/list_attr_celeba.txt?dl=1',
'https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AADVdnYbokd7TXhpvfWLL3sga/img_align_celeba.zip?dl=1']
filenames = ['list_attr_celeba.txt', 'img_align_celeba.zip']
subparser.set_defaults(urls=urls, filenames=filenames)
return default_downloader
| from fuel.downloaders.base import default_downloader
def fill_subparser(subparser):
"""Sets up a subparser to download the CelebA dataset file.
Parameters
----------
subparser : :class:`argparse.ArgumentParser`
Subparser handling the `celeba` command.
"""
urls = ['https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AAC7-uCaJkmPmvLX2_P5qy0ga/Anno/list_attr_celeba.txt?dl=1',
'https://www.dropbox.com/sh/8oqt9vytwxb3s4r/'
'AADIKlz8PR9zr6Y20qbkunrba/Img/img_align_celeba.zip?dl=1']
filenames = ['list_attr_celeba.txt', 'img_align_celeba.zip']
subparser.set_defaults(urls=urls, filenames=filenames)
return default_downloader
| Update download links for CelebA files | Update download links for CelebA files
| Python | mit | mila-udem/fuel,dmitriy-serdyuk/fuel,dmitriy-serdyuk/fuel,mila-udem/fuel,vdumoulin/fuel,vdumoulin/fuel |
e66fe8e6c79f7b29e2f334b481904f7d838a2655 | gameon/users/middleware.py | gameon/users/middleware.py | from django.core.urlresolvers import reverse, resolve
from django.http import HttpResponseRedirect
from gameon.users.models import get_profile_safely
class ProfileMiddleware(object):
@classmethod
def safe_paths(cls):
return ('users_edit', 'django.views.static.serve', 'users_signout')
def is_safe(self, path):
try:
match = resolve(path)
return match.url_name in self.__class__.safe_paths()
except:
return False
def process_request(self, request):
# django debug_toolbar
if '__debug__' in request.path:
return
if self.is_safe(request.path):
return
path = u'/%s' % ('/'.join(request.path.split('/')[2:]),)
if self.is_safe(path):
return
if request.user.is_authenticated():
profile = get_profile_safely(request.user, True)
print profile.name
if profile.has_chosen_identifier:
return
return HttpResponseRedirect(reverse('users_edit'))
| from django.core.urlresolvers import reverse, resolve
from django.http import HttpResponseRedirect
from django.conf import settings
from gameon.users.models import get_profile_safely
class ProfileMiddleware(object):
"""
This middleware will redirect a user, once signed into the site via Persona
to complete their profile, at which point they agree to the Mozilla Privacy
Policy
"""
@classmethod
def safe_paths(cls):
"""
Paths we don't need to redirect on - at this point they've either
disagreed or are in the process of agreeing so it would infinite loop
"""
return ('users_edit', 'django.views.static.serve', 'users_signout')
def is_safe(self, path):
"""
Checks the current request path is in the safe list above and if so
ignores it and returns as normal
"""
try:
match = resolve(path)
return match.url_name in self.__class__.safe_paths()
except:
return False
def process_request(self, request):
"""
if it's a request for the django debug toolbar AND we're in dev we can
ignore - this check now only applies to when the site is in dev
"""
if settings.DEBUG and '__debug__' in request.path:
return
if self.is_safe(request.path):
return
# remove the locale string - resolve won't work with it included
path = u'/%s' % ('/'.join(request.path.split('/')[2:]),)
if self.is_safe(path):
return
if request.user.is_authenticated():
profile = get_profile_safely(request.user, True)
if profile.has_chosen_identifier:
return
return HttpResponseRedirect(reverse('users_edit'))
| Fix up nits on profileMiddleware as noticed in bugzilla-813182 | Fix up nits on profileMiddleware as noticed in bugzilla-813182
| Python | bsd-3-clause | mozilla/gameon,mozilla/gameon,mozilla/gameon,mozilla/gameon |
e818860af87cad796699e27f8dfb4ff6fc9354e8 | h2o-py/h2o/model/autoencoder.py | h2o-py/h2o/model/autoencoder.py | """
AutoEncoder Models
"""
from model_base import *
from metrics_base import *
class H2OAutoEncoderModel(ModelBase):
"""
Class for AutoEncoder models.
"""
def __init__(self, dest_key, model_json):
super(H2OAutoEncoderModel, self).__init__(dest_key, model_json,H2OAutoEncoderModelMetrics)
def anomaly(self,test_data):
"""
Obtain the reconstruction error for the input test_data.
:param test_data: The dataset upon which the reconstruction error is computed.
:return: Return the reconstruction error.
"""
if not test_data: raise ValueError("Must specify test data")
j = H2OConnection.post_json("Predictions/models/" + self._id + "/frames/" + test_data._id, reconstruction_error=True)
return h2o.get_frame(j["model_metrics"][0]["predictions"]["frame_id"]["name"]) | """
AutoEncoder Models
"""
from model_base import *
from metrics_base import *
class H2OAutoEncoderModel(ModelBase):
"""
Class for AutoEncoder models.
"""
def __init__(self, dest_key, model_json):
super(H2OAutoEncoderModel, self).__init__(dest_key, model_json,H2OAutoEncoderModelMetrics)
def anomaly(self,test_data,per_feature=False):
"""
Obtain the reconstruction error for the input test_data.
:param test_data: The dataset upon which the reconstruction error is computed.
:param per_feature: Whether to return the square reconstruction error per feature. Otherwise, return the mean square error.
:return: Return the reconstruction error.
"""
if not test_data: raise ValueError("Must specify test data")
j = H2OConnection.post_json("Predictions/models/" + self._id + "/frames/" + test_data._id, reconstruction_error=True, reconstruction_error_per_feature=per_feature)
return h2o.get_frame(j["model_metrics"][0]["predictions"]["frame_id"]["name"]) | Add extra argument to get per-feature reconstruction error for anomaly detection from Python. | PUBDEV-2078: Add extra argument to get per-feature reconstruction error for
anomaly detection from Python.
| Python | apache-2.0 | kyoren/https-github.com-h2oai-h2o-3,h2oai/h2o-3,mathemage/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,datachand/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,brightchen/h2o-3,mathemage/h2o-3,YzPaul3/h2o-3,h2oai/h2o-dev,datachand/h2o-3,kyoren/https-github.com-h2oai-h2o-3,printedheart/h2o-3,pchmieli/h2o-3,madmax983/h2o-3,YzPaul3/h2o-3,datachand/h2o-3,YzPaul3/h2o-3,printedheart/h2o-3,kyoren/https-github.com-h2oai-h2o-3,junwucs/h2o-3,pchmieli/h2o-3,datachand/h2o-3,junwucs/h2o-3,mathemage/h2o-3,h2oai/h2o-3,printedheart/h2o-3,junwucs/h2o-3,kyoren/https-github.com-h2oai-h2o-3,YzPaul3/h2o-3,madmax983/h2o-3,michalkurka/h2o-3,junwucs/h2o-3,printedheart/h2o-3,datachand/h2o-3,pchmieli/h2o-3,michalkurka/h2o-3,printedheart/h2o-3,brightchen/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,madmax983/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,YzPaul3/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,brightchen/h2o-3,pchmieli/h2o-3,brightchen/h2o-3,spennihana/h2o-3,junwucs/h2o-3,mathemage/h2o-3,printedheart/h2o-3,madmax983/h2o-3,h2oai/h2o-3,jangorecki/h2o-3,kyoren/https-github.com-h2oai-h2o-3,madmax983/h2o-3,datachand/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,brightchen/h2o-3,jangorecki/h2o-3,madmax983/h2o-3,junwucs/h2o-3,jangorecki/h2o-3,h2oai/h2o-3,spennihana/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,pchmieli/h2o-3,junwucs/h2o-3,mathemage/h2o-3,datachand/h2o-3,kyoren/https-github.com-h2oai-h2o-3,jangorecki/h2o-3,spennihana/h2o-3,spennihana/h2o-3,spennihana/h2o-3,madmax983/h2o-3,spennihana/h2o-3,h2oai/h2o-3,YzPaul3/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,brightchen/h2o-3,pchmieli/h2o-3,h2oai/h2o-dev,brightchen/h2o-3,kyoren/https-github.com-h2oai-h2o-3,michalkurka/h2o-3,pchmieli/h2o-3,mathemage/h2o-3,printedheart/h2o-3 |
ea1c095fb12c4062616ee0d38818ab1baaabd1eb | ipywidgets/widgets/tests/test_widget_upload.py | ipywidgets/widgets/tests/test_widget_upload.py | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from unittest import TestCase
from traitlets import TraitError
from ipywidgets import FileUpload
class TestFileUpload(TestCase):
def test_construction(self):
uploader = FileUpload()
# Default
assert uploader.accept == ''
assert not uploader.multiple
assert not uploader.disabled
def test_construction_with_params(self):
uploader = FileUpload(
accept='.txt', multiple=True, disabled=True)
assert uploader.accept == '.txt'
assert uploader.multiple
assert uploader.disabled
def test_empty_initial_value(self):
uploader = FileUpload()
assert uploader.value == []
| # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from unittest import TestCase
from traitlets import TraitError
from ipywidgets import FileUpload
class TestFileUpload(TestCase):
def test_construction(self):
uploader = FileUpload()
# Default
assert uploader.accept == ''
assert not uploader.multiple
assert not uploader.disabled
def test_construction_with_params(self):
uploader = FileUpload(
accept='.txt', multiple=True, disabled=True)
assert uploader.accept == '.txt'
assert uploader.multiple
assert uploader.disabled
def test_empty_initial_value(self):
uploader = FileUpload()
assert uploader.value == []
def test_receive_single_file(self):
uploader = FileUpload()
content = memoryview(b"file content")
message = {
"value": [
{
"name": "file-name.txt",
"type": "text/plain",
"size": 20760,
"lastModified": 1578578296434,
"error": "",
"content": content,
}
]
}
uploader.set_state(message)
assert len(uploader.value) == 1
[uploaded_file] = uploader.value
assert uploaded_file.name == "file-name.txt"
assert uploaded_file.type == "text/plain"
assert uploaded_file.size == 20760
assert uploaded_file.content.tobytes() == b"file content"
| Test deserialization of comm message following upload | Test deserialization of comm message following upload
| Python | bsd-3-clause | ipython/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets |
c5730d19d41f7221c4108f340d0ff8be26c24c74 | auxiliary/tag_suggestions/__init__.py | auxiliary/tag_suggestions/__init__.py | from tagging.models import Tag, TaggedItem
from django.contrib.contenttypes.models import ContentType
from auxiliary.models import TagSuggestion
from django.db import IntegrityError
def approve(admin, request, tag_suggestions):
for tag_suggestion in tag_suggestions:
object = tag_suggestion.object
try:
tag = Tag.objects.create(name=tag_suggestion.name)
TaggedItem.objects.create(tag=tag, object=object)
except IntegrityError as e:
if str(e) != 'column name is not unique':
raise
tag_suggestion.delete()
| from tagging.models import Tag, TaggedItem
from django.contrib.contenttypes.models import ContentType
def approve(admin, request, tag_suggestions):
for tag_suggestion in tag_suggestions:
obj = tag_suggestion.object
ct = ContentType.objects.get_for_model(obj)
tag, t_created = Tag.objects.get_or_create(name=tag_suggestion.name)
ti, ti_created = TaggedItem.objects.get_or_create(
tag=tag, object_id=obj.pk, content_type=ct)
tag_suggestion.delete()
| Make tag_suggestions test less flaky | Make tag_suggestions test less flaky
Failed on Python 2.7.6 as it was dependant on an error string returned
| Python | bsd-3-clause | noamelf/Open-Knesset,otadmor/Open-Knesset,habeanf/Open-Knesset,habeanf/Open-Knesset,daonb/Open-Knesset,navotsil/Open-Knesset,noamelf/Open-Knesset,navotsil/Open-Knesset,navotsil/Open-Knesset,otadmor/Open-Knesset,noamelf/Open-Knesset,otadmor/Open-Knesset,ofri/Open-Knesset,Shrulik/Open-Knesset,jspan/Open-Knesset,jspan/Open-Knesset,MeirKriheli/Open-Knesset,otadmor/Open-Knesset,alonisser/Open-Knesset,daonb/Open-Knesset,ofri/Open-Knesset,OriHoch/Open-Knesset,DanaOshri/Open-Knesset,ofri/Open-Knesset,MeirKriheli/Open-Knesset,alonisser/Open-Knesset,DanaOshri/Open-Knesset,alonisser/Open-Knesset,DanaOshri/Open-Knesset,OriHoch/Open-Knesset,MeirKriheli/Open-Knesset,jspan/Open-Knesset,habeanf/Open-Knesset,daonb/Open-Knesset,alonisser/Open-Knesset,OriHoch/Open-Knesset,Shrulik/Open-Knesset,habeanf/Open-Knesset,ofri/Open-Knesset,OriHoch/Open-Knesset,DanaOshri/Open-Knesset,noamelf/Open-Knesset,Shrulik/Open-Knesset,Shrulik/Open-Knesset,jspan/Open-Knesset,MeirKriheli/Open-Knesset,daonb/Open-Knesset,navotsil/Open-Knesset |
85b269bde76af2b8d15dc3b1e9f7cf882fc18dc2 | labcalc/tests/test_functions.py | labcalc/tests/test_functions.py | #!/usr/bin/env python3
from labcalc.run import *
from labcalc import gibson
# labcalc.gibson
def test_gibson_one_insert():
d = {'insert1': [300, 50], 'vector': [5000, 50]}
assert gibson.gibson_calc(d) == {'insert1': 0.24, 'vector': 2.0}
| #!/usr/bin/env python3
from labcalc.run import *
from labcalc import gibson
# labcalc.gibson
def test_gibson_one_insert():
d = {'vector': [5000, 50], 'insert1': [300, 50]}
assert gibson.gibson_calc(d) == {'vector': 2.0, 'insert1': 0.24}
def test_gibson_two_inserts():
d = {'vector': [5000, 50], 'insert1': [300, 50], 'insert2': [600, 50]}
assert gibson.gibson_calc(d) == {'vector': 2.0, 'insert1': 0.24, 'insert2': 0.48}
def test_gibson_four_inserts():
d = {'vector': [5000, 50],
'insert1': [300, 50], 'insert2': [600, 50], 'insert3': [300, 50], 'insert4': [600, 50]}
assert gibson.gibson_calc(d) == {'vector': 2.0, 'insert1': 0.12, 'insert2': 0.24, 'insert3': 0.12, 'insert4': 0.24}
| Add tests for multiple gibson inserts | Add tests for multiple gibson inserts
| Python | bsd-3-clause | dtarnowski16/labcalc,mandel01/labcalc,mjmlab/labcalc |
caaa807a4226bfdeb18681f8ccb6119bd2caa609 | pombola/core/context_processors.py | pombola/core/context_processors.py | from django.conf import settings
import logging
def add_settings( request ):
"""Add some selected settings values to the context"""
return {
'settings': {
'STAGING': settings.STAGING,
'STATIC_GENERATION_NUMBER': settings.STATIC_GENERATION_NUMBER,
'GOOGLE_ANALYTICS_ACCOUNT': settings.GOOGLE_ANALYTICS_ACCOUNT,
'POLLDADDY_WIDGET_ID': settings.POLLDADDY_WIDGET_ID,
'DISQUS_SHORTNAME': settings.DISQUS_SHORTNAME,
'DISQUS_USE_IDENTIFIERS': settings.DISQUS_USE_IDENTIFIERS,
'TWITTER_USERNAME': settings.TWITTER_USERNAME,
'TWITTER_WIDGET_ID': settings.TWITTER_WIDGET_ID,
'BLOG_RSS_FEED': settings.BLOG_RSS_FEED,
'ENABLED_FEATURES': settings.ENABLED_FEATURES,
'MAP_BOUNDING_BOX_NORTH': settings.MAP_BOUNDING_BOX_NORTH,
'MAP_BOUNDING_BOX_EAST': settings.MAP_BOUNDING_BOX_EAST,
'MAP_BOUNDING_BOX_SOUTH': settings.MAP_BOUNDING_BOX_SOUTH,
'MAP_BOUNDING_BOX_WEST': settings.MAP_BOUNDING_BOX_WEST,
}
}
| from django.conf import settings
import logging
def add_settings( request ):
"""Add some selected settings values to the context"""
return {
'settings': {
'STAGING': settings.STAGING,
'STATIC_GENERATION_NUMBER': settings.STATIC_GENERATION_NUMBER,
'GOOGLE_ANALYTICS_ACCOUNT': settings.GOOGLE_ANALYTICS_ACCOUNT,
'POLLDADDY_WIDGET_ID': settings.POLLDADDY_WIDGET_ID,
'DISQUS_SHORTNAME': settings.DISQUS_SHORTNAME,
'DISQUS_USE_IDENTIFIERS': settings.DISQUS_USE_IDENTIFIERS,
'TWITTER_USERNAME': settings.TWITTER_USERNAME,
'TWITTER_WIDGET_ID': settings.TWITTER_WIDGET_ID,
'BLOG_RSS_FEED': settings.BLOG_RSS_FEED,
'ENABLED_FEATURES': settings.ENABLED_FEATURES,
'COUNTRY_APP': settings.COUNTRY_APP,
'MAP_BOUNDING_BOX_NORTH': settings.MAP_BOUNDING_BOX_NORTH,
'MAP_BOUNDING_BOX_EAST': settings.MAP_BOUNDING_BOX_EAST,
'MAP_BOUNDING_BOX_SOUTH': settings.MAP_BOUNDING_BOX_SOUTH,
'MAP_BOUNDING_BOX_WEST': settings.MAP_BOUNDING_BOX_WEST,
}
}
| Add COUNTRY_APP to settings exposed to the templates | Add COUNTRY_APP to settings exposed to the templates
| Python | agpl-3.0 | hzj123/56th,mysociety/pombola,hzj123/56th,hzj123/56th,patricmutwiri/pombola,patricmutwiri/pombola,ken-muturi/pombola,patricmutwiri/pombola,patricmutwiri/pombola,mysociety/pombola,mysociety/pombola,patricmutwiri/pombola,geoffkilpin/pombola,hzj123/56th,geoffkilpin/pombola,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,ken-muturi/pombola,ken-muturi/pombola,hzj123/56th,ken-muturi/pombola,mysociety/pombola,patricmutwiri/pombola,ken-muturi/pombola,ken-muturi/pombola,geoffkilpin/pombola,mysociety/pombola,geoffkilpin/pombola,geoffkilpin/pombola |
8de4cb6e314da95b243f140f53b3c77487695a55 | tests/cyclus_tools.py | tests/cyclus_tools.py | #! /usr/bin/env python
from tools import check_cmd
def run_cyclus(cyclus, cwd, sim_files):
"""Runs cyclus with various inputs and creates output databases
"""
for sim_input, sim_output in zip(sim_files):
holdsrtn = [1] # needed because nose does not send() to test generator
cmd = [cyclus, "-o", sim_output, "--input-file", sim_input]
check_cmd(cmd, cwd, holdsrtn)
rtn = holdsrtn[0]
if rtn != 0:
return # don"t execute further commands
| #! /usr/bin/env python
from tools import check_cmd
def run_cyclus(cyclus, cwd, sim_files):
"""Runs cyclus with various inputs and creates output databases
"""
for sim_input, sim_output in sim_files:
holdsrtn = [1] # needed because nose does not send() to test generator
cmd = [cyclus, "-o", sim_output, "--input-file", sim_input]
check_cmd(cmd, cwd, holdsrtn)
rtn = holdsrtn[0]
if rtn != 0:
return # don"t execute further commands
| Correct zip() error in run_cyclus function | Correct zip() error in run_cyclus function
| Python | bsd-3-clause | Baaaaam/cyBaM,gonuke/cycamore,cyclus/cycaless,gonuke/cycamore,Baaaaam/cycamore,jlittell/cycamore,rwcarlsen/cycamore,Baaaaam/cyBaM,Baaaaam/cycamore,rwcarlsen/cycamore,jlittell/cycamore,gonuke/cycamore,jlittell/cycamore,rwcarlsen/cycamore,Baaaaam/cycamore,jlittell/cycamore,Baaaaam/cyBaM,Baaaaam/cyCLASS,cyclus/cycaless,Baaaaam/cyCLASS,gonuke/cycamore,Baaaaam/cyBaM,rwcarlsen/cycamore |
47d9a8df136e235f49921d4782c5e392b0101107 | migrations/versions/147_add_cleaned_subject.py | migrations/versions/147_add_cleaned_subject.py | """add cleaned subject
Revision ID: 486c7fa5b533
Revises: 1d7a72222b7c
Create Date: 2015-03-10 16:33:41.740387
"""
# revision identifiers, used by Alembic.
revision = '486c7fa5b533'
down_revision = 'c77a90d524'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.add_column('thread', sa.Column('_cleaned_subject',
sa.String(length=255), nullable=True))
op.create_index('ix_cleaned_subject', 'thread',
['namespace_id', '_cleaned_subject'], unique=False)
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.drop_index('ix_cleaned_subject', table_name='thread')
op.drop_column('thread', '_cleaned_subject')
| """add cleaned subject
Revision ID: 486c7fa5b533
Revises: 1d7a72222b7c
Create Date: 2015-03-10 16:33:41.740387
"""
# revision identifiers, used by Alembic.
revision = '486c7fa5b533'
down_revision = 'c77a90d524'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text
def upgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.add_column('thread', sa.Column('_cleaned_subject',
sa.String(length=255), nullable=True))
op.create_index('ix_cleaned_subject', 'thread', ['_cleaned_subject'],
unique=False)
def downgrade():
conn = op.get_bind()
conn.execute(text("set @@lock_wait_timeout = 20;"))
op.drop_index('ix_cleaned_subject', table_name='thread')
op.drop_column('thread', '_cleaned_subject')
| Make _cleaned_subject migration match declared schema. | Make _cleaned_subject migration match declared schema.
Test Plan: Upgrade old database to head.
Reviewers: kav-ya
Reviewed By: kav-ya
Differential Revision: https://review.inboxapp.com/D1394
| Python | agpl-3.0 | Eagles2F/sync-engine,Eagles2F/sync-engine,EthanBlackburn/sync-engine,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,nylas/sync-engine,closeio/nylas,jobscore/sync-engine,jobscore/sync-engine,jobscore/sync-engine,wakermahmud/sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,gale320/sync-engine,Eagles2F/sync-engine,nylas/sync-engine,closeio/nylas,nylas/sync-engine,gale320/sync-engine,gale320/sync-engine,gale320/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,wakermahmud/sync-engine,closeio/nylas,jobscore/sync-engine,gale320/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,closeio/nylas,EthanBlackburn/sync-engine,PriviPK/privipk-sync-engine,ErinCall/sync-engine,nylas/sync-engine,EthanBlackburn/sync-engine,PriviPK/privipk-sync-engine,Eagles2F/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine |
8910a61025062a40a3129f7a4330964b20337ec2 | insanity/core.py | insanity/core.py | import numpy as np
import theano
import theano.tensor as T | import numpy as np
import theano
import theano.tensor as T
class NeuralNetwork(object):
def __init__(self, layers, miniBatchSize):
self.miniBatchSize = miniBatchSize
#Initialize layers.
self.layers = layers
self.numLayers = len(self.layers)
self.firstLayer = self.layers[0]
self.lastLayer = self.layers[-1]
#Populate self.learningParams with a complete list of weights and biases from all layers.
self.learningParams = []
for layer in self.layers:
for param in layer.learningParams:
self.learningParams.append(param)
#Connect each layer's input to the previous layer's output.
for i in xrange(1, self.numLayers):
nextLayer = layers[i]
previousLayer = layers[i-1]
nextLayer.input = previousLayer.output | Add first code for NeuralNetwork class. | Add first code for NeuralNetwork class.
| Python | cc0-1.0 | cn04/insanity |
cdaeb29474df423e66cbc79fffa74d937fe2193c | justitie/just/pipelines.py | justitie/just/pipelines.py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import requests
import json
from just.items import JustPublication
import logging
API_KEY = 'justitie-very-secret-key'
API_PUBLICATIONS = 'http://czl-api.code4.ro/api/publications/'
class JustPublicationsToApiPipeline(object):
def process_item(self, item, spider):
if type(item) != JustPublication:
return item
r = requests.post(API_PUBLICATIONS, json=dict(item), headers={'Authorization': 'Token %s' % (API_KEY,) } )
return item
| # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import requests
import json
import logging
from just.items import JustPublication
import logging
API_KEY = 'justitie-very-secret-key'
API_PUBLICATIONS = 'http://czl-api.code4.ro/api/publications/'
class JustPublicationsToApiPipeline(object):
def process_item(self, item, spider):
if type(item) != JustPublication:
return item
r = requests.post(API_PUBLICATIONS, json=dict(item), headers={'Authorization': 'Token %s' % (API_KEY,) } )
api_log = logging.getLogger('api-log.txt')
if r.status_code == 200 or r.status_code == '200':
api_log.log(r.status_code, level=logging.INFO)
else:
api_log.log(r.status_code, level=logging.ERROR)
api_log.log(r.content, level=logging.INFO)
return item
| Add logging for api calls. | Add logging for api calls.
| Python | mpl-2.0 | mgax/czl-scrape,margelatu/czl-scrape,costibleotu/czl-scrape,mgax/czl-scrape,code4romania/czl-scrape,lbogdan/czl-scrape,mgax/czl-scrape,lbogdan/czl-scrape,lbogdan/czl-scrape,mgax/czl-scrape,code4romania/czl-scrape,code4romania/czl-scrape,code4romania/czl-scrape,lbogdan/czl-scrape,margelatu/czl-scrape,margelatu/czl-scrape,lbogdan/czl-scrape,mgax/czl-scrape,costibleotu/czl-scrape,margelatu/czl-scrape,costibleotu/czl-scrape,costibleotu/czl-scrape |
837efcddd6c111dabf14a6017d0ae2f6aacbddac | konstrukteur/HtmlParser.py | konstrukteur/HtmlParser.py | #
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
""" HTML parser class for Konstrukteur """
page = {}
parsedContent = BeautifulSoup(open(filename, "rt").read())
body = parsedContent.find("body")
page["content"] = "".join([str(tag) for tag in body.contents])
page["title"] = parsedContent.title.string
page["summary"] = body.p.get_text()
for meta in parsedContent.find_all("meta"):
page[meta["name"].lower()] = meta["contents"]
return page | #
# Konstrukteur - Static website generator
# Copyright 2013 Sebastian Fastner
#
__all__ = ["parse"]
from jasy.env.State import session
from jasy.core import Console
from bs4 import BeautifulSoup
def parse(filename):
""" HTML parser class for Konstrukteur """
page = {}
parsedContent = BeautifulSoup(open(filename, "rt").read())
body = parsedContent.find("body")
page["content"] = "".join([str(tag) for tag in body.contents])
page["title"] = parsedContent.title.string
page["summary"] = body.p.get_text()
for meta in parsedContent.find_all("meta"):
if not hasattr(meta, "name") or not hasattr(meta, "content"):
raise RuntimeError("Meta elements must have attributes name and content : %s" % filename)
page[meta["name"].lower()] = meta["content"]
return page | Add detection of wrong meta data | Add detection of wrong meta data
| Python | mit | fastner/konstrukteur,fastner/konstrukteur,fastner/konstrukteur |
a31db91800630520c5b516493bddef76ba8b7edd | flask_oauthlib/utils.py | flask_oauthlib/utils.py | # coding: utf-8
import logging
import base64
from flask import request, Response
from oauthlib.common import to_unicode, bytes_type
log = logging.getLogger('flask_oauthlib')
def extract_params():
"""Extract request params."""
uri = request.url
http_method = request.method
headers = dict(request.headers)
if 'wsgi.input' in headers:
del headers['wsgi.input']
if 'wsgi.errors' in headers:
del headers['wsgi.errors']
if 'Http-Authorization' in headers:
headers['Authorization'] = headers['Http-Authorization']
body = request.form.to_dict()
return uri, http_method, body, headers
def decode_base64(text):
"""Decode base64 string."""
# make sure it is bytes
if not isinstance(text, bytes_type):
text = text.encode('utf-8')
return to_unicode(base64.b64decode(text), 'utf-8')
def create_response(headers, body, status):
"""Create response class for Flask."""
response = Response(body or '')
for k, v in headers.items():
response.headers[k] = v
response.status_code = status
return response
| # coding: utf-8
import logging
import base64
from flask import request, Response
from oauthlib.common import to_unicode, bytes_type
log = logging.getLogger('flask_oauthlib')
def extract_params():
"""Extract request params."""
uri = request.url
http_method = request.method
headers = dict(request.headers)
if 'wsgi.input' in headers:
del headers['wsgi.input']
if 'wsgi.errors' in headers:
del headers['wsgi.errors']
body = request.form.to_dict()
return uri, http_method, body, headers
def decode_base64(text):
"""Decode base64 string."""
# make sure it is bytes
if not isinstance(text, bytes_type):
text = text.encode('utf-8')
return to_unicode(base64.b64decode(text), 'utf-8')
def create_response(headers, body, status):
"""Create response class for Flask."""
response = Response(body or '')
for k, v in headers.items():
response.headers[k] = v
response.status_code = status
return response
| Delete useless header transform in extract_params. | Delete useless header transform in extract_params.
| Python | bsd-3-clause | auerj/flask-oauthlib,auerj/flask-oauthlib,kevin1024/flask-oauthlib,stianpr/flask-oauthlib,CoreyHyllested/flask-oauthlib,lepture/flask-oauthlib,Ryan-K/flask-oauthlib,tonyseek/flask-oauthlib,RealGeeks/flask-oauthlib,adambard/flask-oauthlib,huxuan/flask-oauthlib,PyBossa/flask-oauthlib,Fleurer/flask-oauthlib,CoreyHyllested/flask-oauthlib,RealGeeks/flask-oauthlib,brightforme/flask-oauthlib,huxuan/flask-oauthlib,icook/flask-oauthlib,adambard/flask-oauthlib,CommonsCloud/CommonsCloud-FlaskOAuthlib,PyBossa/flask-oauthlib,icook/flask-oauthlib,tonyseek/flask-oauthlib,brightforme/flask-oauthlib,cogniteev/flask-oauthlib,landler/flask-oauthlib,cogniteev/flask-oauthlib,stianpr/flask-oauthlib,CommonsCloud/CommonsCloud-FlaskOAuthlib,landler/flask-oauthlib,lepture/flask-oauthlib,Fleurer/flask-oauthlib,kevin1024/flask-oauthlib,Ryan-K/flask-oauthlib |
a91a04af6b95fa600a0b3ce74b5fffc07ecf590e | polymorphic/__init__.py | polymorphic/__init__.py | # -*- coding: utf-8 -*-
"""
Seamless Polymorphic Inheritance for Django Models
Copyright:
This code and affiliated files are (C) by Bert Constantin and individual contributors.
Please see LICENSE and AUTHORS for more information.
"""
# See PEP 440 (https://www.python.org/dev/peps/pep-0440/)
__version__ = "1.3"
| # -*- coding: utf-8 -*-
"""
Seamless Polymorphic Inheritance for Django Models
Copyright:
This code and affiliated files are (C) by Bert Constantin and individual contributors.
Please see LICENSE and AUTHORS for more information.
"""
import pkg_resources
__version__ = pkg_resources.require("django-polymorphic")[0].version
| Set polymorphic.__version__ from setuptools metadata | Set polymorphic.__version__ from setuptools metadata
| Python | bsd-3-clause | skirsdeda/django_polymorphic,skirsdeda/django_polymorphic,skirsdeda/django_polymorphic,chrisglass/django_polymorphic,chrisglass/django_polymorphic |
8cb680c7fbadfe6cfc245fe1eb1261a00c5ffd6d | djmoney/forms/fields.py | djmoney/forms/fields.py | from __future__ import unicode_literals
from warnings import warn
from django.forms import MultiValueField, DecimalField, ChoiceField
from moneyed.classes import Money
from .widgets import MoneyWidget, CURRENCY_CHOICES
__all__ = ('MoneyField',)
class MoneyField(MultiValueField):
def __init__(self, currency_widget=None, currency_choices=CURRENCY_CHOICES, choices=CURRENCY_CHOICES,
max_value=None, min_value=None,
max_digits=None, decimal_places=None, *args, **kwargs):
if currency_choices != CURRENCY_CHOICES:
warn('currency_choices will be deprecated in favor of choices', PendingDeprecationWarning)
choices = currency_choices
decimal_field = DecimalField(max_value, min_value, max_digits, decimal_places, *args, **kwargs)
choice_field = ChoiceField(choices=currency_choices)
self.widget = currency_widget if currency_widget else MoneyWidget(amount_widget=decimal_field.widget,
currency_widget=choice_field.widget)
fields = (decimal_field, choice_field)
super(MoneyField, self).__init__(fields, *args, **kwargs)
def compress(self, data_list):
return Money(*data_list[:2]) | from __future__ import unicode_literals
from warnings import warn
from django.forms import MultiValueField, DecimalField, ChoiceField
from moneyed.classes import Money
from .widgets import MoneyWidget, CURRENCY_CHOICES
__all__ = ('MoneyField',)
class MoneyField(MultiValueField):
def __init__(self, currency_widget=None, currency_choices=CURRENCY_CHOICES, choices=CURRENCY_CHOICES,
max_value=None, min_value=None,
max_digits=None, decimal_places=None, *args, **kwargs):
if currency_choices != CURRENCY_CHOICES:
warn('currency_choices will be deprecated in favor of choices', PendingDeprecationWarning)
choices = currency_choices
decimal_field = DecimalField(max_value, min_value, max_digits, decimal_places, *args, **kwargs)
choice_field = ChoiceField(choices=currency_choices)
self.widget = currency_widget if currency_widget else MoneyWidget(amount_widget=decimal_field.widget,
currency_widget=choice_field.widget)
fields = (decimal_field, choice_field)
super(MoneyField, self).__init__(fields, *args, **kwargs)
def compress(self, data_list):
try:
if data_list[0] is None:
return None
except IndexError:
return None
return Money(*data_list[:2])
| Support for value of None in MoneyField.compress. Leaving a MoneyField blank in the Django admin site caused an issue when attempting to save an exception was raised since Money was getting an argument list of None. | Support for value of None in MoneyField.compress.
Leaving a MoneyField blank in the Django admin site caused an issue when
attempting to save an exception was raised since Money was getting an
argument list of None.
| Python | bsd-3-clause | recklessromeo/django-money,rescale/django-money,iXioN/django-money,AlexRiina/django-money,tsouvarev/django-money,iXioN/django-money,tsouvarev/django-money,recklessromeo/django-money |
98ca37ed174e281542df2f1026a298387845b524 | rmgpy/tools/data/generate/input.py | rmgpy/tools/data/generate/input.py | # Data sources for kinetics
database(
thermoLibraries = ['primaryThermoLibrary'],
reactionLibraries = [],
seedMechanisms = [],
kineticsDepositories = 'default',
#this section lists possible reaction families to find reactioons with
kineticsFamilies = ['!Intra_Disproportionation','!Substitution_O'],
kineticsEstimator = 'rate rules',
)
# List all species you want reactions between
species(
label='ethane',
reactive=True,
structure=SMILES("CC"),
)
species(
label='H',
reactive=True,
structure=SMILES("[H]"),
)
species(
label='butane',
reactive=True,
structure=SMILES("CCCC"),
)
# you must list reactor conditions (though this may not effect the output)
simpleReactor(
temperature=(650,'K'),
pressure=(10.0,'bar'),
initialMoleFractions={
"ethane": 1,
},
terminationConversion={
'butane': .99,
},
terminationTime=(40,'s'),
) | # Data sources for kinetics
database(
thermoLibraries = ['primaryThermoLibrary'],
reactionLibraries = [],
seedMechanisms = [],
kineticsDepositories = 'default',
#this section lists possible reaction families to find reactioons with
kineticsFamilies = ['R_Recombination'],
kineticsEstimator = 'rate rules',
)
# List all species you want reactions between
species(
label='Propyl',
reactive=True,
structure=SMILES("CC[CH3]"),
)
species(
label='H',
reactive=True,
structure=SMILES("[H]"),
)
# you must list reactor conditions (though this may not effect the output)
simpleReactor(
temperature=(650,'K'),
pressure=(10.0,'bar'),
initialMoleFractions={
"Propyl": 1,
},
terminationConversion={
'Propyl': .99,
},
terminationTime=(40,'s'),
)
| Cut down on the loading of families in the normal GenerateReactionsTest | Cut down on the loading of families in the normal GenerateReactionsTest
Change generateReactions input reactant to propyl
| Python | mit | nickvandewiele/RMG-Py,nyee/RMG-Py,pierrelb/RMG-Py,chatelak/RMG-Py,pierrelb/RMG-Py,nickvandewiele/RMG-Py,chatelak/RMG-Py,nyee/RMG-Py |
25695e927fbbf46df385b4c68fa4d80b81283ace | indico/migrations/versions/20200904_1543_f37d509e221c_add_user_profile_picture_source_column.py | indico/migrations/versions/20200904_1543_f37d509e221c_add_user_profile_picture_source_column.py | """Add column for profile picture type to User
Revision ID: f37d509e221c
Revises: c997dc927fbc
Create Date: 2020-09-04 15:43:18.413156
"""
from enum import Enum
import sqlalchemy as sa
from alembic import op
from indico.core.db.sqlalchemy import PyIntEnum
# revision identifiers, used by Alembic.
revision = 'f37d509e221c'
down_revision = 'c997dc927fbc'
branch_labels = None
depends_on = None
class _ProfilePictureSource(int, Enum):
standard = 0
identicon = 1
gravatar = 2
custom = 3
def upgrade():
op.add_column('users',
sa.Column('picture_source', PyIntEnum(_ProfilePictureSource), nullable=False, server_default='0'),
schema='users')
op.alter_column('users', 'picture_source', server_default=None, schema='users')
op.execute('UPDATE users.users SET picture_source = 3 WHERE picture IS NOT NULL')
def downgrade():
op.drop_column('users', 'picture_source', schema='users')
| """Add column for profile picture type to User
Revision ID: f37d509e221c
Revises: c997dc927fbc
Create Date: 2020-09-04 15:43:18.413156
"""
from enum import Enum
import sqlalchemy as sa
from alembic import op
from werkzeug.http import http_date
from indico.core.db.sqlalchemy import PyIntEnum
from indico.util.date_time import now_utc
# revision identifiers, used by Alembic.
revision = 'f37d509e221c'
down_revision = 'c997dc927fbc'
branch_labels = None
depends_on = None
class _ProfilePictureSource(int, Enum):
standard = 0
identicon = 1
gravatar = 2
custom = 3
def upgrade():
op.add_column('users',
sa.Column('picture_source', PyIntEnum(_ProfilePictureSource), nullable=False, server_default='0'),
schema='users')
op.alter_column('users', 'picture_source', server_default=None, schema='users')
op.execute('UPDATE users.users SET picture_source = 3 WHERE picture IS NOT NULL')
op.execute('''
UPDATE users.users
SET picture_metadata = picture_metadata || '{"lastmod": "%s"}'::jsonb
WHERE picture_source = 3 AND NOT (picture_metadata ? 'lastmod')
''' % http_date(now_utc()))
def downgrade():
op.drop_column('users', 'picture_source', schema='users')
| Add lastmod to existing profile picture metadata | Add lastmod to existing profile picture metadata
| Python | mit | pferreir/indico,indico/indico,pferreir/indico,indico/indico,indico/indico,DirkHoffmann/indico,ThiefMaster/indico,ThiefMaster/indico,ThiefMaster/indico,pferreir/indico,pferreir/indico,DirkHoffmann/indico,ThiefMaster/indico,indico/indico,DirkHoffmann/indico,DirkHoffmann/indico |
6384fd52a4d271f0f3403ae613dd66cbeb217ddf | indra/tests/test_biogrid.py | indra/tests/test_biogrid.py | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import biogrid_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
from indra.sources.biogrid import process_file
from indra.statements import Complex
import os
this_dir = os.path.dirname(__file__)
test_file = os.path.join(this_dir, 'biogrid_tests_data/biogrid_test.txt')
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
def test_biogrid_tsv():
# Download biogrid file form the web and process it
bp = process_file(test_file)
# There are 50 statements in that file
statements = bp.statements
assert(len(statements) == 50)
# Any given statement should be a complex, with appropriate evidence
s0 = statements[0]
assert(isinstance(s0, Complex))
ev = s0.evidence[0]
assert(ev.source_api == 'biogrid')
assert(ev.text is None)
assert(ev.pmid is not None)
# The first statement in the file involves MAP2K4 and FLNC
assert(str(s0.members[0]) == 'MAP2K4()')
assert(str(s0.members[1]) == 'FLNC()')
| from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import os
from nose.plugins.attrib import attr
from indra.statements import Complex
from indra.databases import biogrid_client
from indra.util import unicode_strs
from indra.sources.biogrid import BiogridProcessor
this_dir = os.path.dirname(__file__)
test_file = os.path.join(this_dir, 'biogrid_tests_data/biogrid_test.txt')
@attr('webservice', 'nonpublic')
def test_biogrid_request():
results = biogrid_client._send_request(['MAP2K1', 'MAPK1'])
assert results is not None
assert unicode_strs(results)
def test_biogrid_tsv():
# Download biogrid file form the web and process it
bp = BiogridProcessor(test_file)
# There are 50 statements in that file
statements = bp.statements
assert(len(statements) == 50)
# Any given statement should be a complex, with appropriate evidence
s0 = statements[0]
assert(isinstance(s0, Complex))
ev = s0.evidence[0]
assert(ev.source_api == 'biogrid')
assert(ev.text is None)
assert(ev.pmid is not None)
# The first statement in the file involves MAP2K4 and FLNC
assert(str(s0.members[0]) == 'MAP2K4()')
assert(str(s0.members[1]) == 'FLNC()')
| Update test to use new API | Update test to use new API
| Python | bsd-2-clause | johnbachman/indra,pvtodorov/indra,sorgerlab/belpy,sorgerlab/belpy,bgyori/indra,johnbachman/belpy,johnbachman/indra,bgyori/indra,johnbachman/indra,sorgerlab/indra,pvtodorov/indra,johnbachman/belpy,pvtodorov/indra,bgyori/indra,sorgerlab/indra,sorgerlab/belpy,sorgerlab/indra,pvtodorov/indra,johnbachman/belpy |
79c8d40d8a47a4413540acac671345dd5faed46e | suorganizer/urls.py | suorganizer/urls.py | """suorganizer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from organizer.views import homepage, tag_detail
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage),
url(r'^tag/(?P<slug>[\w\-]+)/$',
tag_detail,
),
]
| """suorganizer URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from organizer.views import homepage, tag_detail
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^$', homepage),
url(r'^tag/(?P<slug>[\w\-]+)/$',
tag_detail,
name='organizer_tag_detail'),
]
| Add name parameter to Tag Detail URL. | Ch05: Add name parameter to Tag Detail URL.
| Python | bsd-2-clause | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 |
3d71a09837d73e2a976f1911ed072225ffc2f841 | marconiclient/auth/base.py | marconiclient/auth/base.py | # Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class AuthBackend(object):
def __init__(self, conf):
self.conf = conf
@abc.abstractmethod
def authenticate(self, api_version, request):
"""Authenticates the user in the selected backend.
Auth backends will have to manipulate the
request and prepare it to send the auth information
back to Marconi's instance.
:params api_version: Marconi's API verison.
:params request: Request Spec instance
that can be manipulated by the backend
if the authentication succeeds.
:returns: The modified request spec.
"""
class NoAuth(AuthBackend):
"""No Auth Plugin."""
def authenticate(self, api_version, req):
return req
| # Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class AuthBackend(object):
def __init__(self, conf):
self.conf = conf
@abc.abstractmethod
def authenticate(self, api_version, request):
"""Authenticates the user in the selected backend.
Auth backends will have to manipulate the
request and prepare it to send the auth information
back to Marconi's instance.
:params api_version: Marconi's API version.
:params request: Request Spec instance
that can be manipulated by the backend
if the authentication succeeds.
:returns: The modified request spec.
"""
class NoAuth(AuthBackend):
"""No Auth Plugin."""
def authenticate(self, api_version, req):
return req
| Fix misspellings in python marconiclient | Fix misspellings in python marconiclient
Fix misspellings detected by:
* pip install misspellings
* git ls-files | grep -v locale | misspellings -f -
Change-Id: I4bbc0ba5be154950a160871ef5675039697f2314
Closes-Bug: #1257295
| Python | apache-2.0 | openstack/python-zaqarclient |
38ae4ddab1a5b94d03941c4080df72fea4e750bc | defprogramming/settings_production.py | defprogramming/settings_production.py | from defprogramming.settings import *
ALLOWED_HOSTS = ['*']
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# Update database configuration with $DATABASE_URL.
import dj_database_url
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# MIDDLEWARE_CLASSES += ('sslify.middleware.SSLifyMiddleware',)
PREPEND_WWW = True
| import os
from defprogramming.settings import *
ALLOWED_HOSTS = ['*']
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# Update database configuration with $DATABASE_URL.
import dj_database_url
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# MIDDLEWARE_CLASSES += ('sslify.middleware.SSLifyMiddleware',)
PREPEND_WWW = True
SECRET_KEY = os.environ['SECRET_KEY']
| Add secret key env var | Add secret key env var
| Python | mit | daviferreira/defprogramming,daviferreira/defprogramming,daviferreira/defprogramming |
75c48ecbac476fd751e55745cc2935c1dac1f138 | longest_duplicated_substring.py | longest_duplicated_substring.py | #!/usr/bin/env python
import sys
# O(n^4) approach: generate all possible substrings and
# compare each for equality.
def longest_duplicated_substring(string):
"""Return the longest duplicated substring.
Keyword Arguments:
string -- the string to examine for duplicated substrings
This approach examines each possible pair of starting points
for duplicated substrings. If the characters at those points are
the same, the match is extended up to the maximum length for those
points. Each new longest duplicated substring is recorded as the
best found so far.
This solution is optimal for the naive brute-force approach and
runs in O(n^3).
"""
lds = ""
string_length = len(string)
for i in range(string_length):
for j in range(i+1,string_length):
# Alternate approach with while loop here and max update outside.
# Can also break length check into function.
for substring_length in range(string_length-j):
if string[i+substring_length] != string[j+substring_length]:
break
elif substring_length + 1 > len(lds):
lds = string[i:i+substring_length+1]
return lds
if __name__ == "__main__":
print(longest_duplicated_substring(' '.join(map(str, sys.argv[1:]))))
| #!/usr/bin/env python
import sys
def longest_duplicated_substring(string):
"""Return the longest duplicated substring.
Keyword Arguments:
string -- the string to examine for duplicated substrings
This approach examines each possible pair of starting points
for duplicated substrings. If the characters at those points are
the same, the match is extended up to the maximum length for those
points. Each new longest duplicated substring is recorded as the
best found so far.
This solution is optimal for the naive brute-force approach and
runs in O(n^3).
"""
lds = ""
string_length = len(string)
for i in range(string_length):
for j in range(i+1,string_length):
for substring_length in range(string_length-j):
if string[i+substring_length] != string[j+substring_length]:
break
elif substring_length + 1 > len(lds):
lds = string[i:i+substring_length+1]
return lds
if __name__ == "__main__":
print(longest_duplicated_substring(' '.join(map(str, sys.argv[1:]))))
| Move todos into issues tracking on GitHub | Move todos into issues tracking on GitHub
| Python | mit | taylor-peterson/longest-duplicated-substring |
847a88c579118f8a0d528284ab3ea029ccca7215 | git_pre_commit_hook/builtin_plugins/rst_check.py | git_pre_commit_hook/builtin_plugins/rst_check.py | import os
import fnmatch
import restructuredtext_lint
DEFAULTS = {
'files': '*.rst',
}
def make_message(error):
return '%s %s:%s %s\n' % (
error.type, error.source, error.line, error.message,
)
def check(file_staged_for_commit, options):
basename = os.path.basename(file_staged_for_commit.path)
if not fnmatch.fnmatch(basename, options.rst_files):
return True
errors = restructuredtext_lint.lint(
file_staged_for_commit.contents,
file_staged_for_commit.path,
)
if errors:
print('\n'.join(make_message(e) for e in errors))
return False
else:
return True
| """Check that files contains valid ReStructuredText."""
import os
import fnmatch
import restructuredtext_lint
DEFAULTS = {
'files': '*.rst',
}
def make_message(error):
return '%s %s:%s %s\n' % (
error.type, error.source, error.line, error.message,
)
def check(file_staged_for_commit, options):
basename = os.path.basename(file_staged_for_commit.path)
if not fnmatch.fnmatch(basename, options.rst_files):
return True
errors = restructuredtext_lint.lint(
file_staged_for_commit.contents,
file_staged_for_commit.path,
)
if errors:
print('\n'.join(make_message(e) for e in errors))
return False
else:
return True
| Add description to rst plugin | Add description to rst plugin
| Python | mit | evvers/git-pre-commit-hook |
bc7b1fc053150728095ec5d0a41611aa4d4ede45 | kerrokantasi/settings/__init__.py | kerrokantasi/settings/__init__.py | from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
if not settings["DEBUG"] and settings["JWT_AUTH"]["JWT_SECRET_KEY"] == "kerrokantasi":
raise ValueError("Refusing to run out of DEBUG mode with insecure JWT secret key.")
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video,dialog,fakeobjects,iframe',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['BulletedList', 'NumberedList'],
['Image', 'Video', 'Iframe', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
| from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video,dialog,fakeobjects,iframe',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['BulletedList', 'NumberedList'],
['Image', 'Video', 'Iframe', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
| Remove JWT_AUTH check from settings | Remove JWT_AUTH check from settings
JWT settings has been removed in OpenID change and currently there isn't use for this.
| Python | mit | City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi,City-of-Helsinki/kerrokantasi |
c0fc60aa5fd51ac9a5795017fdc57d5b89b300e7 | tests/check_locale_format_consistency.py | tests/check_locale_format_consistency.py | import re
import json
import glob
locale_folder = "../locales/"
locale_files = glob.glob(locale_folder + "*.json")
locale_files = [filename.split("/")[-1] for filename in locale_files]
locale_files.remove("en.json")
reference = json.loads(open(locale_folder + "en.json").read())
for locale_file in locale_files:
this_locale = json.loads(open(locale_folder + locale_file).read())
for key, string in reference.items():
if key in this_locale:
subkeys_in_ref = set(k[0] for k in re.findall(r"{(\w+)(:\w)?}", string))
subkeys_in_this_locale = set(k[0] for k in re.findall(r"{(\w+)(:\w)?}", this_locale[key]))
if any(key not in subkeys_in_ref for key in subkeys_in_this_locale):
print("\n")
print("==========================")
print("Format inconsistency for string %s in %s:" % (key, locale_file))
print("%s -> %s " % ("en.json", string))
print("%s -> %s " % (locale_file, this_locale[key]))
| import re
import json
import glob
# List all locale files (except en.json being the ref)
locale_folder = "../locales/"
locale_files = glob.glob(locale_folder + "*.json")
locale_files = [filename.split("/")[-1] for filename in locale_files]
locale_files.remove("en.json")
reference = json.loads(open(locale_folder + "en.json").read())
found_inconsistencies = False
# Let's iterate over each locale file
for locale_file in locale_files:
this_locale = json.loads(open(locale_folder + locale_file).read())
# We iterate over all keys/string in en.json
for key, string in reference.items():
# If there is a translation available for this key/string
if key in this_locale:
# Then we check that every "{stuff}" (for python's .format())
# should also be in the translated string, otherwise the .format
# will trigger an exception!
subkeys_in_ref = set(k[0] for k in re.findall(r"{(\w+)(:\w)?}", string))
subkeys_in_this_locale = set(k[0] for k in re.findall(r"{(\w+)(:\w)?}", this_locale[key]))
if any(key not in subkeys_in_ref for key in subkeys_in_this_locale):
found_inconsistencies = True
print("\n")
print("==========================")
print("Format inconsistency for string %s in %s:" % (key, locale_file))
print("%s -> %s " % ("en.json", string))
print("%s -> %s " % (locale_file, this_locale[key]))
if found_inconsistencies:
sys.exit(1)
| Add comments + return 1 if inconsistencies found | Add comments + return 1 if inconsistencies found
| Python | agpl-3.0 | YunoHost/yunohost,YunoHost/yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost |
875e9df7d59cbf8d504696b1eb906f4da0ffabc2 | test/cooper_test.py | test/cooper_test.py | import pagoda.cooper
class Base(object):
def setUp(self):
self.world = pagoda.cooper.World()
class TestMarkers(Base):
def setUp(self):
super(TestMarkers, self).setUp()
self.markers = pagoda.cooper.Markers(self.world)
def test_c3d(self):
self.markers.load_c3d('examples/cooper-motion.c3d')
assert self.markers.num_frames == 343
assert len(self.markers.marker_bodies) == 41
assert len(self.markers.attach_bodies) == 0
assert len(self.markers.attach_offsets) == 0
assert len(self.markers.channels) == 41
def test_csv(self):
return # TODO
self.markers.load_csv('examples/cooper-motion.csv')
assert self.markers.num_frames == 343
assert len(self.markers.marker_bodies) == 41
assert len(self.markers.attach_bodies) == 0
assert len(self.markers.attach_offsets) == 0
assert len(self.markers.channels) == 41
| import pagoda.cooper
class Base(object):
def setUp(self):
self.world = pagoda.cooper.World()
class TestMarkers(Base):
def setUp(self):
super(TestMarkers, self).setUp()
self.markers = pagoda.cooper.Markers(self.world)
def test_c3d(self):
self.markers.load_c3d('examples/cooper-motion.c3d')
assert self.markers.num_frames == 343
assert len(self.markers.marker_bodies) == 41
assert len(self.markers.attach_bodies) == 0
assert len(self.markers.attach_offsets) == 0
assert len(self.markers.channels) == 41
def test_csv(self):
return # TODO
self.markers.load_csv('examples/cooper-motion.csv')
assert self.markers.num_frames == 343
assert len(self.markers.marker_bodies) == 41
assert len(self.markers.attach_bodies) == 0
assert len(self.markers.attach_offsets) == 0
assert len(self.markers.channels) == 41
| Fix style in cooper test. | Fix style in cooper test.
| Python | mit | EmbodiedCognition/pagoda,EmbodiedCognition/pagoda |
423dcb102fc2b7a1108a0b0fe1e116e8a5d451c9 | netsecus/korrekturtools.py | netsecus/korrekturtools.py | from __future__ import unicode_literals
import os
def readStatus(student):
student = student.lower()
if not os.path.exists("attachments"):
return
if not os.path.exists(os.path.join("attachments", student)):
return "Student ohne Abgabe"
if not os.path.exists(os.path.join("attachments", student, "korrekturstatus.txt")):
return "Unbearbeitet"
statusfile = open(os.path.join("attachments", student, "korrekturstatus.txt"), "r")
status = statusfile.read()
statusfile.close()
return status
def writeStatus(student, status):
student = student.lower()
status = status.lower()
if not os.path.exists(os.path.join("attachments", student)):
return
statusfile = open(os.path.join("attachments", student, "korrekturstatus.txt"), "w")
statusfile.write(status)
statusfile.close()
| from __future__ import unicode_literals
import os
from . import helper
def readStatus(student):
student = student.lower()
if not os.path.exists("attachments"):
return
if not os.path.exists(os.path.join("attachments", student)):
return "Student ohne Abgabe"
if not os.path.exists(os.path.join("attachments", student, "korrekturstatus.txt")):
return "Unbearbeitet"
statusfile = open(os.path.join("attachments", student, "korrekturstatus.txt"), "r")
status = statusfile.read()
statusfile.close()
return status
def writeStatus(student, status):
student = student.lower()
status = status.lower()
if not os.path.exists(os.path.join("attachments", student)):
logging.error("Requested student '%s' hasn't submitted anything yet.")
return
statusfile = open(os.path.join("attachments", student, "korrekturstatus.txt"), "w")
statusfile.write(status)
statusfile.close()
| Add error message for malformed request | Add error message for malformed request
| Python | mit | hhucn/netsec-uebungssystem,hhucn/netsec-uebungssystem,hhucn/netsec-uebungssystem |
c88789847a9bf604d897f4b469a3585347fef3f9 | portality/migrate/2819_clean_unused_license_data/operations.py | portality/migrate/2819_clean_unused_license_data/operations.py | def clean(record):
if record.bibjson().get_journal_license():
record.bibjson().remove_journal_license()
return record
| def clean(record):
if record.bibjson().get_journal_license():
record.bibjson().remove_journal_licences()
return record
| Fix another typo in migration | Fix another typo in migration
| Python | apache-2.0 | DOAJ/doaj,DOAJ/doaj,DOAJ/doaj,DOAJ/doaj |
3cbc3b96d3f91c940c5d762ce08da9814c29b04d | utils/gyb_syntax_support/protocolsMap.py | utils/gyb_syntax_support/protocolsMap.py | SYNTAX_BUILDABLE_EXPRESSIBLE_BY_CONFORMANCES = {
'ExpressibleByConditionElement': [
'ExpressibleByConditionElementList'
],
'ExpressibleByDeclBuildable': [
'ExpressibleByCodeBlockItem',
'ExpressibleByMemberDeclListItem',
'ExpressibleBySyntaxBuildable'
],
'ExpressibleByStmtBuildable': [
'ExpressibleByCodeBlockItem',
'ExpressibleBySyntaxBuildable'
],
'ExpressibleByExprList': [
'ExpressibleByConditionElement',
'ExpressibleBySyntaxBuildable'
]
}
| SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = {
'ExpressibleAsConditionElement': [
'ExpressibleAsConditionElementList'
],
'ExpressibleAsDeclBuildable': [
'ExpressibleAsCodeBlockItem',
'ExpressibleAsMemberDeclListItem',
'ExpressibleAsSyntaxBuildable'
],
'ExpressibleAsStmtBuildable': [
'ExpressibleAsCodeBlockItem',
'ExpressibleAsSyntaxBuildable'
],
'ExpressibleAsExprList': [
'ExpressibleAsConditionElement',
'ExpressibleAsSyntaxBuildable'
]
}
| Revert "[SwiftSyntax] Replace ExpressibleAs protocols by ExpressibleBy protocols" | Revert "[SwiftSyntax] Replace ExpressibleAs protocols by ExpressibleBy protocols"
| Python | apache-2.0 | roambotics/swift,glessard/swift,ahoppen/swift,roambotics/swift,apple/swift,roambotics/swift,gregomni/swift,ahoppen/swift,JGiola/swift,JGiola/swift,apple/swift,gregomni/swift,benlangmuir/swift,gregomni/swift,glessard/swift,atrick/swift,benlangmuir/swift,ahoppen/swift,atrick/swift,benlangmuir/swift,gregomni/swift,atrick/swift,glessard/swift,rudkx/swift,benlangmuir/swift,glessard/swift,apple/swift,benlangmuir/swift,ahoppen/swift,rudkx/swift,roambotics/swift,roambotics/swift,glessard/swift,glessard/swift,ahoppen/swift,atrick/swift,apple/swift,JGiola/swift,JGiola/swift,rudkx/swift,atrick/swift,rudkx/swift,gregomni/swift,rudkx/swift,rudkx/swift,roambotics/swift,atrick/swift,gregomni/swift,apple/swift,JGiola/swift,ahoppen/swift,JGiola/swift,apple/swift,benlangmuir/swift |
1b3f97ff7bc219588b94a2346ac91f10203e44b9 | matador/commands/deployment/__init__.py | matador/commands/deployment/__init__.py | from .deploy_sql_script import DeploySqlScript, DeployOraclePackage
from .deploy_report import DeployExceleratorReport
| from .deploy_sql_script import DeploySqlScript, DeployOraclePackage
from .deploy_report import DeployExceleratorReport, DeployReportFile
| Add report file deployment to init | Add report file deployment to init
| Python | mit | Empiria/matador |
7ea03c6ded823458d7159c05f89d99ee3c4a2e42 | scripts/tools/botmap.py | scripts/tools/botmap.py | #!/usr/bin/env python
import os
import sys
path = os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')
sys.path.append(path)
import chromium_utils
slaves = []
for master in chromium_utils.ListMasters():
masterbase = os.path.basename(master)
master_slaves = {}
execfile(os.path.join(master, 'slaves.cfg'), master_slaves)
for slave in master_slaves.get('slaves', []):
slave['master'] = masterbase
slaves.extend(master_slaves.get('slaves', []))
for slave in sorted(slaves, cmp=None, key=lambda x : x.get('hostname', '')):
slavename = slave.get('hostname')
if not slavename:
continue
osname = slave.get('os', '?')
print '%-30s %-35s %-10s' % (slavename, slave.get('master', '?'), osname)
| #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Dumps a list of known slaves, along with their OS and master."""
import os
import sys
path = os.path.join(os.path.dirname(__file__), os.path.pardir)
sys.path.append(path)
from common import chromium_utils
slaves = []
for master in chromium_utils.ListMasters():
masterbase = os.path.basename(master)
master_slaves = {}
execfile(os.path.join(master, 'slaves.cfg'), master_slaves)
for slave in master_slaves.get('slaves', []):
slave['master'] = masterbase
slaves.extend(master_slaves.get('slaves', []))
for slave in sorted(slaves, cmp=None, key=lambda x : x.get('hostname', '')):
slavename = slave.get('hostname')
if not slavename:
continue
osname = slave.get('os', '?')
print '%-30s %-35s %-10s' % (slavename, slave.get('master', '?'), osname)
| Tweak import statement to satisfy presubmit checks. | Tweak import statement to satisfy presubmit checks.
Review URL: http://codereview.chromium.org/8292004
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@105578 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build |
6fc5a47efbd4b760672b13292c5c4886842fbdbd | tests/local_test.py | tests/local_test.py | from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@istest
def cwd_of_run_can_be_set():
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
| from nose.tools import istest, assert_equal
from spur import LocalShell
shell = LocalShell()
@istest
def output_of_run_is_stored():
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@istest
def cwd_of_run_can_be_set():
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
@istest
def environment_variables_can_be_added_for_run():
result = shell.run(["sh", "-c", "echo $NAME"], update_env={"NAME": "Bob"})
assert_equal("Bob\n", result.output)
| Add test for LocalShell.run with update_env | Add test for LocalShell.run with update_env
| Python | bsd-2-clause | mwilliamson/spur.py |
16eb7232c3bf8470ca37c5e67d1af7d86b5c7b14 | test/integration/022_bigquery_test/test_bigquery_copy_failing_models.py | test/integration/022_bigquery_test/test_bigquery_copy_failing_models.py | from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryCopyTableFails(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "copy-failing-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
original:
materialized: table
copy_bad_materialization:
materialized: copy
'''))
@use_profile('bigquery')
def test__bigquery_copy_table_fails(self):
results = self.run_dbt(expect_pass=False)
self.assertEqual(len(results), 2)
self.assertTrue(results[0].error)
| from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryCopyTableFails(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "copy-failing-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
original:
materialized: table
copy_bad_materialization:
materialized: copy
'''))
@use_profile('bigquery')
def test__bigquery_copy_table_fails(self):
results = self.run_dbt(expect_pass=False)
self.assertEqual(len(results), 2)
self.assertTrue(results[1].error)
| Check the copy model for failure | Check the copy model for failure
| Python | apache-2.0 | analyst-collective/dbt,analyst-collective/dbt |
08d1db2f6031d3496309ae290e4d760269706d26 | meinberlin/config/settings/dev.py | meinberlin/config/settings/dev.py | from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
DEBUG_TOOLBAR_CONFIG = {
'JQUERY_URL': '',
}
try:
from .local import *
except ImportError:
pass
try:
from .polygons import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
| from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
DEBUG_TOOLBAR_CONFIG = {
'JQUERY_URL': '',
}
try:
from .local import *
except ImportError:
pass
try:
from .polygons import *
except ImportError:
pass
LOGGING = {
'version': 1,
'handlers': {
'console': {
'class': 'logging.StreamHandler'},
},
'loggers': {'background_task': {'handlers': ['console'], 'level': 'INFO'}}}
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
| Print tracebacks that happened in tasks | Print tracebacks that happened in tasks
| Python | agpl-3.0 | liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin |
f55d590004874f9ec64c041b5630321e686bf6f9 | mindbender/plugins/validate_id.py | mindbender/plugins/validate_id.py | import pyblish.api
class ValidateMindbenderID(pyblish.api.InstancePlugin):
"""All models must have an ID attribute"""
label = "Mindbender ID"
order = pyblish.api.ValidatorOrder
hosts = ["maya"]
families = ["mindbender.model"]
def process(self, instance):
from maya import cmds
nodes = list(instance)
nodes += cmds.listRelatives(instance, allDescendents=True) or list()
missing = list()
for node in nodes:
# Only check transforms with a shape
if not cmds.listRelatives(node, shapes=True):
continue
try:
self.log.info("Checking '%s'" % node)
cmds.getAttr(node + ".mbID")
except ValueError:
missing.append(node)
assert not missing, ("Missing ID attribute on: %s"
% ", ".join(missing))
| import pyblish.api
class ValidateMindbenderID(pyblish.api.InstancePlugin):
"""All models must have an ID attribute"""
label = "Mindbender ID"
order = pyblish.api.ValidatorOrder
hosts = ["maya"]
families = ["mindbender.model", "mindbender.lookdev"]
def process(self, instance):
from maya import cmds
nodes = list(instance)
nodes += cmds.listRelatives(instance, allDescendents=True) or list()
missing = list()
for node in nodes:
# Only check transforms with a shape
if not cmds.listRelatives(node, shapes=True):
continue
try:
self.log.info("Checking '%s'" % node)
cmds.getAttr(node + ".mbID")
except ValueError:
missing.append(node)
assert not missing, ("Missing ID attribute on: %s"
% ", ".join(missing))
| Extend ID validator to lookdev | Extend ID validator to lookdev
| Python | mit | mindbender-studio/core,MoonShineVFX/core,mindbender-studio/core,getavalon/core,MoonShineVFX/core,getavalon/core,pyblish/pyblish-mindbender |
09be419960d208967771d93025c4f86b80ebe4e9 | python/qibuild/__init__.py | python/qibuild/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2018 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" This module contains a few functions for running CMake and building projects. """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import os
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
QIBUILD_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
def stringify_env(env):
""" convert each key value pairs to strings in env list"""
return dict(((str(key), str(val)) for key, val in env.items()))
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2018 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" This module contains a few functions for running CMake and building projects. """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import os
QIBUILD_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
def stringify_env(env):
""" convert each key value pairs to strings in env list"""
return dict(((str(key), str(val)) for key, val in env.items()))
| Revert "use utf-8 by default" | Revert "use utf-8 by default"
This reverts commit a986aac5e3b4f065d6c2ab70129bde105651d2ca.
| Python | bsd-3-clause | aldebaran/qibuild,aldebaran/qibuild,aldebaran/qibuild,aldebaran/qibuild |
c7cb6c1441bcfe359a9179858492044591e80007 | osgtest/tests/test_10_condor.py | osgtest/tests/test_10_condor.py | from os.path import join
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.condor as condor
import osgtest.library.osgunittest as osgunittest
import osgtest.library.service as service
personal_condor_config = '''
DAEMON_LIST = COLLECTOR, MASTER, NEGOTIATOR, SCHEDD, STARTD
CONDOR_HOST = $(FULL_HOSTNAME)
'''
class TestStartCondor(osgunittest.OSGTestCase):
def test_01_start_condor(self):
core.state['condor.running-service'] = False
core.skip_ok_unless_installed('condor')
core.config['condor.collectorlog'] = condor.config_val('COLLECTOR_LOG')
if service.is_running('condor'):
core.state['condor.running-service'] = True
return
core.config['condor.personal_condor'] = join(condor.config_val('LOCAL_CONFIG_DIR'), '99-personal-condor.conf')
files.write(core.config['condor.personal_condor'], personal_condor_config, owner='condor')
core.config['condor.collectorlog_stat'] = core.get_stat(core.config['condor.collectorlog'])
service.check_start('condor')
core.state['condor.started-service'] = True
core.state['condor.running-service'] = True
| from os.path import join
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.condor as condor
import osgtest.library.osgunittest as osgunittest
import osgtest.library.service as service
personal_condor_config = '''
DAEMON_LIST = COLLECTOR, MASTER, NEGOTIATOR, SCHEDD, STARTD
CONDOR_HOST = $(FULL_HOSTNAME)
'''
class TestStartCondor(osgunittest.OSGTestCase):
def test_01_start_condor(self):
core.state['condor.running-service'] = False
core.skip_ok_unless_installed('condor')
core.config['condor.collectorlog'] = condor.config_val('COLLECTOR_LOG')
if service.is_running('condor'):
core.state['condor.running-service'] = True
return
core.config['condor.personal_condor'] = join(condor.config_val('LOCAL_CONFIG_DIR'), '99-personal-condor.conf')
files.write(core.config['condor.personal_condor'], personal_condor_config, owner='condor', chmod=0o644)
core.config['condor.collectorlog_stat'] = core.get_stat(core.config['condor.collectorlog'])
service.check_start('condor')
core.state['condor.started-service'] = True
core.state['condor.running-service'] = True
| Make the personal condor config world readable | Make the personal condor config world readable
| Python | apache-2.0 | efajardo/osg-test,efajardo/osg-test |
d8b477083866a105947281ca34cb6e215417f44d | packs/salt/actions/lib/utils.py | packs/salt/actions/lib/utils.py | import yaml
action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Runner functions through Salt API",
"enabled": True,
"entry_point": "runner.py"}
def generate_action(module_type, action):
manifest = action_meta
manifest['name'] = "{0}_{1}".format(module_type, action)
manifest['parameters']['action']['default'] = action
fh = open('{0}_{1}.yaml'.format(module_type, action), 'w')
fh.write('---\n')
fh.write(yaml.dump(manifest, default_flow_style=False))
fh.close()
def sanitize_payload(keys_to_sanitize, payload):
data = payload.copy()
map(lambda k: data.update({k: "*" * len(payload[k])}), keys_to_sanitize)
return data
| # pylint: disable=line-too-long
import yaml
from .meta import actions
runner_action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Runner functions through Salt API",
"enabled": True,
"entry_point": "runner.py"}
local_action_meta = {
"name": "",
"parameters": {
"action": {
"type": "string",
"immutable": True,
"default": ""
},
"args": {
"type": "array",
"required": False
},
"kwargs": {
"type": "object",
"required": False
}
},
"runner_type": "run-python",
"description": "Run Salt Execution modules through Salt API",
"enabled": True,
"entry_point": "local.py"}
def generate_actions():
def create_file(mt, m, a):
manifest = local_action_meta
manifest['name'] = "{0}_{1}.{2}".format(mt, m, a)
manifest['parameters']['action']['default'] = "{0}.{1}".format(m, a)
fh = open('{0}_{1}.{2}.yaml'.format(mt, m, a), 'w')
fh.write('---\n')
fh.write(yaml.dump(manifest, default_flow_style=False))
fh.close()
for key in actions:
map(lambda l: create_file('local', key, l), actions[key])
def sanitize_payload(keys_to_sanitize, payload):
'''
Removes sensitive data from payloads before
publishing to the logs
'''
data = payload.copy()
map(lambda k: data.update({k: "*" * len(payload[k])}), keys_to_sanitize)
return data
| Make distinction between local and runner action payload templates. Added small description for sanitizing the NetAPI payload for logging. | Make distinction between local and runner action payload templates.
Added small description for sanitizing the NetAPI payload for logging.
| Python | apache-2.0 | pidah/st2contrib,StackStorm/st2contrib,psychopenguin/st2contrib,lmEshoo/st2contrib,armab/st2contrib,StackStorm/st2contrib,pearsontechnology/st2contrib,digideskio/st2contrib,digideskio/st2contrib,armab/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,lmEshoo/st2contrib,tonybaloney/st2contrib,psychopenguin/st2contrib,pearsontechnology/st2contrib,pidah/st2contrib,armab/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,StackStorm/st2contrib,pidah/st2contrib |
60625877a23e26e66c2c97cbeb4f139ede717eda | B.py | B.py | #! /usr/bin/env python3
# coding: utf-8
from collections import namedtuple
import matplotlib.pyplot as plt
# One reconstructed B candidate: invariant mass, its error, and momenta.
# NOTE(review): units appear to be GeV (see axis label below) — confirm.
BCand = namedtuple('BCand', ['m', 'merr', 'pt', 'p'])
bs = []
# Parse B.txt: the first line is assumed to be a header; each remaining
# line holds comma-separated floats in BCand field order.
with open('B.txt') as f:
    for line in f.readlines()[1:]:
        bs.append(BCand(*[float(v) for v in line.strip().split(',')]))
masses = [b.m for b in bs]
# 60-bin invariant-mass histogram, written to mass.pdf.
plt.hist(masses, 60, histtype='stepfilled')
plt.xlabel(r'$m_B / \mathrm{GeV}$')
plt.savefig('mass.pdf')
| #! /usr/bin/env python3
# coding: utf-8
from collections import namedtuple
import matplotlib.pyplot as plt
import numpy as np
# One reconstructed B candidate: invariant mass, its error, and momenta.
BCand = namedtuple('BCand', ['m', 'merr', 'pt', 'p'])
# np.genfromtxt parses the comma-separated file in one call; skip_header
# drops the column-title line, and each row unpacks into BCand field order.
bs = [BCand(*b) for b in np.genfromtxt('B.txt', skip_header=1, delimiter=',')]
masses = [b.m for b in bs]
ns, bins, _ = plt.hist(masses, 60, histtype='stepfilled', facecolor='r',
                       edgecolor='none')
# Bin centres, used as the x positions of the error bars.
centers = bins[:-1] + (bins[1:] - bins[:-1]) / 2
# Poisson (sqrt(N)) uncertainty on each bin count.
merr = np.sqrt(ns)
plt.errorbar(centers, ns, yerr=merr, fmt='b+')
plt.xlabel(r'$m_B / \mathrm{GeV}$')
plt.savefig('mass.pdf')
| Use numpy for read-in and add errorbars. | Use numpy for read-in and add errorbars.
| Python | mit | bixel/python-introduction |
2dcb159bdd826ceeb68658cc3760c97dae04289e | partner_firstname/exceptions.py | partner_firstname/exceptions.py | # -*- encoding: utf-8 -*-
# Odoo, Open Source Management Solution
# Copyright (C) 2014-2015 Grupo ESOC <www.grupoesoc.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openerp import _, exceptions
class EmptyNamesError(exceptions.ValidationError):
    """Raised when a partner record ends up with no name set.

    ``record`` is the partner whose name fields are all empty; ``value``
    is the human-readable detail message shown to the user.
    """

    def __init__(self, record, value=_("No name is set.")):
        self.record = record
        self._value = value
        self.name = _("Error(s) with partner %d's name.") % record.id
        # BaseException.args is what the client renders; without it the UI
        # shows a generic exception repr instead of the real message (this
        # mirrors the fix in the updated copy of this module).
        self.args = (self.name, value)
| # -*- encoding: utf-8 -*-
# Odoo, Open Source Management Solution
# Copyright (C) 2014-2015 Grupo ESOC <www.grupoesoc.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openerp import _, exceptions
class EmptyNamesError(exceptions.ValidationError):
    """Raised when a partner record ends up with no name set.

    ``record`` is the partner whose name fields are all empty; ``value``
    is the human-readable detail message shown to the user.
    """
    def __init__(self, record, value=_("No name is set.")):
        self.record = record
        self._value = value
        self.name = _("Error(s) with partner %d's name.") % record.id
        # BaseException.args is what the client renders; setting it makes
        # the UI display the real message instead of a generic repr.
        self.args = (self.name, value)
| Add args to exception to display the correct message in the UI. | Add args to exception to display the correct message in the UI.
| Python | agpl-3.0 | BT-ojossen/partner-contact,Ehtaga/partner-contact,BT-fgarbely/partner-contact,acsone/partner-contact,BT-jmichaud/partner-contact,charbeljc/partner-contact,sergiocorato/partner-contact,Antiun/partner-contact,raycarnes/partner-contact,idncom/partner-contact,Endika/partner-contact,gurneyalex/partner-contact,QANSEE/partner-contact,Therp/partner-contact,alanljj/oca-partner-contact,diagramsoftware/partner-contact,akretion/partner-contact,andrius-preimantas/partner-contact,open-synergy/partner-contact |
Subsets and Splits