commit (string, 40 chars) | old_file (string, 4-150 chars) | new_file (string, 4-150 chars) | old_contents (string, 0-3.26k chars) | new_contents (string, 1-4.43k chars) | subject (string, 15-501 chars) | message (string, 15-4.06k chars) | lang (string, 4 classes) | license (string, 13 classes) | repos (string, 5-91.5k chars) | diff (string, 0-4.35k chars)
---|---|---|---|---|---|---|---|---|---|---|
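Each row below pairs a file's contents before and after one commit with the commit's subject, full message, language, license, the repositories it appears in, and a unified diff. As a minimal sketch of how such rows could be consumed — assuming the dump has been exported as JSON Lines to a hypothetical `commits.jsonl`, one object per row using the column names above:

```python
import json

# `commits.jsonl` is an assumed export path, not part of this dataset page.
with open('commits.jsonl') as f:
    for line in f:
        row = json.loads(line)
        # Each record carries a before/after file pair plus commit metadata.
        print(row['commit'][:8], row['lang'], '-', row['subject'])
```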
bcb383612625d9a59f9e5b4174e44700b26bd0e5 | crosscompute/macros/security.py | crosscompute/macros/security.py | from datetime import datetime, timedelta
from invisibleroads_macros_security import make_random_string
class DictionarySafe(dict):
def __init__(self, key_length):
self.key_length = key_length
def put(self, value, time_in_seconds=None):
while True:
key = make_random_string(self.key_length)
try:
self[key]
except KeyError:
break
self.set(key, value, time_in_seconds)
return key
def set(self, key, value, time_in_seconds=None):
self[key] = value, get_expiration_datetime(time_in_seconds)
def get(self, key):
value, expiration_datetime = self[key]
if datetime.now() > expiration_datetime:
del self[key]
raise KeyError
return value
def get_expiration_datetime(time_in_seconds):
if not time_in_seconds:
return
return datetime.now() + timedelta(seconds=time_in_seconds)
def evaluate_expression(expression_string, value_by_name):
# https://realpython.com/python-eval-function
code = compile(expression_string, '<string>', 'eval')
for name in code.co_names:
if name not in value_by_name:
raise NameError(f'{name} not defined')
return eval(code, {'__builtins__': {}}, value_by_name)
| from datetime import datetime, timedelta
from invisibleroads_macros_security import make_random_string
class DictionarySafe(dict):
def __init__(self, key_length):
self.key_length = key_length
def put(self, value, time_in_seconds=None):
while True:
key = make_random_string(self.key_length)
try:
self[key]
except KeyError:
break
self.set(key, value, time_in_seconds)
return key
def set(self, key, value, time_in_seconds=None):
self[key] = value, get_expiration_datetime(time_in_seconds)
def get(self, key):
value, expiration_datetime = self[key]
if expiration_datetime and datetime.now() > expiration_datetime:
del self[key]
raise KeyError
return value
def get_expiration_datetime(time_in_seconds):
if not time_in_seconds:
return
return datetime.now() + timedelta(seconds=time_in_seconds)
def evaluate_expression(expression_string, value_by_name):
# https://realpython.com/python-eval-function
code = compile(expression_string, '<string>', 'eval')
for name in code.co_names:
if name not in value_by_name:
raise NameError(f'{name} not defined')
return eval(code, {'__builtins__': {}}, value_by_name)
| Support case when expiration_datetime is None | Support case when expiration_datetime is None
| Python | mit | crosscompute/crosscompute,crosscompute/crosscompute,crosscompute/crosscompute,crosscompute/crosscompute | ---
+++
@@ -23,7 +23,7 @@
def get(self, key):
value, expiration_datetime = self[key]
- if datetime.now() > expiration_datetime:
+ if expiration_datetime and datetime.now() > expiration_datetime:
del self[key]
raise KeyError
return value |
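The row above guards `eval` by compiling the expression first, checking every name it references (`code.co_names`) against a whitelist, and evaluating with `__builtins__` emptied. A self-contained illustration of that pattern, trimmed of the project-specific imports:

```python
def evaluate_expression(expression_string, value_by_name):
    # Compile first so the names the expression references can be inspected.
    code = compile(expression_string, '<string>', 'eval')
    for name in code.co_names:
        if name not in value_by_name:
            raise NameError(f'{name} not defined')
    # An empty __builtins__ keeps the expression away from open(), import, etc.
    return eval(code, {'__builtins__': {}}, value_by_name)

print(evaluate_expression('a + b * 2', {'a': 1, 'b': 3}))  # prints 7
```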
e868cf738f2fb994f22364fa8306045c6995a2b1 | settings.py | settings.py | RESOURCE_METHODS = ['GET', 'POST', 'DELETE']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
schema = {
'name': {
'type': 'string',
'minlength': 3,
'maxlength': 50,
'required': True,
},
'date': {
'type': 'datetime',
},
'reference': {
'type': 'string',
'minlength': 2,
'maxlength': 50,
'required': True,
},
'details': {
'type': 'string',
'minlength': 0,
'maxlength': 300,
'required': False
},
'reporter': {
'type': 'string',
'minlength': 3,
'maxlength': 20,
'required': True,
},
}
event = {
'item_title': 'event',
'additional_lookup': {
'url': 'regex("[\w]+")',
'field': 'name',
},
'cache_control': 'max-age=10, must-revalidate',
'cache_expires': 10,
'resource_methods': ['GET', 'POST'],
'schema': schema
}
DOMAIN = {
'event': event,
}
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_USERNAME = ''
MONGO_PASSWORD = ''
MONGO_DBNAME = 'historia'
| RESOURCE_METHODS = ['GET', 'POST', 'DELETE']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
schema = {
'name': {
'type': 'string',
'minlength': 3,
'maxlength': 50,
'required': True,
},
'datetime': {
'type': 'datetime',
},
'reference': {
'type': 'string',
'minlength': 2,
'maxlength': 50,
'required': True,
},
'details': {
'type': 'string',
'minlength': 0,
'maxlength': 300,
'required': False
},
'reporter': {
'type': 'string',
'minlength': 3,
'maxlength': 20,
'required': True,
},
}
event = {
'item_title': 'event',
'additional_lookup': {
'url': 'regex("[\w]+")',
'field': 'name',
},
'cache_control': 'max-age=10, must-revalidate',
'cache_expires': 10,
'resource_methods': ['GET', 'POST'],
'schema': schema
}
DOMAIN = {
'event': event,
}
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_USERNAME = ''
MONGO_PASSWORD = ''
MONGO_DBNAME = 'historia'
| Change name of date to datetime | Change name of date to datetime
| Python | mit | waoliveros/historia | ---
+++
@@ -8,7 +8,7 @@
'maxlength': 50,
'required': True,
},
- 'date': {
+ 'datetime': {
'type': 'datetime',
},
'reference': { |
4bcb7efc2c95280323995cb0de27cf6449f060b8 | external_tools/src/main/python/images/common.py | external_tools/src/main/python/images/common.py | #!/usr/bin/python
splitString='images/clean/impc/' | #!/usr/bin/python
#splitString='images/clean/impc/'
splitString='images/holding_area/impc/'
| Change to use holding_area directory | Change to use holding_area directory
| Python | apache-2.0 | mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData,mpi2/PhenotypeData | ---
+++
@@ -1,2 +1,3 @@
#!/usr/bin/python
-splitString='images/clean/impc/'
+#splitString='images/clean/impc/'
+splitString='images/holding_area/impc/' |
7db970b508c9d7ea3d659fe8b2fa5a852f16abd1 | tcconfig/_common.py | tcconfig/_common.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
import dataproperty
import six
from ._error import NetworkInterfaceNotFoundError
def verify_network_interface(device):
try:
import netifaces
except ImportError:
return
if device not in netifaces.interfaces():
raise NetworkInterfaceNotFoundError(
"network interface not found: " + device)
def sanitize_network(network):
"""
:return: Network string
:rtype: str
:raises ValueError: if the network string is invalid.
"""
import ipaddress
if dataproperty.is_empty_string(network):
return ""
try:
ipaddress.IPv4Address(six.u(network))
return network + "/32"
except ipaddress.AddressValueError:
pass
ipaddress.IPv4Network(six.u(network)) # validate network str
return network
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
import dataproperty
import six
from ._error import NetworkInterfaceNotFoundError
def verify_network_interface(device):
try:
import netifaces
except ImportError:
return
if device not in netifaces.interfaces():
raise NetworkInterfaceNotFoundError(
"network interface not found: " + device)
def sanitize_network(network):
"""
:return: Network string
:rtype: str
:raises ValueError: if the network string is invalid.
"""
import ipaddress
if dataproperty.is_empty_string(network):
return ""
if network == "anywhere":
return "0.0.0.0/0"
try:
ipaddress.IPv4Address(six.u(network))
return network + "/32"
except ipaddress.AddressValueError:
pass
ipaddress.IPv4Network(six.u(network)) # validate network str
return network
| Add special case for "anywhere" | Add special case for "anywhere"
| Python | mit | thombashi/tcconfig,thombashi/tcconfig | ---
+++
@@ -35,6 +35,9 @@
if dataproperty.is_empty_string(network):
return ""
+ if network == "anywhere":
+ return "0.0.0.0/0"
+
try:
ipaddress.IPv4Address(six.u(network))
return network + "/32" |
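The tcconfig change above short-circuits the address parsing for the literal keyword `anywhere` (presumably as emitted by iptables listings), mapping it to the all-hosts block before IPv4 validation runs. A rough Python 3 sketch of the resulting behavior, with the `six` and `dataproperty` dependencies dropped:

```python
import ipaddress

def sanitize_network(network):
    if network == "anywhere":
        return "0.0.0.0/0"              # special-cased keyword
    try:
        ipaddress.IPv4Address(network)  # bare host address?
        return network + "/32"
    except ipaddress.AddressValueError:
        pass
    ipaddress.IPv4Network(network)      # raises ValueError if not a valid network
    return network

print(sanitize_network("anywhere"))        # 0.0.0.0/0
print(sanitize_network("10.0.0.5"))        # 10.0.0.5/32
print(sanitize_network("192.168.0.0/24"))  # 192.168.0.0/24
```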
384cb60a488b03ac992c3496658c299f3393b807 | tests/TestConfigFileLoading.py | tests/TestConfigFileLoading.py | import unittest
import src
import sys
from io import StringIO
class TestConfigFileLoading(unittest.TestCase):
filepath_prefix = ''
@classmethod
def setUpClass(cls):
if sys.argv[0].endswith('nosetests'):
cls.filepath_prefix = "./resources/config/"
else:
cls.filepath_prefix = "../resources/config/"
def TestReadJsonCorrectly(self):
config_file = self.filepath_prefix + "default-config.json"
exp_res = 17
config_json = src.read_json(config_file)
result = len(config_json)
self.assertEqual(result, exp_res)
def TestReadJsonBadFileName(self):
config_file = self.filepath_prefix + "fig.json"
with self.assertRaises(FileNotFoundError):
src.read_json(config_file)
def TestLoadConfigFileCheckRead(self):
exp_res = "Read styling config JSON correctly."
out = StringIO()
src.load_config_file(out=out)
result = out.getvalue().strip()
self.assertEqual(result, exp_res)
def TestDefaultConfigOverwritten(self):
exp_res = 666
config = src.load_config_file()
result = config["max_line_length"]
self.assertEqual(result, exp_res)
def TestDefaultConfigPersists(self):
exp_res = True
config = src.load_config_file()
result = config["spellcheck"]
self.assertEqual(result, exp_res)
| import unittest
import src
import sys
from io import StringIO
class TestConfigFileLoading(unittest.TestCase):
filepath_prefix = ''
@classmethod
def setUpClass(cls):
if sys.argv[0].endswith('nosetests'):
cls.filepath_prefix = "./resources/config/"
else:
cls.filepath_prefix = "../resources/config/"
def testReadJsonCorrectly(self):
config_file = self.filepath_prefix + "default-config.json"
exp_res = 17
config_json = src.read_json(config_file)
result = len(config_json)
self.assertEqual(result, exp_res)
def testReadJsonBadFileName(self):
config_file = self.filepath_prefix + "fig.json"
with self.assertRaises(FileNotFoundError):
src.read_json(config_file)
def testLoadConfigFileCheckRead(self):
exp_res = "Read styling config JSON correctly."
out = StringIO()
src.load_config_file(out=out)
result = out.getvalue().strip()
self.assertEqual(result, exp_res)
def testDefaultConfigOverwritten(self):
exp_res = 666
config = src.load_config_file()
result = config["max_line_length"]
self.assertEqual(result, exp_res)
def testDefaultConfigPersists(self):
exp_res = True
config = src.load_config_file()
result = config["spellcheck"]
self.assertEqual(result, exp_res)
| Make test names lower case prefix | Make test names lower case prefix
| Python | bsd-3-clause | sky-uk/bslint | ---
+++
@@ -15,32 +15,32 @@
else:
cls.filepath_prefix = "../resources/config/"
- def TestReadJsonCorrectly(self):
+ def testReadJsonCorrectly(self):
config_file = self.filepath_prefix + "default-config.json"
exp_res = 17
config_json = src.read_json(config_file)
result = len(config_json)
self.assertEqual(result, exp_res)
- def TestReadJsonBadFileName(self):
+ def testReadJsonBadFileName(self):
config_file = self.filepath_prefix + "fig.json"
with self.assertRaises(FileNotFoundError):
src.read_json(config_file)
- def TestLoadConfigFileCheckRead(self):
+ def testLoadConfigFileCheckRead(self):
exp_res = "Read styling config JSON correctly."
out = StringIO()
src.load_config_file(out=out)
result = out.getvalue().strip()
self.assertEqual(result, exp_res)
- def TestDefaultConfigOverwritten(self):
+ def testDefaultConfigOverwritten(self):
exp_res = 666
config = src.load_config_file()
result = config["max_line_length"]
self.assertEqual(result, exp_res)
- def TestDefaultConfigPersists(self):
+ def testDefaultConfigPersists(self):
exp_res = True
config = src.load_config_file()
result = config["spellcheck"] |
9495a43e0797d1a089df644663900957cadc3ac0 | tests/agents_tests/test_iqn.py | tests/agents_tests/test_iqn.py | from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
from builtins import * # NOQA
standard_library.install_aliases() # NOQA
import chainer.functions as F
import chainer.links as L
import basetest_dqn_like as base
import chainerrl
from chainerrl.agents import iqn
class TestIQNOnDiscreteABC(base._TestDQNOnDiscreteABC):
def make_q_func(self, env):
obs_size = env.observation_space.low.size
hidden_size = 64
return iqn.ImplicitQuantileQFunction(
psi=chainerrl.links.Sequence(
L.Linear(obs_size, hidden_size),
F.relu,
),
phi=iqn.CosineBasisLinearReLU(64, hidden_size),
f=L.Linear(hidden_size, env.action_space.n),
)
def make_dqn_agent(self, env, q_func, opt, explorer, rbuf, gpu):
return iqn.IQN(
q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_interval=100)
| from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
from builtins import * # NOQA
standard_library.install_aliases() # NOQA
import chainer.functions as F
import chainer.links as L
from chainer import testing
import basetest_dqn_like as base
import chainerrl
from chainerrl.agents import iqn
@testing.parameterize(*testing.product({
'quantile_thresholds_N': [1, 5],
'quantile_thresholds_N_prime': [1, 7],
}))
class TestIQNOnDiscreteABC(base._TestDQNOnDiscreteABC):
def make_q_func(self, env):
obs_size = env.observation_space.low.size
hidden_size = 64
return iqn.ImplicitQuantileQFunction(
psi=chainerrl.links.Sequence(
L.Linear(obs_size, hidden_size),
F.relu,
),
phi=iqn.CosineBasisLinearReLU(64, hidden_size),
f=L.Linear(hidden_size, env.action_space.n),
)
def make_dqn_agent(self, env, q_func, opt, explorer, rbuf, gpu):
return iqn.IQN(
q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
replay_start_size=100, target_update_interval=100,
quantile_thresholds_N=self.quantile_thresholds_N,
quantile_thresholds_N_prime=self.quantile_thresholds_N_prime,
)
| Test multiple values of N and N_prime | Test multiple values of N and N_prime
| Python | mit | toslunar/chainerrl,toslunar/chainerrl | ---
+++
@@ -8,12 +8,17 @@
import chainer.functions as F
import chainer.links as L
+from chainer import testing
import basetest_dqn_like as base
import chainerrl
from chainerrl.agents import iqn
[email protected](*testing.product({
+ 'quantile_thresholds_N': [1, 5],
+ 'quantile_thresholds_N_prime': [1, 7],
+}))
class TestIQNOnDiscreteABC(base._TestDQNOnDiscreteABC):
def make_q_func(self, env):
@@ -31,4 +36,7 @@
def make_dqn_agent(self, env, q_func, opt, explorer, rbuf, gpu):
return iqn.IQN(
q_func, opt, rbuf, gpu=gpu, gamma=0.9, explorer=explorer,
- replay_start_size=100, target_update_interval=100)
+ replay_start_size=100, target_update_interval=100,
+ quantile_thresholds_N=self.quantile_thresholds_N,
+ quantile_thresholds_N_prime=self.quantile_thresholds_N_prime,
+ ) |
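In the chainerrl row above, `testing.parameterize(*testing.product({...}))` re-runs the whole test class once per combination of `N` and `N_prime`; `testing.product` takes the cross product of the listed values. A rough stand-in for that expansion using only the standard library:

```python
from itertools import product

# Expand the parameter grid into one dict per combination (2 x 2 = 4 here),
# mirroring what chainer's testing.product does for parameterized test classes.
grid = {'quantile_thresholds_N': [1, 5], 'quantile_thresholds_N_prime': [1, 7]}
combos = [dict(zip(grid, values)) for values in product(*grid.values())]
print(combos)
```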
7882831c7a027e778c9f14679c3a5639ca3ae29d | snipts/admin.py | snipts/admin.py | from django.contrib import admin
from snipts.models import Favorite, Snipt
class SniptAdmin(admin.ModelAdmin):
readonly_fields = ('user',)
list_display = ('title', 'slug', 'user', 'lexer', 'public', 'created', 'modified',)
search_fields = ('title', 'user__username', 'lexer', 'id',)
ordering = ('-created',)
prepopulated_fields = {'slug': ('title',)}
admin.site.register(Snipt, SniptAdmin)
class FavoriteAdmin(admin.ModelAdmin):
readonly_fields = ('snipt', 'user',)
list_display = ('snipt', 'user',)
search_fields = ('snipt', 'user',)
ordering = ('-created',)
admin.site.register(Favorite, FavoriteAdmin)
| from django.contrib import admin
from snipts.models import Favorite, Snipt
class SniptAdmin(admin.ModelAdmin):
readonly_fields = ('user',)
list_display = ('title', 'slug', 'user', 'lexer', 'public', 'created', 'modified',)
search_fields = ('title', 'user__username', 'lexer', 'id', 'key',)
ordering = ('-created',)
prepopulated_fields = {'slug': ('title',)}
admin.site.register(Snipt, SniptAdmin)
class FavoriteAdmin(admin.ModelAdmin):
readonly_fields = ('snipt', 'user',)
list_display = ('snipt', 'user',)
search_fields = ('snipt', 'user',)
ordering = ('-created',)
admin.site.register(Favorite, FavoriteAdmin)
| Allow searching keys on snipts | Allow searching keys on snipts
| Python | mit | nicksergeant/snipt,nicksergeant/snipt,nicksergeant/snipt | ---
+++
@@ -5,7 +5,7 @@
class SniptAdmin(admin.ModelAdmin):
readonly_fields = ('user',)
list_display = ('title', 'slug', 'user', 'lexer', 'public', 'created', 'modified',)
- search_fields = ('title', 'user__username', 'lexer', 'id',)
+ search_fields = ('title', 'user__username', 'lexer', 'id', 'key',)
ordering = ('-created',)
prepopulated_fields = {'slug': ('title',)}
|
eaa3d6094c92eb17f5074279a0c23ec363cddd1b | rnacentral/portal/models/secondary_structure.py | rnacentral/portal/models/secondary_structure.py | """
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.db import models
class SecondaryStructure(models.Model):
id = models.AutoField(primary_key=True)
accession = models.ForeignKey(
'Accession',
db_column='rnc_accession_id',
to_field='accession',
related_name='secondary_structure',
)
secondary_structure = models.TextField()
md5 = models.CharField(max_length=32, db_index=True)
class Meta:
db_table = 'rnc_secondary_structure'
unique_together = (('accession', 'md5'),)
| """
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.db import models
class SecondaryStructure(models.Model):
id = models.AutoField(primary_key=True)
accession = models.OneToOneField(
'Accession',
db_column='rnc_accession_id',
to_field='accession',
related_name='secondary_structure',
)
secondary_structure = models.TextField()
md5 = models.CharField(max_length=32, db_index=True)
class Meta:
db_table = 'rnc_secondary_structure'
unique_together = (('accession', 'md5'),)
| Use OneToOneField on SecondaryStructure model | Use OneToOneField on SecondaryStructure model
| Python | apache-2.0 | RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode | ---
+++
@@ -16,7 +16,7 @@
class SecondaryStructure(models.Model):
id = models.AutoField(primary_key=True)
- accession = models.ForeignKey(
+ accession = models.OneToOneField(
'Accession',
db_column='rnc_accession_id',
to_field='accession', |
f549fd217f9d838af1e6f028cdae14119da72fb4 | tests/test_api_network.py | tests/test_api_network.py | from devicehive import NetworkError
from devicehive import ApiResponseError
def test_save(test):
def handle_connect(handler):
name = test.generate_id('n-s')
description = '%s-description' % name
network = handler.api.create_network(name, description)
name = test.generate_id('n-s')
description = '%s-description' % name
network.name = name
network.description = description
network.save()
network_1 = handler.api.get_network(network.id)
network.remove()
try:
network.save()
assert False
except NetworkError:
pass
try:
network_1.save()
assert False
except ApiResponseError as api_response_error:
# TODO: uncomment after server response will be fixed.
# assert api_response_error.code == 404
pass
test.run(handle_connect)
def test_remove(test):
def handle_connect(handler):
name = test.generate_id('n-r')
description = '%s-description' % name
network = handler.api.create_network(name, description)
network_1 = handler.api.get_network(network.id)
network.remove()
assert not network.id
assert not network.name
assert not network.description
try:
network.remove()
assert False
except NetworkError:
pass
try:
network_1.remove()
assert False
except ApiResponseError as api_response_error:
# TODO: uncomment after server response will be fixed.
# assert api_response_error.code == 404
pass
test.run(handle_connect)
| from devicehive import NetworkError
from devicehive import ApiResponseError
def test_save(test):
def handle_connect(handler):
name = test.generate_id('n-s')
description = '%s-description' % name
network = handler.api.create_network(name, description)
name = test.generate_id('n-s')
description = '%s-description' % name
network.name = name
network.description = description
network.save()
network_1 = handler.api.get_network(network.id)
network.remove()
try:
network.save()
assert False
except NetworkError:
pass
try:
network_1.save()
assert False
except ApiResponseError as api_response_error:
# TODO: uncomment after server response will be fixed.
# assert api_response_error.code == 404
pass
test.run(handle_connect)
def test_remove(test):
def handle_connect(handler):
name = test.generate_id('n-r')
description = '%s-description' % name
network = handler.api.create_network(name, description)
network_1 = handler.api.get_network(network.id)
network.remove()
assert not network.id
assert not network.name
assert not network.description
try:
network.remove()
assert False
except NetworkError:
pass
try:
network_1.remove()
assert False
except ApiResponseError as api_response_error:
assert api_response_error.code == 404
test.run(handle_connect)
| Add code test for test_remove function | Add code test for test_remove function
| Python | apache-2.0 | devicehive/devicehive-python | ---
+++
@@ -51,8 +51,6 @@
network_1.remove()
assert False
except ApiResponseError as api_response_error:
- # TODO: uncomment after server response will be fixed.
- # assert api_response_error.code == 404
- pass
+ assert api_response_error.code == 404
test.run(handle_connect) |
f3931dd3eecdfa6be81273007663d4983aad5180 | tests/alembic/versions/132231d12fcd_test.py | tests/alembic/versions/132231d12fcd_test.py | """Test Migration
Revision ID: 132231d12fcd
Revises: None
Create Date: 2013-04-27 11:09:23.896698
"""
# revision identifiers, used by Alembic.
revision = '132231d12fcd'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
pass
def downgrade():
pass
| """Test Migration
Revision ID: 132231d12fcd
Revises: None
Create Date: 2013-04-27 11:09:23.896698
"""
# revision identifiers, used by Alembic.
revision = u'132231d12fcd'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
pass
def downgrade():
pass
| Fix for SQLAlchemy unicode warning | Fix for SQLAlchemy unicode warning
| Python | bsd-2-clause | hasgeek/coaster | ---
+++
@@ -7,7 +7,7 @@
"""
# revision identifiers, used by Alembic.
-revision = '132231d12fcd'
+revision = u'132231d12fcd'
down_revision = None
from alembic import op |
5743d7cfbc93b1c806e8f0a38d8000b82445810b | __init__.py | __init__.py | """
Python Bayesian hierarchical clustering (PyBHC).
Heller, K. A., & Ghahramani, Z. (2005). Bayesian Hierarchical
Clustering. Neuroscience, 6(section 2), 297-304.
doi:10.1145/1102351.1102389
"""
from bhc import bhc
from dists import NormalInverseWishart
| """
Python Bayesian hierarchical clustering (PyBHC).
Heller, K. A., & Ghahramani, Z. (2005). Bayesian Hierarchical
Clustering. Neuroscience, 6(section 2), 297-304.
doi:10.1145/1102351.1102389
"""
from bhc import bhc
from dists import NormalInverseWishart, NormalFixedCovar
from rbhc import rbhc
| Update importing of prob dists | Update importing of prob dists
Import the newly created fixed variance probability dist into
__init__.py for easier use outside of module.
| Python | bsd-3-clause | stuartsale/pyBHC | ---
+++
@@ -6,4 +6,5 @@
"""
from bhc import bhc
-from dists import NormalInverseWishart
+from dists import NormalInverseWishart, NormalFixedCovar
+from rbhc import rbhc |
69d3ec01ec3e9e9369b5c0425bc63cc7f2797b52 | __init__.py | __init__.py | import pyOmicron
import STS
__all__=["pyOmicron","STS"]
__version__ = 0.1
| import pyOmicron
try:
import STS
except:
import pyOmicron.STS
__all__=["pyOmicron","STS"]
__version__ = 0.1
| Fix import for python 3 | Fix import for python 3
| Python | apache-2.0 | scholi/pyOmicron | ---
+++
@@ -1,5 +1,8 @@
import pyOmicron
-import STS
+try:
+ import STS
+except:
+ import pyOmicron.STS
__all__=["pyOmicron","STS"]
__version__ = 0.1 |
c1acb68ef54309584816fbf5c93e38266accb2f0 | nova/db/sqlalchemy/session.py | nova/db/sqlalchemy/session.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Session Handling for SQLAlchemy backend
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from nova import flags
FLAGS = flags.FLAGS
_ENGINE = None
_MAKER = None
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session"""
global _ENGINE
global _MAKER
if not _MAKER:
if not _ENGINE:
_ENGINE = create_engine(FLAGS.sql_connection, echo=False)
_MAKER = (sessionmaker(bind=_ENGINE,
autocommit=autocommit,
expire_on_commit=expire_on_commit))
session = _MAKER()
return session
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Session Handling for SQLAlchemy backend
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from nova import flags
FLAGS = flags.FLAGS
_ENGINE = None
_MAKER = None
def get_session(autocommit=True, expire_on_commit=False):
"""Helper method to grab session"""
global _ENGINE
global _MAKER
if not _MAKER:
if not _ENGINE:
_ENGINE = create_engine(FLAGS.sql_connection, pool_recycle=3600, echo=False)
_MAKER = (sessionmaker(bind=_ENGINE,
autocommit=autocommit,
expire_on_commit=expire_on_commit))
session = _MAKER()
return session
| Add the pool_recycle setting to enable connection pooling features for the sql engine. The setting is hard-coded to 3600 seconds (one hour) per the recommendation provided on sqlalchemy's site | Add the pool_recycle setting to enable connection pooling features for the sql engine. The setting is hard-coded to 3600 seconds (one hour) per the recommendation provided on sqlalchemy's site | Python | apache-2.0 | qwefi/nova,tanglei528/nova,houshengbo/nova_vmware_compute_driver,SUSE-Cloud/nova,gooddata/openstack-nova,gspilio/nova,TwinkleChawla/nova,felixma/nova,viggates/nova,TieWei/nova,varunarya10/nova_test_latest,Yusuke1987/openstack_template,eonpatapon/nova,ruslanloman/nova,petrutlucian94/nova_dev,fajoy/nova,gooddata/openstack-nova,yrobla/nova,petrutlucian94/nova_dev,rahulunair/nova,devoid/nova,joker946/nova,paulmathews/nova,belmiromoreira/nova,termie/nova-migration-demo,watonyweng/nova,psiwczak/openstack,Stavitsky/nova,maheshp/novatest,termie/pupa,klmitch/nova,LoHChina/nova,termie/nova-migration-demo,yrobla/nova,aristanetworks/arista-ovs-nova,jianghuaw/nova,projectcalico/calico-nova,Yuriy-Leonov/nova,blueboxgroup/nova,Metaswitch/calico-nova,leilihh/nova,affo/nova,KarimAllah/nova,eharney/nova,shahar-stratoscale/nova,superstack/nova,berrange/nova,NoBodyCam/TftpPxeBootBareMetal,CEG-FYP-OpenStack/scheduler,sacharya/nova,MountainWei/nova,cloudbase/nova,rahulunair/nova,tealover/nova,zzicewind/nova,OpenAcademy-OpenStack/nova-scheduler,jianghuaw/nova,eneabio/nova,shootstar/novatest,alaski/nova,mikalstill/nova,termie/pupa,rajalokan/nova,projectcalico/calico-nova,sridevikoushik31/nova,Triv90/Nova,CloudServer/nova,ntt-sic/nova,hanlind/nova,zhimin711/nova,houshengbo/nova_vmware_compute_driver,watonyweng/nova,dstroppa/openstack-smartos-nova-grizzly,scripnichenko/nova,Francis-Liu/animated-broccoli,CEG-FYP-OpenStack/scheduler,akash1808/nova,bigswitch/nova,fnordahl/nova,sridevikoushik31/nova,josephsuh/extra-specs,virtualopensystems/nova,shahar-stratoscale/nova,kimjaejoong/nova,redhat-openstack/nova,berrange/nova,gooddata/openstack-nova,sridevikoushik31/nova,zzicewind/nova,eonpatapon/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,petrutlucian94/nova,KarimAllah/nova,angdraug/nova,JioCloud/nova_test_latest,tangfeixiong/nova,gspilio/nova,maoy/zknova,bclau/nova,josephsuh/extra-specs,imsplitbit/nova,mahak/nova,sileht/deb-openstack-nova,devendermishrajio/nova,cloudbau/nova,Yusuke1987/openstack_template,vladikr/nova_drafts,Juniper/nova,ruslanloman/nova,rrader/nova-docker-plugin,double12gzh/nova,varunarya10/nova_test_latest,usc-isi/nova,mmnelemane/nova,LoHChina/nova,SUSE-Cloud/nova,zhimin711/nova,vmturbo/nova,termie/pupa,cloudbase/nova,alaski/nova,CiscoSystems/nova,MountainWei/nova,JianyuWang/nova,dstroppa/openstack-smartos-nova-grizzly,phenoxim/nova,DirectXMan12/nova-hacking,Stavitsky/nova,rajalokan/nova,yosshy/nova,vmturbo/nova,Triv90/Nova,plumgrid/plumgrid-nova,TwinkleChawla/nova,thomasem/nova,salv-orlando/MyRepo,fnordahl/nova,yatinkumbhare/openstack-nova,eneabio/nova,maoy/zknova,cloudbase/nova,dawnpower/nova,jeffrey4l/nova,mahak/nova,rahulunair/nova,apporc/nova,tanglei528/nova,tudorvio/nova,JioCloud/nova,orbitfp7/nova,termie/nova-migration-demo,barnsnake351/nova,NewpTone/stacklab-nova,mikalstill/nova,noironetworks/nova,usc-isi/nova,badock/nova,NoBodyCam/TftpPxeBootBareMetal,Juniper/nova,cloudbau/nova,tianweizhang/nova,redhat-openstack/nova,rickerc/nova_audit,mgagne/nova,eharney/nova,petrutlucian94/nova,aristanetworks/arista-ovs-nova,usc-isi/extra-specs,citrix-openstack-build/nova,houshengbo/nova_vmware_compute_dri
ver,josephsuh/extra-specs,vmturbo/nova,JioCloud/nova,zaina/nova,leilihh/nova,luogangyi/bcec-nova,vmturbo/nova,rajalokan/nova,openstack/nova,dstroppa/openstack-smartos-nova-grizzly,cloudbase/nova-virtualbox,rickerc/nova_audit,BeyondTheClouds/nova,Juniper/nova,jianghuaw/nova,nikesh-mahalka/nova,sileht/deb-openstack-nova,alvarolopez/nova,savi-dev/nova,ewindisch/nova,felixma/nova,akash1808/nova,badock/nova,bgxavier/nova,adelina-t/nova,fajoy/nova,tianweizhang/nova,gooddata/openstack-nova,spring-week-topos/nova-week,paulmathews/nova,tealover/nova,KarimAllah/nova,sileht/deb-openstack-nova,plumgrid/plumgrid-nova,Yuriy-Leonov/nova,CiscoSystems/nova,silenceli/nova,sebrandon1/nova,isyippee/nova,belmiromoreira/nova,barnsnake351/nova,anotherjesse/nova,apporc/nova,anotherjesse/nova,jeffrey4l/nova,j-carpentier/nova,saleemjaveds/https-github.com-openstack-nova,saleemjaveds/https-github.com-openstack-nova,dawnpower/nova,Juniper/nova,bgxavier/nova,openstack/nova,alexandrucoman/vbox-nova-driver,silenceli/nova,ted-gould/nova,mahak/nova,edulramirez/nova,blueboxgroup/nova,orbitfp7/nova,joker946/nova,kimjaejoong/nova,NewpTone/stacklab-nova,scripnichenko/nova,akash1808/nova_test_latest,NeCTAR-RC/nova,viggates/nova,anotherjesse/nova,paulmathews/nova,luogangyi/bcec-nova,virtualopensystems/nova,ted-gould/nova,bclau/nova,fajoy/nova,JianyuWang/nova,shootstar/novatest,BeyondTheClouds/nova,iuliat/nova,mmnelemane/nova,superstack/nova,vladikr/nova_drafts,BeyondTheClouds/nova,Tehsmash/nova,NeCTAR-RC/nova,angdraug/nova,shail2810/nova,dims/nova,yrobla/nova,gspilio/nova,zaina/nova,sacharya/nova,takeshineshiro/nova,phenoxim/nova,NoBodyCam/TftpPxeBootBareMetal,devoid/nova,hanlind/nova,eneabio/nova,usc-isi/nova,CCI-MOC/nova,ntt-sic/nova,russellb/nova,superstack/nova,sebrandon1/nova,Metaswitch/calico-nova,whitepages/nova,cernops/nova,sebrandon1/nova,openstack/nova,klmitch/nova,leilihh/novaha,russellb/nova,savi-dev/nova,tudorvio/nova,CCI-MOC/nova,maelnor/nova,TieWei/nova,hanlind/nova,DirectXMan12/nova-hacking,sridevikoushik31/openstack,dims/nova,iuliat/nova,devendermishrajio/nova,mandeepdhami/nova,noironetworks/nova,devendermishrajio/nova_test_latest,maelnor/nova,sridevikoushik31/nova,DirectXMan12/nova-hacking,Francis-Liu/animated-broccoli,leilihh/novaha,JioCloud/nova_test_latest,salv-orlando/MyRepo,citrix-openstack-build/nova,jianghuaw/nova,usc-isi/extra-specs,tangfeixiong/nova,klmitch/nova,rrader/nova-docker-plugin,maoy/zknova,OpenAcademy-OpenStack/nova-scheduler,cernops/nova,NewpTone/stacklab-nova,whitepages/nova,imsplitbit/nova,akash1808/nova_test_latest,aristanetworks/arista-ovs-nova,CloudServer/nova,raildo/nova,devendermishrajio/nova_test_latest,cyx1231st/nova,psiwczak/openstack,eayunstack/nova,klmitch/nova,adelina-t/nova,mikalstill/nova,double12gzh/nova,russellb/nova,maheshp/novatest,thomasem/nova,rajalokan/nova,nikesh-mahalka/nova,Tehsmash/nova,shail2810/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,bigswitch/nova,salv-orlando/MyRepo,j-carpentier/nova,cyx1231st/nova,isyippee/nova,edulramirez/nova,maheshp/novatest,psiwczak/openstack,mandeepdhami/nova,savi-dev/nova,usc-isi/extra-specs,alexandrucoman/vbox-nova-driver,raildo/nova,yatinkumbhare/openstack-nova,qwefi/nova,mgagne/nova,yosshy/nova,Triv90/Nova,cloudbase/nova-virtualbox,spring-week-topos/nova-week,takeshineshiro/nova,ewindisch/nova,eayunstack/nova,alvarolopez/nova,sridevikoushik31/openstack,affo/nova,cernops/nova,sridevikoushik31/openstack | ---
+++
@@ -36,7 +36,7 @@
global _MAKER
if not _MAKER:
if not _ENGINE:
- _ENGINE = create_engine(FLAGS.sql_connection, echo=False)
+ _ENGINE = create_engine(FLAGS.sql_connection, pool_recycle=3600, echo=False)
_MAKER = (sessionmaker(bind=_ENGINE,
autocommit=autocommit,
expire_on_commit=expire_on_commit)) |
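The rationale behind this nova commit: MySQL drops idle connections once `wait_timeout` (8 hours by default) expires, after which a pooled connection fails with "MySQL server has gone away"; `pool_recycle=3600` tells SQLAlchemy to discard and reopen any pooled connection older than an hour. A standalone sketch — the connection URL is a placeholder:

```python
from sqlalchemy import create_engine

# Recycle pooled connections hourly so the server's idle timeout never
# invalidates one while it sits in the pool.
engine = create_engine(
    'mysql://user:password@localhost/nova',  # placeholder URL
    pool_recycle=3600,
    echo=False,
)
```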
13fec51e6fa3f47d2f3669e789e9d432e092944a | celeryconfig.py | celeryconfig.py | from datetime import timedelta
from private import CELERY_BROKER_URL, CELERY_RESULT_BACKEND
BROKER_URL = CELERY_BROKER_URL
CELERY_RESULT_BACKEND = CELERY_RESULT_BACKEND
CELERY_TIMEZONE = 'UTC'
CELERY_INCLUDE = ['tasks.scraper_task']
CELERYBEAT_SCHEDULE = {
'scrape_users': {
'task': 'tasks.scraper_task.scraper_task',
'schedule': timedelta(minutes=1)
},
}
| from datetime import timedelta
from private import CELERY_BROKER_URL, CELERY_RESULT_BACKEND
#-------------------------------------------------------------------------------
BROKER_URL = CELERY_BROKER_URL
CELERY_RESULT_BACKEND = CELERY_RESULT_BACKEND
#-------------------------------------------------------------------------------
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
#-------------------------------------------------------------------------------
CELERY_TIMEZONE = 'UTC'
CELERY_INCLUDE = ['tasks.scraper_task']
CELERYBEAT_SCHEDULE = {
'scrape_users': {
'task': 'tasks.scraper_task.scraper_task',
'schedule': timedelta(minutes=1)
},
}
| Use json for task serialization | Use json for task serialization
| Python | mit | Trinovantes/MyAnimeList-Cover-CSS-Generator,Trinovantes/MyAnimeList-Cover-CSS-Generator | ---
+++
@@ -2,8 +2,17 @@
from private import CELERY_BROKER_URL, CELERY_RESULT_BACKEND
-BROKER_URL = CELERY_BROKER_URL
+#-------------------------------------------------------------------------------
+
+BROKER_URL = CELERY_BROKER_URL
CELERY_RESULT_BACKEND = CELERY_RESULT_BACKEND
+
+#-------------------------------------------------------------------------------
+
+CELERY_ACCEPT_CONTENT = ['json']
+CELERY_TASK_SERIALIZER = 'json'
+
+#-------------------------------------------------------------------------------
CELERY_TIMEZONE = 'UTC'
|
bdeb1196025c8f982390f0f298fa8b16b1883bce | mediaman/management/commands/generate_thumbs.py | mediaman/management/commands/generate_thumbs.py | from django.core.management.base import BaseCommand
import easy_thumbnails
from mediaman.models import ArtefactRepresentation
import os
class Command(BaseCommand):
help = "Generate thumbnails for Artefact Representations"
def handle(self, *args, **options):
unbuffered = os.fdopen(self.stdout.fileno(), 'w', 0)
self.stdout = unbuffered
ars = ArtefactRepresentation.objects.all()
self.stdout.write("Found %s images\n" % ars.count())
for ar in ars:
# self.stdout.write(str(ar.image) + "\n")
if ar.image.storage.exists(ar.image):
easy_thumbnails.files.generate_all_aliases(
ar.image, include_global=True)
self.stdout.write('.')
else:
self.stdout.write('n')
self.stdout.write("\nProcessed all images\n")
| from django.core.management.base import BaseCommand
import easy_thumbnails
from mediaman.models import ArtefactRepresentation
import os
#import ImageFile
from PIL import ImageFile
class Command(BaseCommand):
help = "Generate thumbnails for Artefact Representations"
def handle(self, *args, **options):
unbuffered = os.fdopen(self.stdout.fileno(), 'w', 0)
self.stdout = unbuffered
ImageFile.MAXBLOCK = 1024 * 1024 * 10 # default is 64k, fixes "Suspension not allowed here" error from PIL
ars = ArtefactRepresentation.objects.filter(public=True)
self.stdout.write("Found %s public images\n" % ars.count())
for ar in ars:
# self.stdout.write(str(ar.image) + "\n")
if ar.image.storage.exists(ar.image):
easy_thumbnails.files.generate_all_aliases(
ar.image, include_global=True)
self.stdout.write('.')
else:
self.stdout.write('n')
self.stdout.write("\nProcessed all images\n")
| Update bulk image generation command | Update bulk image generation command
| Python | bsd-3-clause | uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam | ---
+++
@@ -2,6 +2,8 @@
import easy_thumbnails
from mediaman.models import ArtefactRepresentation
import os
+#import ImageFile
+from PIL import ImageFile
class Command(BaseCommand):
@@ -10,9 +12,10 @@
def handle(self, *args, **options):
unbuffered = os.fdopen(self.stdout.fileno(), 'w', 0)
self.stdout = unbuffered
+ ImageFile.MAXBLOCK = 1024 * 1024 * 10 # default is 64k, fixes "Suspension not allowed here" error from PIL
- ars = ArtefactRepresentation.objects.all()
- self.stdout.write("Found %s images\n" % ars.count())
+ ars = ArtefactRepresentation.objects.filter(public=True)
+ self.stdout.write("Found %s public images\n" % ars.count())
for ar in ars:
# self.stdout.write(str(ar.image) + "\n") |
07ea0d8ec5c65f0fc94dc29f8b03402c571d3a42 | qipipe/interfaces/fix_dicom.py | qipipe/interfaces/fix_dicom.py | import os
from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec, traits,
InputMultiPath, File, Directory, TraitedSpec)
from qipipe.staging.fix_dicom import fix_dicom_headers
class FixDicomInputSpec(BaseInterfaceInputSpec):
collection = traits.Str(desc='The image collection', mandatory=True)
subject = traits.Str(desc='The subject name', mandatory=True)
in_files = InputMultiPath(File(exists=True), desc='The input DICOM files', mandatory=True)
class FixDicomOutputSpec(TraitedSpec):
out_files = traits.List(desc="The modified output files", trait=File, exists=True)
class FixDicom(BaseInterface):
"""The FixDicom interface wraps the :meth:`qipipe.staging.fix_dicom.fix_dicom_headers`
function."""
input_spec = FixDicomInputSpec
output_spec = FixDicomOutputSpec
def _run_interface(self, runtime):
self._out_files = fix_dicom_headers(self.inputs.collection, self.inputs.subject, *self.inputs.in_files)
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
outputs['out_files'] = self._out_files
return outputs
| import os
from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec, traits,
InputMultiPath, File, Directory, TraitedSpec)
from qipipe.staging.fix_dicom import fix_dicom_headers
class FixDicomInputSpec(BaseInterfaceInputSpec):
collection = traits.Str(desc='The image collection', mandatory=True)
subject = traits.Str(desc='The subject name', mandatory=True)
in_file = File(exists=True, desc='The input DICOM file', mandatory=True)
class FixDicomOutputSpec(TraitedSpec):
out_file = File(desc="The modified output file", exists=True)
class FixDicom(BaseInterface):
"""The FixDicom interface wraps the :meth:`qipipe.staging.fix_dicom.fix_dicom_headers`
function."""
input_spec = FixDicomInputSpec
output_spec = FixDicomOutputSpec
def _run_interface(self, runtime):
self._out_file = fix_dicom_headers(self.inputs.collection, self.inputs.subject, self.inputs.in_file)
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
outputs['out_file'] = self._out_file
return outputs
| Fix only one file at a time. | Fix only one file at a time.
| Python | bsd-2-clause | ohsu-qin/qipipe | ---
+++
@@ -8,11 +8,11 @@
subject = traits.Str(desc='The subject name', mandatory=True)
- in_files = InputMultiPath(File(exists=True), desc='The input DICOM files', mandatory=True)
+ in_file = File(exists=True, desc='The input DICOM file', mandatory=True)
class FixDicomOutputSpec(TraitedSpec):
- out_files = traits.List(desc="The modified output files", trait=File, exists=True)
+ out_file = File(desc="The modified output file", exists=True)
class FixDicom(BaseInterface):
@@ -24,10 +24,10 @@
output_spec = FixDicomOutputSpec
def _run_interface(self, runtime):
- self._out_files = fix_dicom_headers(self.inputs.collection, self.inputs.subject, *self.inputs.in_files)
+ self._out_file = fix_dicom_headers(self.inputs.collection, self.inputs.subject, self.inputs.in_file)
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
- outputs['out_files'] = self._out_files
+ outputs['out_file'] = self._out_file
return outputs |
ee8f04c2e68eddad48db3907d1d5e4ecc5daa4a4 | Functions/Conversation.py | Functions/Conversation.py | from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'Responds to greetings and such'
def GetResponse(self, message):
if message.Type != 'PRIVMSG':
return
match = re.search("^(?P<greeting>(wa+s+|')?so?u+p|hi(ya)?|hey|hello|'?lo|mornin[g']?|greetings|bonjour|salut|howdy|'?yo|o?hai|mojn|hej|dongs|ahoy( hoy)?|salutations|g'?day|hola|bye|night|herrow)( there)?,?[ ]%s([^a-zA-Z0-9_\|`\[\]\^-]|$)" % CurrentNick,
message.MessageString,
re.IGNORECASE)
if match:
return IRCResponse(ResponseType.Say,
'%s %s' % (match.group('greeting'), message.User.Name),
message.ReplyTo) | from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from Function import Function
from GlobalVars import *
import re
class Instantiate(Function):
Help = 'Responds to greetings and such'
def GetResponse(self, message):
if message.Type != 'PRIVMSG':
return
match = re.search("^(?P<greeting>(wa+s+|')?so?u+p|hi(ya)?|hey|hello|'?lo|(good |g'?)?((mornin|evenin)[g']?|ni(ght|ni))|greetings|bonjour|salut|howdy|'?yo|o?hai|mojn|hej|dongs|ahoy( hoy)?|salutations|g'?day|hola|bye|herrow)( there)?,?[ ]%s([^a-zA-Z0-9_\|`\[\]\^-]|$)" % CurrentNick,
message.MessageString,
re.IGNORECASE)
if match:
return IRCResponse(ResponseType.Say,
'%s %s' % (match.group('greeting'), message.User.Name),
message.ReplyTo)
| Add more greetings to the needs-multilining regex | Add more greetings to the needs-multilining regex | Python | mit | MatthewCox/PyMoronBot,Heufneutje/PyMoronBot,DesertBot/DesertBot | ---
+++
@@ -12,7 +12,7 @@
if message.Type != 'PRIVMSG':
return
- match = re.search("^(?P<greeting>(wa+s+|')?so?u+p|hi(ya)?|hey|hello|'?lo|mornin[g']?|greetings|bonjour|salut|howdy|'?yo|o?hai|mojn|hej|dongs|ahoy( hoy)?|salutations|g'?day|hola|bye|night|herrow)( there)?,?[ ]%s([^a-zA-Z0-9_\|`\[\]\^-]|$)" % CurrentNick,
+ match = re.search("^(?P<greeting>(wa+s+|')?so?u+p|hi(ya)?|hey|hello|'?lo|(good |g'?)?((mornin|evenin)[g']?|ni(ght|ni))|greetings|bonjour|salut|howdy|'?yo|o?hai|mojn|hej|dongs|ahoy( hoy)?|salutations|g'?day|hola|bye|herrow)( there)?,?[ ]%s([^a-zA-Z0-9_\|`\[\]\^-]|$)" % CurrentNick,
message.MessageString,
re.IGNORECASE)
if match: |
8808fe8a4d3a8cf36a91fe69b2d1002eddc534a3 | py/garage/garage/startups/sql.py | py/garage/garage/startups/sql.py | """Template of DbEngineComponent."""
__all__ = [
'make_db_engine_component',
]
import logging
import garage.sql.sqlite
from garage import components
from garage.startups.logging import LoggingComponent
def make_db_engine_component(
*,
package_name,
argument_group,
argument_prefix):
DB_URL = '%s_db_url' % argument_prefix.replace('-', '_')
class DbEngineComponent(components.Component):
require = components.ARGS
provide = components.make_fqname_tuple(package_name, 'engine')
def add_arguments(self, parser):
group = parser.add_argument_group(argument_group)
group.add_argument(
'--%s-db-url' % argument_prefix, required=True,
help="""set database URL""")
def check_arguments(self, parser, args):
db_url = getattr(args, DB_URL)
if not db_url.startswith('sqlite'):
parser.error('only support sqlite at the moment: %s' % db_url)
def make(self, require):
db_url = getattr(require.args, DB_URL)
echo = logging.getLogger().isEnabledFor(LoggingComponent.TRACE)
return garage.sql.sqlite.create_engine(db_url, echo=echo)
# Hack for manipulating call order.
DbEngineComponent.add_arguments.__module__ = package_name
DbEngineComponent.check_arguments.__module__ = package_name
return DbEngineComponent
| __all__ = [
'make_db_engine_component',
]
import logging
import garage.sql.sqlite
from garage import components
from garage.startups.logging import LoggingComponent
def make_db_engine_component(
*, package_name,
argument_group, argument_prefix,
check_same_thread=False):
"""DbEngineComponent Generator."""
DB_URL = '%s_db_url' % argument_prefix.replace('-', '_')
class DbEngineComponent(components.Component):
require = components.ARGS
provide = components.make_fqname_tuple(package_name, 'engine')
def add_arguments(self, parser):
group = parser.add_argument_group(argument_group)
group.add_argument(
'--%s-db-url' % argument_prefix, required=True,
help="""set database URL""")
def check_arguments(self, parser, args):
db_url = getattr(args, DB_URL)
if not db_url.startswith('sqlite'):
parser.error('only support sqlite at the moment: %s' % db_url)
def make(self, require):
db_url = getattr(require.args, DB_URL)
echo = logging.getLogger().isEnabledFor(LoggingComponent.TRACE)
return garage.sql.sqlite.create_engine(
db_url,
check_same_thread=check_same_thread,
echo=echo,
)
# Hack for manipulating call order
DbEngineComponent.add_arguments.__module__ = package_name
DbEngineComponent.check_arguments.__module__ = package_name
return DbEngineComponent
| Add check_same_thread argument to make_db_engine_component | Add check_same_thread argument to make_db_engine_component
| Python | mit | clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage | ---
+++
@@ -1,5 +1,3 @@
-"""Template of DbEngineComponent."""
-
__all__ = [
'make_db_engine_component',
]
@@ -12,10 +10,10 @@
def make_db_engine_component(
- *,
- package_name,
- argument_group,
- argument_prefix):
+ *, package_name,
+ argument_group, argument_prefix,
+ check_same_thread=False):
+ """DbEngineComponent Generator."""
DB_URL = '%s_db_url' % argument_prefix.replace('-', '_')
@@ -39,9 +37,13 @@
def make(self, require):
db_url = getattr(require.args, DB_URL)
echo = logging.getLogger().isEnabledFor(LoggingComponent.TRACE)
- return garage.sql.sqlite.create_engine(db_url, echo=echo)
+ return garage.sql.sqlite.create_engine(
+ db_url,
+ check_same_thread=check_same_thread,
+ echo=echo,
+ )
- # Hack for manipulating call order.
+ # Hack for manipulating call order
DbEngineComponent.add_arguments.__module__ = package_name
DbEngineComponent.check_arguments.__module__ = package_name
|
5124e59cf6bb264da6d58043e068b63647685167 | accounts/tests.py | accounts/tests.py | """accounts app unittests
"""
from django.test import TestCase
from django.contrib.auth import get_user_model
from accounts.models import LoginToken
TEST_EMAIL = '[email protected]'
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should response with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = get_user_model()(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = get_user_model()()
self.assertTrue(user.is_authenticated())
class TokenModelTest(TestCase):
"""Tests for login token model.
"""
def test_unique_tokens_generated(self):
"""Two tokens generated should be unique.
"""
token1 = LoginToken(TEST_EMAIL)
token2 = LoginToken(TEST_EMAIL)
self.assertNotEqual(token1, token2)
| """accounts app unittests
"""
from time import sleep
from django.contrib.auth import get_user_model
from django.test import TestCase
from accounts.token import LoginTokenGenerator
TEST_EMAIL = '[email protected]'
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should response with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
class UserModelTest(TestCase):
"""Tests for passwordless user model.
"""
def test_user_valid_with_only_email(self):
"""Should not raise if the user model is happy with email only.
"""
user = get_user_model()(email=TEST_EMAIL)
user.full_clean()
def test_users_are_authenticated(self):
"""User objects should be authenticated for views/templates.
"""
user = get_user_model()()
self.assertTrue(user.is_authenticated())
class TokenGeneratorTest(TestCase):
"""Tests for login token model.
"""
def setUp(self):
self.signer = LoginTokenGenerator()
def test_unique_tokens_generated(self):
"""Tokens generated one second apart should differ.
"""
token1 = self.signer.create_token(TEST_EMAIL)
sleep(1)
token2 = self.signer.create_token(TEST_EMAIL)
self.assertNotEqual(token1, token2)
| Update test to not use a db model | Update test to not use a db model
| Python | mit | randomic/aniauth-tdd,randomic/aniauth-tdd | ---
+++
@@ -1,11 +1,12 @@
"""accounts app unittests
"""
+from time import sleep
+
+from django.contrib.auth import get_user_model
from django.test import TestCase
-from django.contrib.auth import get_user_model
-from accounts.models import LoginToken
-
+from accounts.token import LoginTokenGenerator
TEST_EMAIL = '[email protected]'
@@ -41,14 +42,19 @@
self.assertTrue(user.is_authenticated())
-class TokenModelTest(TestCase):
+class TokenGeneratorTest(TestCase):
"""Tests for login token model.
"""
+ def setUp(self):
+ self.signer = LoginTokenGenerator()
+
def test_unique_tokens_generated(self):
- """Two tokens generated should be unique.
+ """Tokens generated one second apart should differ.
"""
- token1 = LoginToken(TEST_EMAIL)
- token2 = LoginToken(TEST_EMAIL)
+ token1 = self.signer.create_token(TEST_EMAIL)
+ sleep(1)
+ token2 = self.signer.create_token(TEST_EMAIL)
self.assertNotEqual(token1, token2)
+ |
74a5cad21fb726384ab53f2ca9b711cc8298bfb9 | accounts/tests.py | accounts/tests.py | """accounts app unittests
"""
from django.test import TestCase
class WelcomePageTest(TestCase):
def test_uses_welcome_template(self):
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
| """accounts app unittests
"""
from django.test import TestCase
class WelcomePageTest(TestCase):
"""Tests relating to the welcome_page view.
"""
def test_uses_welcome_template(self):
"""The root url should response with the welcome page template.
"""
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html')
| Add docstrings to unit test | Add docstrings to unit test
| Python | mit | randomic/aniauth-tdd,randomic/aniauth-tdd | ---
+++
@@ -5,6 +5,12 @@
class WelcomePageTest(TestCase):
+ """Tests relating to the welcome_page view.
+
+ """
def test_uses_welcome_template(self):
+ """The root url should response with the welcome page template.
+
+ """
response = self.client.get('/')
self.assertTemplateUsed(response, 'accounts/welcome.html') |
082154a93a79e854be6bf55b34c7db3e4a27173f | setup.py | setup.py | from setuptools import setup
setup(name='slacker',
version='0.3.9',
packages=['slacker'],
description='Slack API client',
author='Oktay Sancak',
author_email='[email protected]',
url='http://github.com/os/slacker/',
install_requires=['requests >= 2.2.1'],
license='http://www.apache.org/licenses/LICENSE-2.0',
keywords='slack api')
| from setuptools import setup
setup(name='slacker',
version='0.4.0',
packages=['slacker'],
description='Slack API client',
author='Oktay Sancak',
author_email='[email protected]',
url='http://github.com/os/slacker/',
install_requires=['requests >= 2.2.1'],
license='http://www.apache.org/licenses/LICENSE-2.0',
keywords='slack api')
| Set version number to 0.4.0. | Set version number to 0.4.0.
| Python | apache-2.0 | BetterWorks/slacker,wkentaro/slacker,olasitarska/slacker,os/slacker,wasabi0522/slacker,kashyap32/slacker,STANAPO/slacker,techartorg/slacker,hreeder/slacker,dastergon/slacker | ---
+++
@@ -2,7 +2,7 @@
setup(name='slacker',
- version='0.3.9',
+ version='0.4.0',
packages=['slacker'],
description='Slack API client',
author='Oktay Sancak', |
46e440168f66a5d599814d0e66771f6ce7a1ddc1 | setup.py | setup.py | try:
from setuptools import setup
from setuptools import find_packages
packages = find_packages()
except ImportError:
from distutils.core import setup
import os
packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')]
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
setup(
name='cle',
description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.',
version='8.20.1.7',
python_requires='>=3.5',
packages=packages,
install_requires=[
'pyelftools>=0.25',
'cffi',
'pyvex==8.20.1.7',
'pefile',
'sortedcontainers>=2.0',
],
extras_require={
"minidump": ["minidump==0.0.10"],
"xbe": ["pyxbe==0.0.2"],
"ar": ["arpy==1.1.1"],
}
)
| try:
from setuptools import setup
from setuptools import find_packages
packages = find_packages()
except ImportError:
from distutils.core import setup
import os
packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')]
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
setup(
name='cle',
description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.',
version='8.20.1.7',
python_requires='>=3.6',
packages=packages,
install_requires=[
'pyelftools>=0.25',
'cffi',
'pyvex==8.20.1.7',
'pefile',
'sortedcontainers>=2.0',
],
extras_require={
"minidump": ["minidump==0.0.10"],
"xbe": ["pyxbe==0.0.2"],
"ar": ["arpy==1.1.1"],
}
)
| Mark requirement for python 3.6 | Mark requirement for python 3.6
| Python | bsd-2-clause | angr/cle | ---
+++
@@ -14,7 +14,7 @@
name='cle',
description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.',
version='8.20.1.7',
- python_requires='>=3.5',
+ python_requires='>=3.6',
packages=packages,
install_requires=[
'pyelftools>=0.25', |
26f1506607a2042d508dc69f5a155ed88668d22a | setup.py | setup.py | import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "ideone",
version = "0.0.1",
author = "Joe Schafer",
author_email = "[email protected]",
url = "http://github.com/jschaf",
description = "A Python binding to the Ideone API.",
license = "BSD",
keywords = "API ideone codepad",
packages = ['ideone'],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
install_requires=['suds',]
)
| import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "ideone",
version = "0.0.1",
author = "Joe Schafer",
author_email = "[email protected]",
url = "http://github.com/jschaf/ideone-api/",
description = "A Python binding to the Ideone API.",
license = "BSD",
keywords = "API ideone codepad",
packages = ['ideone'],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
install_requires=['suds',]
)
| Fix url to point to repository. | Fix url to point to repository.
| Python | bsd-3-clause | jschaf/ideone-api | ---
+++
@@ -9,7 +9,7 @@
version = "0.0.1",
author = "Joe Schafer",
author_email = "[email protected]",
- url = "http://github.com/jschaf",
+ url = "http://github.com/jschaf/ideone-api/",
description = "A Python binding to the Ideone API.",
license = "BSD",
keywords = "API ideone codepad", |
9e871bbce34af7d1150181af550e994ba26730e8 | setup.py | setup.py | from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup (
name = 'pyvault',
version = '1.4.4.2',
description = 'Python password manager',
long_description = long_description,
author = 'Gabriel Bordeaux',
author_email = '[email protected]',
url = 'https://github.com/gabfl/vault',
license = 'MIT',
packages = ['vault', 'vault.lib'],
package_dir = { 'vault': 'src' },
install_requires = ['pycryptodome', 'pyperclip', 'tabulate', 'argparse', 'passwordgenerator'], # external dependencies
entry_points = {
'console_scripts': [
'vault = vault.vault:main',
],
},
)
| from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup (
name = 'pyvault',
version = '1.4.4.3',
description = 'Python password manager',
long_description = long_description,
author = 'Gabriel Bordeaux',
author_email = '[email protected]',
url = 'https://github.com/gabfl/vault',
license = 'MIT',
packages = ['vault', 'vault.lib'],
package_dir = { 'vault': 'src' },
install_requires = ['pycryptodome', 'pyperclip', 'tabulate', 'argparse', 'passwordgenerator'], # external dependencies
entry_points = {
'console_scripts': [
'vault = vault.vault:main',
],
},
)
| Bump version for new build | Bump version for new build
| Python | mit | gabfl/vault | ---
+++
@@ -8,7 +8,7 @@
setup (
name = 'pyvault',
- version = '1.4.4.2',
+ version = '1.4.4.3',
description = 'Python password manager',
long_description = long_description,
author = 'Gabriel Bordeaux', |
3b92b81594668cdd24f24fa32a2c5d61f908d22d | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
from os import path
import sys
if sys.version_info < (3, 4):
sys.exit('DataJoint is only supported on Python 3.4 or higher')
here = path.abspath(path.dirname(__file__))
long_description = "A relational data framework for scientific data pipelines with MySQL backend."
# read in version number
with open(path.join(here, 'datajoint', 'version.py')) as f:
exec(f.read())
with open(path.join(here, 'requirements.txt')) as f:
requirements = f.read().split()
setup(
name='datajoint',
version=__version__,
description="A relational data pipeline framework.",
long_description=long_description,
author='Dimitri Yatsenko',
author_email='[email protected]',
license="GNU LGPL",
url='https://datajoint.io',
keywords='database organization',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=requirements,
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
from os import path
import sys
min_py_version = (3, 4)
if sys.version_info < min_py_version:
sys.exit('DataJoint is only supported on Python {}.{} or higher'.format(*min_py_version))
here = path.abspath(path.dirname(__file__))
long_description = "A relational data framework for scientific data pipelines with MySQL backend."
# read in version number
with open(path.join(here, 'datajoint', 'version.py')) as f:
exec(f.read())
with open(path.join(here, 'requirements.txt')) as f:
requirements = f.read().split()
setup(
name='datajoint',
version=__version__,
description="A relational data pipeline framework.",
long_description=long_description,
author='Dimitri Yatsenko',
author_email='[email protected]',
license="GNU LGPL",
url='https://datajoint.io',
keywords='database organization',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=requirements,
python_requires='~={}.{}'.format(*min_py_version)
)
 | Update minimum Python version checks | Update minimum Python version checks
| Python | lgpl-2.1 | eywalker/datajoint-python,dimitri-yatsenko/datajoint-python,datajoint/datajoint-python | ---
+++
@@ -3,8 +3,10 @@
from os import path
import sys
-if sys.version_info < (3, 4):
- sys.exit('DataJoint is only supported on Python 3.4 or higher')
+min_py_version = (3, 4)
+
+if sys.version_info < min_py_version:
+ sys.exit('DataJoint is only supported on Python {}.{} or higher'.format(*min_py_version))
here = path.abspath(path.dirname(__file__))
@@ -29,4 +31,5 @@
keywords='database organization',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=requirements,
+ python_requires='~={}.{}'.format(*min_py_version)
) |
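The `~=` operator used in the hunk above is PEP 440's compatible-release clause: `~=3.4` is equivalent to `>=3.4, <4.0`, so the generated specifier admits any Python 3 release from the stated minimum upward while excluding the next major version. A quick way to sanity-check that equivalence; this sketch assumes the third-party `packaging` library, which is not part of the commit:
```
from packaging.specifiers import SpecifierSet  # pip install packaging

# '~=3.4' is the compatible-release clause: >=3.4 and ==3.*
spec = SpecifierSet("~={}.{}".format(3, 4))
assert "3.4" in spec
assert "3.6" in spec      # later 3.x releases satisfy the clause
assert "4.0" not in spec  # a new major version does not
```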
e3ca5971d7a1cefccb7b412fe17fa5951b5cdc58 | setup.py | setup.py | import os
from setuptools import setup
PACKAGE_VERSION = '0.3'
def version():
def version_file(mode='r'):
return open(os.path.dirname(os.path.abspath(__file__), 'version.txt'), mode)
if os.getenv('TRAVIS'):
with version_file('w') as verfile:
verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER')))
with version_file() as verfile:
data = verfile.readlines()
return data[0].strip()
setup(
name='osaapi',
version=version(),
author='apsliteteam, oznu',
author_email='[email protected]',
packages=['osaapi'],
url='https://aps.odin.com',
license='Apache License',
description='A python client for the Odin Service Automation (OSA) and billing APIs.',
long_description=open('README.md').read(),
)
| import os
from setuptools import setup
PACKAGE_VERSION = '0.3'
def version():
def version_file(mode='r'):
return open(os.path.join(os.path.dirname(os.path.abspath(__file__), 'version.txt')), mode)
if os.getenv('TRAVIS'):
with version_file('w') as verfile:
verfile.write('{0}.{1}'.format(PACKAGE_VERSION, os.getenv('TRAVIS_BUILD_NUMBER')))
with version_file() as verfile:
data = verfile.readlines()
return data[0].strip()
setup(
name='osaapi',
version=version(),
author='apsliteteam, oznu',
author_email='[email protected]',
packages=['osaapi'],
url='https://aps.odin.com',
license='Apache License',
description='A python client for the Odin Service Automation (OSA) and billing APIs.',
long_description=open('README.md').read(),
)
| Fix issue with path variable | Fix issue with path variable
| Python | apache-2.0 | odin-public/osaAPI | ---
+++
@@ -7,7 +7,7 @@
def version():
def version_file(mode='r'):
- return open(os.path.dirname(os.path.abspath(__file__), 'version.txt'), mode)
+ return open(os.path.join(os.path.dirname(os.path.abspath(__file__), 'version.txt')), mode)
if os.getenv('TRAVIS'):
with version_file('w') as verfile: |
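Note that the committed fix above still misplaces a parenthesis: `os.path.dirname(os.path.abspath(__file__), 'version.txt')` passes two arguments to `dirname`, which raises `TypeError` at runtime. The presumably intended call computes the directory first and then joins it with the filename, as in this sketch:
```
import os

def version_file(mode='r'):
    # dirname() takes exactly one path; join the resulting directory
    # with the filename instead of nesting it inside dirname()
    here = os.path.dirname(os.path.abspath(__file__))
    return open(os.path.join(here, 'version.txt'), mode)
```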
48cd23e15cad7ce6a3db916d2287df7dcd98b482 | setup.py | setup.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = []
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
def version():
import gocd
return gocd.__version__
setup(
name='gocd_cli',
author='Björn Andersson',
author_email='[email protected]',
license='MIT License',
version=version(),
packages=find_packages(exclude=('tests',)),
namespace_packages=('gocd_cli', 'gocd_cli.commands',),
cmdclass={'test': PyTest},
requires=[
'gocd_cli',
],
tests_require=[
'pytest',
],
)
| #!/usr/bin/env python
# -*- encoding: utf-8 -*-
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.pytest_args = []
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.pytest_args)
sys.exit(errno)
def version():
import gocd_cli
return gocd_cli.__version__
setup(
name='gocd_cli',
author='Björn Andersson',
author_email='[email protected]',
license='MIT License',
version=version(),
packages=find_packages(exclude=('tests',)),
namespace_packages=('gocd_cli', 'gocd_cli.commands',),
cmdclass={'test': PyTest},
requires=[
'gocd_cli',
],
tests_require=[
'pytest',
],
)
| Set the version from gocd_cli instead of gocd | Set the version from gocd_cli instead of gocd
| Python | mit | gaqzi/py-gocd-cli,gaqzi/gocd-cli | ---
+++
@@ -25,8 +25,8 @@
def version():
- import gocd
- return gocd.__version__
+ import gocd_cli
+ return gocd_cli.__version__
setup( |
e05a0bbe3b49c1e90aa245c52aab982ef49c8c26 | setup.py | setup.py | import sys
from setuptools import setup, find_packages
import querylist
unittest2_module = ''
if sys.version_info < (2, 7):
# spec causes python setup.py test to fail. This import fixes that for
# some reason.
import multiprocessing # noqa
# If we're still on python 2.6, we need unittest2
unittest2_module = 'unittest2<1.2'
setup(
name='querylist',
version=querylist.__version__,
url='https://github.com/thomasw/querylist',
download_url='https://github.com/thomasw/querylist/downloads',
author=querylist.__author__,
author_email='[email protected]',
description='This package provides a QueryList class with django '
'ORM-esque filtering, excluding, and getting for lists. It '
'also provides BetterDict, a dot lookup/assignment capable '
'wrapper for dicts that is 100% backwards compatible.',
packages=find_packages(),
tests_require=[
'nose>=1.3.6,<1.4',
'spec>=1.2.2,<1.3',
unittest2_module
],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries',
],
test_suite='nose.collector',
)
| import sys
from setuptools import setup, find_packages
import querylist
tests_require = [
'nose>=1.3.6,<1.4',
'spec>=1.2.2,<1.3',
]
if sys.version_info < (2, 7):
# spec causes python setup.py test to fail. This import fixes that for
# some reason.
import multiprocessing # noqa
# If we're still on python 2.6, we need unittest2
tests_require.append('unittest2<1.2')
setup(
name='querylist',
version=querylist.__version__,
url='https://github.com/thomasw/querylist',
download_url='https://github.com/thomasw/querylist/downloads',
author=querylist.__author__,
author_email='[email protected]',
description='This package provides a QueryList class with django '
'ORM-esque filtering, excluding, and getting for lists. It '
'also provides BetterDict, a dot lookup/assignment capable '
'wrapper for dicts that is 100% backwards compatible.',
packages=find_packages(),
tests_require=tests_require,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries',
],
test_suite='nose.collector',
)
| Fix issue with building the wheel | Fix issue with building the wheel
The build trips on an empty string requirement, throwing:
```
$ python setup.py sdist bdist_wheel
...
ValueError: ('No requirements found', '')
```
| Python | mit | thomasw/querylist | ---
+++
@@ -5,7 +5,10 @@
import querylist
-unittest2_module = ''
+tests_require = [
+ 'nose>=1.3.6,<1.4',
+ 'spec>=1.2.2,<1.3',
+]
if sys.version_info < (2, 7):
# spec causes python setup.py test to fail. This import fixes that for
@@ -13,8 +16,7 @@
import multiprocessing # noqa
# If we're still on python 2.6, we need unittest2
- unittest2_module = 'unittest2<1.2'
-
+ tests_require.append('unittest2<1.2')
setup(
name='querylist',
@@ -28,11 +30,7 @@
'also provides BetterDict, a dot lookup/assignment capable '
'wrapper for dicts that is 100% backwards compatible.',
packages=find_packages(),
- tests_require=[
- 'nose>=1.3.6,<1.4',
- 'spec>=1.2.2,<1.3',
- unittest2_module
- ],
+ tests_require=tests_require,
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License', |
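The root cause of the wheel failure is that setuptools rejects `''` as a requirement string, so any placeholder entry in `tests_require` breaks `bdist_wheel`. Appending the extra dependency conditionally, as this commit does, avoids the empty entry; on newer setuptools the same effect can be had declaratively with a PEP 508 environment marker. A sketch of that alternative, not part of the commit:
```
tests_require = [
    'nose>=1.3.6,<1.4',
    'spec>=1.2.2,<1.3',
    # the marker keeps the list static; the entry is skipped on
    # interpreters where the condition is false
    'unittest2<1.2; python_version < "2.7"',
]
```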
7f72ac060bef98faa09fc729707cee5d0bafbf87 | setup.py | setup.py | # -*-coding:utf-8-*-
from setuptools import setup
setup(
name="rocketchat_API",
version="1.12.0",
packages=["rocketchat_API", "rocketchat_API.APIExceptions"],
url="https://github.com/jadolg/rocketchat_API",
license="MIT",
author="Jorge Alberto Díaz Orozco",
author_email="[email protected]",
description="Python API wrapper for Rocket.Chat",
long_description=open("README.md", "r").read(),
long_description_content_type="text/markdown",
install_requires=("requests",),
)
| # -*-coding:utf-8-*-
from setuptools import setup
setup(
name="rocketchat_API",
version="1.12.1",
packages=["rocketchat_API", "rocketchat_API.APIExceptions", "rocketchat_API.APISections"],
url="https://github.com/jadolg/rocketchat_API",
license="MIT",
author="Jorge Alberto Díaz Orozco",
author_email="[email protected]",
description="Python API wrapper for Rocket.Chat",
long_description=open("README.md", "r").read(),
long_description_content_type="text/markdown",
install_requires=("requests",),
)
| Add APISections to the python package | Add APISections to the python package
| Python | mit | jadolg/rocketchat_API | ---
+++
@@ -4,8 +4,8 @@
setup(
name="rocketchat_API",
- version="1.12.0",
- packages=["rocketchat_API", "rocketchat_API.APIExceptions"],
+ version="1.12.1",
+ packages=["rocketchat_API", "rocketchat_API.APIExceptions", "rocketchat_API.APISections"],
url="https://github.com/jadolg/rocketchat_API",
license="MIT",
author="Jorge Alberto Díaz Orozco", |
268976034ad508c2ef48dec60da40dec57af824f | setup.py | setup.py | from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(
name = "DragonCreole",
packages = ["dragoncreole"],
version = "0.1.0",
description = "Optimized parser for creole-like markup language",
author = "Zauber Paracelsus",
author_email = "[email protected]",
url = "http://github.com/zauberparacelsus/dragoncreole",
download_url = "https://github.com/zauberparacelsus/dragoncreole/tarball/0.1",
keywords = ["parser", "markup", "html"],
install_requires= [
'html2text'
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Markup :: HTML"
],
long_description = "",
cmdclass = {"build_ext": build_ext},
ext_modules = [Extension("DragonCreoleC", ["dragoncreole/dragoncreole.py"])]
)
| from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
setup(
name = "DragonCreole",
packages = ["dragoncreole"],
version = "0.1.0",
description = "Optimized parser for creole-like markup language",
author = "Zauber Paracelsus",
author_email = "[email protected]",
url = "http://github.com/zauberparacelsus/dragoncreole",
download_url = "https://github.com/zauberparacelsus/dragoncreole/tarball/0.1",
keywords = ["parser", "markup", "html"],
install_requires= [
'html2text',
'cython'
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Text Processing :: Markup :: HTML"
],
long_description = "",
cmdclass = {"build_ext": build_ext},
ext_modules = [Extension("dragoncreole.DragonCreoleC", ["dragoncreole/dragoncreole.py"])]
)
| Tweak for building with cython | Tweak for building with cython
| Python | mpl-2.0 | zauberparacelsus/dragoncreole,zauberparacelsus/dragoncreole | ---
+++
@@ -13,7 +13,8 @@
download_url = "https://github.com/zauberparacelsus/dragoncreole/tarball/0.1",
keywords = ["parser", "markup", "html"],
install_requires= [
- 'html2text'
+ 'html2text',
+ 'cython'
],
classifiers = [
"Programming Language :: Python",
@@ -27,5 +28,5 @@
],
long_description = "",
cmdclass = {"build_ext": build_ext},
- ext_modules = [Extension("DragonCreoleC", ["dragoncreole/dragoncreole.py"])]
+ ext_modules = [Extension("dragoncreole.DragonCreoleC", ["dragoncreole/dragoncreole.py"])]
) |
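The second hunk matters for imports: an unqualified `Extension("DragonCreoleC", ...)` installs the compiled module at the top level of site-packages, while the dotted name places it inside the package so `from dragoncreole import DragonCreoleC` resolves after installation. A minimal illustration of the naming rule, with the paths taken from the commit:
```
from distutils.extension import Extension

# the dotted name makes build_ext place the built artifact at
# dragoncreole/DragonCreoleC.*.so instead of a top-level module
ext = Extension("dragoncreole.DragonCreoleC", ["dragoncreole/dragoncreole.py"])
```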
ef1e44cb6a815b3fc86faf4d90fba407270aa02b | setup.py | setup.py | from setuptools import setup
exec([l for l in open("flask_mwoauth/__init__.py") if l.startswith('__version__')][0])
setup(name='flask-mwoauth',
version=__version__,
description='Flask blueprint to connect to a MediaWiki OAuth server',
url='http://github.com/valhallasw/flask-mwoauth',
author='Merlijn van Deen',
author_email='[email protected]',
license='MIT',
packages=['flask_mwoauth'],
install_requires=['flask-oauth'],
zip_safe=True,
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Software Development :: Libraries :: Python Modules",
]
)
| from setuptools import setup
exec([l for l in open("flask_mwoauth/__init__.py")
if l.startswith('__version__')][0])
setup(name='flask-mwoauth',
version=__version__,
description='Flask blueprint to connect to a MediaWiki OAuth server',
url='http://github.com/valhallasw/flask-mwoauth',
author='Merlijn van Deen',
author_email='[email protected]',
license='MIT',
packages=['flask_mwoauth'],
install_requires=['flask-oauth', 'requests>=2.0.1'],
zip_safe=True,
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Software Development :: Libraries :: Python Modules"]
)
 | Add the requests module to the dependencies | Add the requests module to the dependencies
| Python | mit | sitic/flask-mwoauth,valhallasw/flask-mwoauth,sitic/flask-mwoauth,valhallasw/flask-mwoauth | ---
+++
@@ -1,6 +1,7 @@
from setuptools import setup
-exec([l for l in open("flask_mwoauth/__init__.py") if l.startswith('__version__')][0])
+exec([l for l in open("flask_mwoauth/__init__.py")
+ if l.startswith('__version__')][0])
setup(name='flask-mwoauth',
version=__version__,
@@ -10,7 +11,7 @@
author_email='[email protected]',
license='MIT',
packages=['flask_mwoauth'],
- install_requires=['flask-oauth'],
+ install_requires=['flask-oauth', 'requests>=2.0.1'],
zip_safe=True,
classifiers=[
"Development Status :: 3 - Alpha",
@@ -19,6 +20,5 @@
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
- "Topic :: Software Development :: Libraries :: Python Modules",
- ]
-)
+ "Topic :: Software Development :: Libraries :: Python Modules"]
+ ) |
8d034ca0c30166ec3972d0f8db83e00ff4f8055f | setup.py | setup.py | from distutils.core import setup
setup(name='steamfootbridge',
version='0.0.1',
packages=['steamfootbridge'],
scripts=['bin/steamfootbridge'],
)
| from setuptools import setup
setup(name='steamfootbridge',
version='0.0.1',
packages=['steamfootbridge'],
scripts=['bin/steamfootbridge'],
install_requires=[
'steamodd',
],
)
 | Convert Python installation to PyPI | Convert Python installation to PyPI
This does mean python-pip or similar will need to be installed on the
system.
| Python | mit | sirnuke/steamfootbridge,sirnuke/steamfootbridge | ---
+++
@@ -1,7 +1,10 @@
-from distutils.core import setup
+from setuptools import setup
setup(name='steamfootbridge',
version='0.0.1',
packages=['steamfootbridge'],
scripts=['bin/steamfootbridge'],
+ install_requires=[
+ 'steamodd',
+ ],
) |
259396028862e4c7b2a00e2c908be2b8ffeee991 | setup.py | setup.py | #!/usr/bin/env python
import os
import distutils.core
import sys
try:
import setuptools
except ImportError:
pass
try:
license = open('LICENSE').read()
except:
license = None
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def desc():
info = read('README.rst')
try:
return info + '\n\n' + read('doc/changelog.rst')
except IOError:
return info
distutils.core.setup(
name='sockjs-tornado',
version='1.0.0',
author='Serge S. Koval',
author_email='[email protected]',
packages=['sockjs', 'sockjs.tornado', 'sockjs.tornado.transports'],
namespace_packages=['sockjs'],
scripts=[],
url='http://github.com/mrjoes/sockjs-tornado/',
license=license,
description='SockJS python server implementation on top of Tornado framework',
long_description=desc(),
requires=['tornado'],
install_requires=[
'tornado >= 2.1.1'
]
)
| #!/usr/bin/env python
import os
import distutils.core
import sys
try:
import setuptools
except ImportError:
pass
try:
license = open('LICENSE').read()
except:
license = None
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def desc():
info = read('README.rst')
try:
return info + '\n\n' + read('doc/changelog.rst')
except IOError:
return info
distutils.core.setup(
name='sockjs-tornado',
version='1.0.0',
author='Serge S. Koval',
author_email='[email protected]',
packages=['sockjs', 'sockjs.tornado', 'sockjs.tornado.transports'],
namespace_packages=['sockjs'],
scripts=[],
url='http://github.com/mrjoes/sockjs-tornado/',
license=license,
description='SockJS python server implementation on top of Tornado framework',
long_description=desc(),
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
],
requires=['tornado'],
install_requires=[
'tornado >= 2.1.1'
]
)
| Add the Python version and license classifiers | Add the Python version and license classifiers
Signed-off-by: Tomas Sedovic <[email protected]>
| Python | mit | codepython/sockjs-tornado,ImaginationForPeople/sockjs-tornado,codepython/sockjs-tornado,barseghyanartur/sockjs-tornado,mrjoes/sockjs-tornado,ImaginationForPeople/sockjs-tornado,DexterInd/sockjs-tornado,codepython/sockjs-tornado,MrJoes/sockjs-tornado,DexterInd/sockjs-tornado,pjknkda/sockjs-tornado,DexterInd/sockjs-tornado,pjknkda/sockjs-tornado,ImaginationForPeople/sockjs-tornado,pjknkda/sockjs-tornado | ---
+++
@@ -38,6 +38,16 @@
license=license,
description='SockJS python server implementation on top of Tornado framework',
long_description=desc(),
+ classifiers=[
+ 'License :: OSI Approved :: MIT License',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.2',
+ 'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ ],
requires=['tornado'],
install_requires=[
'tornado >= 2.1.1' |
9487887223be3a321c507e65210c2b651060fac3 | setup.py | setup.py | try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requires = [
'requests',
'rauth'
]
setup(
name='fatsecret',
version='0.2.1',
description='Python wrapper for FatSecret REST API',
url='github.com/walexnelson/pyfatsecret',
license='MIT',
author='Alex Nelson',
author_email='[email protected]',
install_requires=requires,
py_modules=("fatsecret",),
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3'
]
)
| try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requires = [
'requests',
'rauth'
]
setup(
name='fatsecret',
version='0.2.1',
description='Python wrapper for FatSecret REST API',
url='github.com/walexnelson/pyfatsecret',
license='MIT',
author='Alex Nelson',
author_email='[email protected]',
install_requires=requires,
packages=["fatsecret"],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3'
]
)
| Use packaging instead of a module | Use packaging instead of a module | Python | mit | walexnelson/pyfatsecret | ---
+++
@@ -17,7 +17,7 @@
author='Alex Nelson',
author_email='[email protected]',
install_requires=requires,
- py_modules=("fatsecret",),
+ packages=["fatsecret"],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta', |
9c9319516875875707f1d922d9dd20d9f2c1ce6e | setup.py | setup.py | from setuptools import setup
from util import __version__
setup(
name="util",
author="Olav Vahtras",
author_email="[email protected]",
version=__version__,
url="https://github.com/vahtras/util",
packages=["util"],
install_requires=["numpy", "scipy"],
)
| from setuptools import setup
from util import __version__
setup(
name="blocked-matrix-utils",
author="Olav Vahtras",
author_email="[email protected]",
version=__version__,
url="https://github.com/vahtras/util",
packages=["util"],
install_requires=["numpy", "scipy"],
)
 | Change package name: 'util' is already taken on PyPI | Change package name: 'util' is already taken on PyPI
| Python | mit | vahtras/util | ---
+++
@@ -2,7 +2,7 @@
from util import __version__
setup(
- name="util",
+ name="blocked-matrix-utils",
author="Olav Vahtras",
author_email="[email protected]",
version=__version__, |
f84846b223bbf903de0dd261dd2c02f7a971d929 | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name = 'pymoira',
version = '0.9',
description = 'Client library for MIT Moira service managment system protocol',
author = 'Victor Vasiliev',
author_email = '[email protected]',
url = 'https://github.com/vasilvv/pymoira',
license = 'MIT',
py_modules = ['pymoira'])
| #!/usr/bin/env python
from distutils.core import setup
setup(name = 'pymoira',
version = '0.9',
description = 'Client library for MIT Moira service managment system protocol',
author = 'Victor Vasiliev',
author_email = '[email protected]',
url = 'https://github.com/vasilvv/pymoira',
license = 'MIT',
packages = ['pymoira'])
| Fix a stupid mistake in an installer. | Fix a stupid mistake in an installer.
| Python | mit | vasilvv/pymoira | ---
+++
@@ -9,4 +9,4 @@
author_email = '[email protected]',
url = 'https://github.com/vasilvv/pymoira',
license = 'MIT',
- py_modules = ['pymoira'])
+ packages = ['pymoira']) |
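The distinction behind this one-word fix: `py_modules` declares single-file modules (it would look for a top-level `pymoira.py`), while `packages` declares directories containing an `__init__.py`. A side-by-side sketch:
```
from distutils.core import setup

# package layout (a directory):  pymoira/__init__.py
setup(name='pymoira', packages=['pymoira'])

# single-file layout would be:   pymoira.py
# setup(name='pymoira', py_modules=['pymoira'])
```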
1f03b2945b4e52ce22a3b9e6143d02d3bd9aef99 | overtime_calculator/tests/auth_test.py | overtime_calculator/tests/auth_test.py | import shutil
import pytest
import hug
from overtime_calculator.src import api
from overtime_calculator.src.auth import get_user_folder
def test_register():
user_name = 'test1'
response = hug.test.post(
api,
'/register',
{'username': user_name, 'password': user_name},
)
assert response.data == {'status': 'ok'}
def test_signin():
response = hug.test.post(api, '/signin', {'username': 'test_1', 'password': 'test_1'})
print(response.data)
assert response.data['token'] is not None
def teardown_module():
user_folder = get_user_folder('test1')
shutil.rmtree(str(user_folder), ignore_errors=False)
| import shutil
import pytest
import hug
from overtime_calculator.src import api
from overtime_calculator.src.auth import get_user_folder
EXISTING_USER = 'test1'
UNREGISTERED_USER = 'test2'
def test_registration_of_new_user():
response = hug.test.post(
api,
'/register',
{'username': EXISTING_USER, 'password': EXISTING_USER},
)
print(response.data) # Will only show if test fails and is run with --verbose (-v)
assert response.data == {'status': 'ok'}
def test_second_registration_of_registered_user():
response = hug.test.post(
api,
'/register',
{'username': EXISTING_USER, 'password': EXISTING_USER},
)
print(response.data) # Will only show if test fails and is run with --verbose (-v)
assert response.data == dict(error='username already in use')
def test_sign_in_of_existing_user():
response = hug.test.post(
api,
'/signin',
{'username': EXISTING_USER, 'password': EXISTING_USER}
)
print(response.data) # Will only show if test fails and is run with --verbose (-v)
assert 'token' in response.data and response.data['token']
def teardown_module():
user_folder = get_user_folder(EXISTING_USER)
shutil.rmtree(str(user_folder), ignore_errors=False)
| Add test for already registered user | Feature: Add test for already registered user
| Python | mit | x10an14/overtime-calculator | ---
+++
@@ -6,22 +6,41 @@
from overtime_calculator.src import api
from overtime_calculator.src.auth import get_user_folder
+EXISTING_USER = 'test1'
+UNREGISTERED_USER = 'test2'
-def test_register():
- user_name = 'test1'
+
+def test_registration_of_new_user():
response = hug.test.post(
api,
'/register',
- {'username': user_name, 'password': user_name},
+ {'username': EXISTING_USER, 'password': EXISTING_USER},
)
+ print(response.data) # Will only show if test fails and is run with --verbose (-v)
assert response.data == {'status': 'ok'}
-def test_signin():
- response = hug.test.post(api, '/signin', {'username': 'test_1', 'password': 'test_1'})
- print(response.data)
- assert response.data['token'] is not None
+
+def test_second_registration_of_registered_user():
+ response = hug.test.post(
+ api,
+ '/register',
+ {'username': EXISTING_USER, 'password': EXISTING_USER},
+ )
+ print(response.data) # Will only show if test fails and is run with --verbose (-v)
+ assert response.data == dict(error='username already in use')
+
+
+def test_sign_in_of_existing_user():
+ response = hug.test.post(
+ api,
+ '/signin',
+ {'username': EXISTING_USER, 'password': EXISTING_USER}
+ )
+ print(response.data) # Will only show if test fails and is run with --verbose (-v)
+ assert 'token' in response.data and response.data['token']
+
def teardown_module():
- user_folder = get_user_folder('test1')
+ user_folder = get_user_folder(EXISTING_USER)
shutil.rmtree(str(user_folder), ignore_errors=False)
|
77ee44b0af8a80babf0a88ddd4f53f2f4ad10d2d | tests/test_event.py | tests/test_event.py | import unittest
from evesp.event import Event
class TestEvent(unittest.TestCase):
def setUp(self):
self.evt = Event(foo='bar')
def test_event_creation(self):
self.assertEqual(self.evt.foo, 'bar')
self.assertRaises(AttributeError, getattr, self.evt, 'non_existing')
def test_event_pickle_serialization(self):
ser_evt = self.evt.serialize()
deser_evt = Event.deserialize(ser_evt)
self.assertEqual(deser_evt.foo, 'bar')
self.assertRaises(AttributeError, getattr, deser_evt, 'non_existing')
def test_event_json_serialization(self):
ser_evt = self.evt.to_json()
deser_evt = Event.from_json(ser_evt)
self.assertEqual(deser_evt.foo, 'bar')
self.assertRaises(AttributeError, getattr, deser_evt, 'non_existing')
if __name__ == "__main__":
unittest.main()
# vim:sw=4:ts=4:et:
| import unittest
from evesp.event import Event
class TestEvent(unittest.TestCase):
def setUp(self):
self.evt = Event(foo='bar')
def test_event_creation(self):
self.assertEqual(self.evt.foo, 'bar')
def test_non_existing_event(self):
self.assertRaises(AttributeError, getattr, self.evt, 'non_existing')
def test_event_pickle_serialization(self):
ser_evt = self.evt.serialize()
deser_evt = Event.deserialize(ser_evt)
self.assertEqual(deser_evt.foo, 'bar')
self.assertRaises(AttributeError, getattr, deser_evt, 'non_existing')
def test_event_json_serialization(self):
ser_evt = self.evt.to_json()
deser_evt = Event.from_json(ser_evt)
self.assertEqual(deser_evt.foo, 'bar')
self.assertRaises(AttributeError, getattr, deser_evt, 'non_existing')
if __name__ == "__main__":
unittest.main()
# vim:sw=4:ts=4:et:
| Split one test into two tests | Split one test into two tests
| Python | apache-2.0 | BlackLight/evesp | ---
+++
@@ -8,6 +8,8 @@
def test_event_creation(self):
self.assertEqual(self.evt.foo, 'bar')
+
+ def test_non_existing_event(self):
self.assertRaises(AttributeError, getattr, self.evt, 'non_existing')
def test_event_pickle_serialization(self): |
a27b03a89af6442dc8e1be3d310a8fc046a98ed4 | foampy/tests.py | foampy/tests.py | """
Tests for foamPy.
"""
from .core import *
from .dictionaries import *
from .types import *
from .foil import *
| """Tests for foamPy."""
from .core import *
from .dictionaries import *
from .types import *
from .foil import *
def test_load_all_torque_drag():
"""Test the `load_all_torque_drag` function."""
t, torque, drag = load_all_torque_drag(casedir="test")
assert t.max() == 4.0
| Add test for loading all torque and drag data | Add test for loading all torque and drag data
| Python | mit | petebachant/foamPy,petebachant/foamPy,petebachant/foamPy | ---
+++
@@ -1,8 +1,12 @@
-"""
-Tests for foamPy.
-"""
+"""Tests for foamPy."""
from .core import *
from .dictionaries import *
from .types import *
from .foil import *
+
+
+def test_load_all_torque_drag():
+ """Test the `load_all_torque_drag` function."""
+ t, torque, drag = load_all_torque_drag(casedir="test")
+ assert t.max() == 4.0 |
f2d91d2c296e3662a1b656f0fdf5191665ff363b | skimage/transform/__init__.py | skimage/transform/__init__.py | from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (warp, warp_coords, estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform,
PiecewiseAffineTransform)
from ._warps import swirl, homography, resize, rotate, rescale
from .pyramids import (pyramid_reduce, pyramid_expand,
pyramid_gaussian, pyramid_laplacian)
| from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (warp, warp_coords, estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform,
PiecewiseAffineTransform)
from ._warps import swirl, resize, rotate, rescale
from .pyramids import (pyramid_reduce, pyramid_expand,
pyramid_gaussian, pyramid_laplacian)
 | Remove deprecated import of homography | Remove deprecated import of homography
| Python | bsd-3-clause | youprofit/scikit-image,almarklein/scikit-image,keflavich/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,almarklein/scikit-image,chriscrosscutler/scikit-image,ajaybhat/scikit-image,SamHames/scikit-image,oew1v07/scikit-image,vighneshbirodkar/scikit-image,youprofit/scikit-image,SamHames/scikit-image,robintw/scikit-image,emon10005/scikit-image,emon10005/scikit-image,ClinicalGraphics/scikit-image,Midafi/scikit-image,warmspringwinds/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,michaelaye/scikit-image,ofgulban/scikit-image,Britefury/scikit-image,michaelaye/scikit-image,blink1073/scikit-image,paalge/scikit-image,Britefury/scikit-image,keflavich/scikit-image,rjeli/scikit-image,newville/scikit-image,bennlich/scikit-image,SamHames/scikit-image,ajaybhat/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,jwiggins/scikit-image,warmspringwinds/scikit-image,chintak/scikit-image,oew1v07/scikit-image,ClinicalGraphics/scikit-image,dpshelio/scikit-image,SamHames/scikit-image,juliusbierk/scikit-image,Midafi/scikit-image,GaZ3ll3/scikit-image,ofgulban/scikit-image,paalge/scikit-image,chintak/scikit-image,robintw/scikit-image,bsipocz/scikit-image,bsipocz/scikit-image,rjeli/scikit-image,bennlich/scikit-image,juliusbierk/scikit-image,chriscrosscutler/scikit-image,jwiggins/scikit-image,michaelpacer/scikit-image,WarrenWeckesser/scikits-image,Hiyorimi/scikit-image,almarklein/scikit-image,pratapvardhan/scikit-image,Hiyorimi/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,dpshelio/scikit-image,rjeli/scikit-image,newville/scikit-image,GaZ3ll3/scikit-image,paalge/scikit-image,blink1073/scikit-image | ---
+++
@@ -6,6 +6,6 @@
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform,
PiecewiseAffineTransform)
-from ._warps import swirl, homography, resize, rotate, rescale
+from ._warps import swirl, resize, rotate, rescale
from .pyramids import (pyramid_reduce, pyramid_expand,
pyramid_gaussian, pyramid_laplacian) |
589534c52ceff1d4aabb8d72b779359ce2032827 | tests/integration/integration/runner.py | tests/integration/integration/runner.py | import os
import subprocess
def load_variables_from_env(prefix="XII_INTEGRATION_"):
length = len(prefix)
vars = {}
for var in filter(lambda x: x.startswith(prefix), os.environ):
vars[var[length:]] = os.environ[var]
return vars
def run_xii(deffile, cmd, variables={}, gargs=None, cargs=None):
xii_env = os.environ.copy()
for key, value in variables.items():
print("=> XII_" + key + " defined")
xii_env["XII_" + key] = value
call = ["xii", "--no-parallel", "--deffile", deffile, gargs, cmd, cargs]
print("calling `{}`".format(" ".join(filter(None, call))))
process = subprocess.Popen(call, stdout=subprocess.PIPE, env=xii_env)
for line in process.stdout:
print("> " + line.rstrip(os.linesep))
if process.returncode != 0:
raise RuntimeError("running xii failed")
| import os
import subprocess
def load_variables_from_env(prefix="XII_INTEGRATION_"):
length = len(prefix)
vars = {}
for var in filter(lambda x: x.startswith(prefix), os.environ):
vars[var[length:]] = os.environ[var]
return vars
def run_xii(deffile, cmd, variables={}, gargs=None, cargs=None):
xii_env = os.environ.copy()
for key, value in variables.items():
print("=> XII_" + key + " defined")
xii_env["XII_" + key] = value
call = ["xii", "--no-parallel", "--deffile", deffile, cmd]
print("calling `{}`".format(" ".join(call)))
process = subprocess.Popen(call, stdout=subprocess.PIPE, env=xii_env)
for line in process.stdout:
print("> " + line.rstrip(os.linesep))
if process.returncode != 0:
raise RuntimeError("running xii failed")
| Make cargs and gargs truly optional | Make cargs and gargs truly optional
| Python | apache-2.0 | xii/xii,xii/xii | ---
+++
@@ -18,8 +18,8 @@
print("=> XII_" + key + " defined")
xii_env["XII_" + key] = value
- call = ["xii", "--no-parallel", "--deffile", deffile, gargs, cmd, cargs]
- print("calling `{}`".format(" ".join(filter(None, call))))
+ call = ["xii", "--no-parallel", "--deffile", deffile, cmd]
+ print("calling `{}`".format(" ".join(call)))
process = subprocess.Popen(call, stdout=subprocess.PIPE, env=xii_env)
|
705e9ee8ebe1a1c590ccbec8eed9d18abbf8e914 | tests/similarity/test_new_similarity.py | tests/similarity/test_new_similarity.py | import unittest
from similarity.nw_similarity import NWAlgorithm
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
unittest.main() | import unittest
from similarity.nw_similarity import NWAlgorithm
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_nw_algorithm(self):
t = NWAlgorithm('abcdefghij', 'dgj')
t.print_matrix()
(a, b) = t.alignments()
print '---------------'
print a
print b
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testSimple']
unittest.main() | Fix incorrect import reference to nw_similarity | Fix incorrect import reference to nw_similarity
| Python | mit | dpazel/tryinggithub | ---
+++
@@ -15,6 +15,7 @@
t.print_matrix()
(a, b) = t.alignments()
+ print '---------------'
print a
print b
|
b36f89088ab1270054140a3d3020960f23c9790b | aldryn_blog/cms_toolbar.py | aldryn_blog/cms_toolbar.py | # -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from cms.toolbar_pool import toolbar_pool
from cms.toolbar_base import CMSToolbar
from aldryn_blog import request_post_identifier
@toolbar_pool.register
class BlogToolbar(CMSToolbar):
def populate(self):
if not (self.is_current_app and self.request.user.has_perm('aldryn_blog.add_post')):
return
menu = self.toolbar.get_or_create_menu('blog-app', _('Blog'))
menu.add_modal_item(_('Add Blog Post'), reverse('admin:aldryn_blog_post_add') + '?_popup',
close_on_url=reverse('admin:aldryn_blog_post_changelist'))
blog_entry = getattr(self.request, request_post_identifier, None)
if blog_entry and self.request.user.has_perm('aldryn_blog.change_post'):
menu.add_modal_item(_('Edit Blog Post'), reverse('admin:aldryn_blog_post_change', args=(
blog_entry.pk,)) + '?_popup',
close_on_url=reverse('admin:aldryn_blog_post_changelist'), active=True)
| # -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from cms.toolbar_pool import toolbar_pool
from cms.toolbar_base import CMSToolbar
from aldryn_blog import request_post_identifier
@toolbar_pool.register
class BlogToolbar(CMSToolbar):
def populate(self):
if not (self.is_current_app and self.request.user.has_perm('aldryn_blog.add_post')):
return
menu = self.toolbar.get_or_create_menu('blog-app', _('Blog'))
menu.add_modal_item(_('Add Blog Post'), reverse('admin:aldryn_blog_post_add'),
close_on_url=reverse('admin:aldryn_blog_post_changelist'))
blog_entry = getattr(self.request, request_post_identifier, None)
if blog_entry and self.request.user.has_perm('aldryn_blog.change_post'):
menu.add_modal_item(_('Edit Blog Post'), reverse('admin:aldryn_blog_post_change', args=(
blog_entry.pk,)),
close_on_url=reverse('admin:aldryn_blog_post_changelist'), active=True)
| Remove '?_popup' from toolbar urls | Remove '?_popup' from toolbar urls
| Python | bsd-3-clause | aldryn/aldryn-blog,aldryn/aldryn-blog | ---
+++
@@ -14,11 +14,11 @@
if not (self.is_current_app and self.request.user.has_perm('aldryn_blog.add_post')):
return
menu = self.toolbar.get_or_create_menu('blog-app', _('Blog'))
- menu.add_modal_item(_('Add Blog Post'), reverse('admin:aldryn_blog_post_add') + '?_popup',
+ menu.add_modal_item(_('Add Blog Post'), reverse('admin:aldryn_blog_post_add'),
close_on_url=reverse('admin:aldryn_blog_post_changelist'))
blog_entry = getattr(self.request, request_post_identifier, None)
if blog_entry and self.request.user.has_perm('aldryn_blog.change_post'):
menu.add_modal_item(_('Edit Blog Post'), reverse('admin:aldryn_blog_post_change', args=(
- blog_entry.pk,)) + '?_popup',
+ blog_entry.pk,)),
close_on_url=reverse('admin:aldryn_blog_post_changelist'), active=True) |
33dd6ab01cea7a2a83d3d9d0c7682f716cbcb8b2 | molecule/default/tests/test_default.py | molecule/default/tests/test_default.py | import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_hosts_file(host):
f = host.file('/etc/hosts')
assert f.exists
assert f.user == 'root'
assert f.group == 'root'
def test_cvmfs_client(host):
"""Test that the CVMFS client is properly installed"""
pkg = host.package('cvmfs')
client = host.file('/usr/bin/cvmfs2')
version = '2.4.3'
assert pkg.is_installed
assert pkg.version.startswith(version)
def test_CODE_RADE_mounted(host):
"""Check that the CODE-RADE repo is mounted"""
assert host.mount_point("/cvmfs/code-rade.africa-grid.org").exists
def test_CODE_RADE_version(host):
"""Check CODE-RADE version"""
cvmfs_version = host.file('/cvmfs/code-rade.africa-grid.org/version')
assert cvmfs_version.exists
assert cvmfs_version.contains('FR3') | import os
import testinfra.utils.ansible_runner
testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')
def test_hosts_file(host):
"""Basic checks on the host."""
f = host.file('/etc/hosts')
assert f.exists
assert f.user == 'root'
assert f.group == 'root'
def test_cvmfs_client(host):
"""Test that the CVMFS client is properly installed."""
pkg = host.package('cvmfs')
client = host.file('/usr/bin/cvmfs2')
version = '2.4.3'
assert pkg.is_installed
assert pkg.version.startswith(version)
assert client.exists
def test_CODE_RADE_mounted(host):
"""Check that the CODE-RADE repo is mounted"""
assert host.mount_point("/cvmfs/code-rade.africa-grid.org").exists
def test_CODE_RADE_version(host):
"""Check CODE-RADE version."""
cvmfs_version = host.file('/cvmfs/code-rade.africa-grid.org/version')
assert cvmfs_version.exists
assert cvmfs_version.contains('FR3') | Fix lint errors in tests | Fix lint errors in tests
| Python | apache-2.0 | brucellino/cvmfs-client-2.2,brucellino/cvmfs-client-2.2,AAROC/cvmfs-client-2.2,AAROC/cvmfs-client-2.2 | ---
+++
@@ -7,28 +7,32 @@
def test_hosts_file(host):
+ """Basic checks on the host."""
f = host.file('/etc/hosts')
assert f.exists
assert f.user == 'root'
assert f.group == 'root'
+
def test_cvmfs_client(host):
- """Test that the CVMFS client is properly installed"""
+ """Test that the CVMFS client is properly installed."""
pkg = host.package('cvmfs')
client = host.file('/usr/bin/cvmfs2')
version = '2.4.3'
assert pkg.is_installed
assert pkg.version.startswith(version)
+ assert client.exists
def test_CODE_RADE_mounted(host):
"""Check that the CODE-RADE repo is mounted"""
assert host.mount_point("/cvmfs/code-rade.africa-grid.org").exists
+
def test_CODE_RADE_version(host):
- """Check CODE-RADE version"""
+ """Check CODE-RADE version."""
cvmfs_version = host.file('/cvmfs/code-rade.africa-grid.org/version')
assert cvmfs_version.exists |
7930f968830efd40e1fb200ef331f0c4d955db65 | api/base.py | api/base.py | from django.contrib.auth.models import User
from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import DjangoAuthorization, Authorization
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from builds.models import Build
from projects.models import Project
class UserResource(ModelResource):
class Meta:
authentication = BasicAuthentication()
authorization = DjangoAuthorization()
allowed_methods = ['get', 'post', 'put']
queryset = User.objects.all()
fields = ['username', 'first_name',
'last_name', 'last_login',
'id']
filtering = {
"username": ('exact', 'startswith'),
}
class ProjectResource(ModelResource):
user = fields.ForeignKey(UserResource, 'user')
class Meta:
authentication = BasicAuthentication()
authorization = DjangoAuthorization()
allowed_methods = ['get', 'post', 'put']
queryset = Project.objects.all()
filtering = {
"slug": ('exact', 'startswith'),
}
excludes = ['build_pdf', 'path', 'skip', 'featured']
class BuildResource(ModelResource):
project = fields.ForeignKey(ProjectResource, 'project')
class Meta:
allowed_methods = ['get']
queryset = Build.objects.all()
filtering = {
"project": ALL,
}
| from django.contrib.auth.models import User
from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import DjangoAuthorization, Authorization
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from builds.models import Build
from projects.models import Project
class UserResource(ModelResource):
class Meta:
#authentication = BasicAuthentication()
#authorization = DjangoAuthorization()
#allowed_methods = ['get', 'post', 'put']
allowed_methods = ['get']
queryset = User.objects.all()
fields = ['username', 'first_name',
'last_name', 'last_login',
'id']
filtering = {
"username": ('exact', 'startswith'),
}
class ProjectResource(ModelResource):
user = fields.ForeignKey(UserResource, 'user')
class Meta:
#authentication = BasicAuthentication()
#authorization = DjangoAuthorization()
allowed_methods = ['get']
queryset = Project.objects.all()
filtering = {
"slug": ('exact', 'startswith'),
}
excludes = ['build_pdf', 'path', 'skip', 'featured']
class BuildResource(ModelResource):
project = fields.ForeignKey(ProjectResource, 'project')
class Meta:
allowed_methods = ['get']
queryset = Build.objects.all()
filtering = {
"project": ALL,
}
 | Make API read-only and publicly available. | Make API read-only and publicly available.
| Python | mit | d0ugal/readthedocs.org,atsuyim/readthedocs.org,clarkperkins/readthedocs.org,soulshake/readthedocs.org,agjohnson/readthedocs.org,SteveViss/readthedocs.org,jerel/readthedocs.org,raven47git/readthedocs.org,rtfd/readthedocs.org,johncosta/private-readthedocs.org,ojii/readthedocs.org,Carreau/readthedocs.org,sid-kap/readthedocs.org,espdev/readthedocs.org,wanghaven/readthedocs.org,d0ugal/readthedocs.org,sils1297/readthedocs.org,nikolas/readthedocs.org,alex/readthedocs.org,jerel/readthedocs.org,clarkperkins/readthedocs.org,stevepiercy/readthedocs.org,GovReady/readthedocs.org,LukasBoersma/readthedocs.org,kenwang76/readthedocs.org,tddv/readthedocs.org,mrshoki/readthedocs.org,wanghaven/readthedocs.org,alex/readthedocs.org,wanghaven/readthedocs.org,stevepiercy/readthedocs.org,CedarLogic/readthedocs.org,wijerasa/readthedocs.org,pombredanne/readthedocs.org,techtonik/readthedocs.org,safwanrahman/readthedocs.org,dirn/readthedocs.org,espdev/readthedocs.org,sunnyzwh/readthedocs.org,raven47git/readthedocs.org,royalwang/readthedocs.org,VishvajitP/readthedocs.org,singingwolfboy/readthedocs.org,davidfischer/readthedocs.org,atsuyim/readthedocs.org,safwanrahman/readthedocs.org,techtonik/readthedocs.org,soulshake/readthedocs.org,gjtorikian/readthedocs.org,takluyver/readthedocs.org,dirn/readthedocs.org,singingwolfboy/readthedocs.org,pombredanne/readthedocs.org,asampat3090/readthedocs.org,fujita-shintaro/readthedocs.org,soulshake/readthedocs.org,asampat3090/readthedocs.org,emawind84/readthedocs.org,asampat3090/readthedocs.org,Tazer/readthedocs.org,sils1297/readthedocs.org,laplaceliu/readthedocs.org,espdev/readthedocs.org,VishvajitP/readthedocs.org,laplaceliu/readthedocs.org,raven47git/readthedocs.org,michaelmcandrew/readthedocs.org,hach-que/readthedocs.org,tddv/readthedocs.org,kenshinthebattosai/readthedocs.org,gjtorikian/readthedocs.org,VishvajitP/readthedocs.org,SteveViss/readthedocs.org,jerel/readthedocs.org,mhils/readthedocs.org,hach-que/readthedocs.org,stevepiercy/readthedocs.org,emawind84/readthedocs.org,mrshoki/readthedocs.org,nyergler/pythonslides,hach-que/readthedocs.org,emawind84/readthedocs.org,jerel/readthedocs.org,sid-kap/readthedocs.org,kenwang76/readthedocs.org,rtfd/readthedocs.org,singingwolfboy/readthedocs.org,titiushko/readthedocs.org,Tazer/readthedocs.org,SteveViss/readthedocs.org,kdkeyser/readthedocs.org,Carreau/readthedocs.org,hach-que/readthedocs.org,Tazer/readthedocs.org,dirn/readthedocs.org,davidfischer/readthedocs.org,kdkeyser/readthedocs.org,emawind84/readthedocs.org,Tazer/readthedocs.org,attakei/readthedocs-oauth,istresearch/readthedocs.org,istresearch/readthedocs.org,VishvajitP/readthedocs.org,LukasBoersma/readthedocs.org,michaelmcandrew/readthedocs.org,titiushko/readthedocs.org,ojii/readthedocs.org,attakei/readthedocs-oauth,istresearch/readthedocs.org,michaelmcandrew/readthedocs.org,kenshinthebattosai/readthedocs.org,fujita-shintaro/readthedocs.org,nikolas/readthedocs.org,mhils/readthedocs.org,kdkeyser/readthedocs.org,davidfischer/readthedocs.org,clarkperkins/readthedocs.org,mrshoki/readthedocs.org,safwanrahman/readthedocs.org,laplaceliu/readthedocs.org,cgourlay/readthedocs.org,nikolas/readthedocs.org,nyergler/pythonslides,sils1297/readthedocs.org,safwanrahman/readthedocs.org,soulshake/readthedocs.org,LukasBoersma/readthedocs.org,attakei/readthedocs-oauth,laplaceliu/readthedocs.org,CedarLogic/readthedocs.org,wanghaven/readthedocs.org,michaelmcandrew/readthedocs.org,raven47git/readthedocs.org,atsuyim/readthedocs.org,KamranMackey/readthedocs.org,techtonik/readthedocs.org,dav
idfischer/readthedocs.org,KamranMackey/readthedocs.org,asampat3090/readthedocs.org,tddv/readthedocs.org,takluyver/readthedocs.org,d0ugal/readthedocs.org,dirn/readthedocs.org,ojii/readthedocs.org,johncosta/private-readthedocs.org,wijerasa/readthedocs.org,johncosta/private-readthedocs.org,fujita-shintaro/readthedocs.org,Carreau/readthedocs.org,agjohnson/readthedocs.org,rtfd/readthedocs.org,gjtorikian/readthedocs.org,KamranMackey/readthedocs.org,titiushko/readthedocs.org,clarkperkins/readthedocs.org,cgourlay/readthedocs.org,GovReady/readthedocs.org,cgourlay/readthedocs.org,royalwang/readthedocs.org,techtonik/readthedocs.org,attakei/readthedocs-oauth,alex/readthedocs.org,kenwang76/readthedocs.org,LukasBoersma/readthedocs.org,kenshinthebattosai/readthedocs.org,royalwang/readthedocs.org,ojii/readthedocs.org,cgourlay/readthedocs.org,KamranMackey/readthedocs.org,nyergler/pythonslides,espdev/readthedocs.org,atsuyim/readthedocs.org,SteveViss/readthedocs.org,sunnyzwh/readthedocs.org,agjohnson/readthedocs.org,titiushko/readthedocs.org,fujita-shintaro/readthedocs.org,mhils/readthedocs.org,Carreau/readthedocs.org,agjohnson/readthedocs.org,gjtorikian/readthedocs.org,kenwang76/readthedocs.org,mhils/readthedocs.org,wijerasa/readthedocs.org,sunnyzwh/readthedocs.org,kenshinthebattosai/readthedocs.org,alex/readthedocs.org,sid-kap/readthedocs.org,singingwolfboy/readthedocs.org,royalwang/readthedocs.org,mrshoki/readthedocs.org,stevepiercy/readthedocs.org,takluyver/readthedocs.org,nikolas/readthedocs.org,GovReady/readthedocs.org,sils1297/readthedocs.org,takluyver/readthedocs.org,istresearch/readthedocs.org,wijerasa/readthedocs.org,pombredanne/readthedocs.org,rtfd/readthedocs.org,d0ugal/readthedocs.org,GovReady/readthedocs.org,espdev/readthedocs.org,nyergler/pythonslides,sunnyzwh/readthedocs.org,CedarLogic/readthedocs.org,kdkeyser/readthedocs.org,CedarLogic/readthedocs.org,sid-kap/readthedocs.org | ---
+++
@@ -11,9 +11,10 @@
class UserResource(ModelResource):
class Meta:
- authentication = BasicAuthentication()
- authorization = DjangoAuthorization()
- allowed_methods = ['get', 'post', 'put']
+ #authentication = BasicAuthentication()
+ #authorization = DjangoAuthorization()
+ #allowed_methods = ['get', 'post', 'put']
+ allowed_methods = ['get']
queryset = User.objects.all()
fields = ['username', 'first_name',
'last_name', 'last_login',
@@ -27,9 +28,9 @@
user = fields.ForeignKey(UserResource, 'user')
class Meta:
- authentication = BasicAuthentication()
- authorization = DjangoAuthorization()
- allowed_methods = ['get', 'post', 'put']
+ #authentication = BasicAuthentication()
+ #authorization = DjangoAuthorization()
+ allowed_methods = ['get']
queryset = Project.objects.all()
filtering = {
"slug": ('exact', 'startswith'), |
12e044741e15534aad26238b50126d3b68f8a87d | tools/php2python.py | tools/php2python.py | #!/usr/bin/env python
# php2python.py - Converts PHP to Python using codegen.py
# Usage: php2python.py < input.php > output.py
import sys
sys.path.append('..')
from phply.phpparse import parser
from phply import pythonast
from ast import Module
from unparse import Unparser
input = sys.stdin
output = sys.stdout
body = [pythonast.from_phpast(ast) for ast in parser.parse(input.read())]
Unparser(body, output)
| #!/usr/bin/env python
# php2python.py - Converts PHP to Python using unparse.py
# Usage: php2python.py < input.php > output.py
import sys
sys.path.append('..')
from phply.phpparse import parser
from phply import pythonast
from ast import Module
from unparse import Unparser
input = sys.stdin
output = sys.stdout
body = [pythonast.from_phpast(ast) for ast in parser.parse(input.read())]
Unparser(body, output)
| Update comment to reflect switch to unparse.py | Update comment to reflect switch to unparse.py
| Python | bsd-3-clause | LocutusOfBorg/phply,andresriancho/phply,t-ashula/phply | ---
+++
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-# php2python.py - Converts PHP to Python using codegen.py
+# php2python.py - Converts PHP to Python using unparse.py
# Usage: php2python.py < input.php > output.py
import sys |
bd193b0fdb7fec412aed24ad8f4c6353372d634f | polling_stations/apps/data_collection/management/commands/import_westberks.py | polling_stations/apps/data_collection/management/commands/import_westberks.py | """
Import Wokingham Polling stations
"""
from data_collection.management.commands import BaseShpImporter, import_polling_station_shapefiles
class Command(BaseShpImporter):
"""
Imports the Polling Station data from Wokingham Council
"""
council_id = 'E06000037'
districts_name = 'polling_districts'
stations_name = 'polling_places.shp'
def district_record_to_dict(self, record):
return {
'internal_council_id': record[0],
'name': record[2],
}
def station_record_to_dict(self, record):
return {
'internal_council_id': record[4],
'postcode' : record[5].split(',')[-1],
'address' : "\n".join(record[5].split(',')[:-1]),
}
def import_polling_stations(self):
import_polling_station_shapefiles(self)
| """
Import Wokingham Polling stations
"""
from data_collection.management.commands import BaseShpShpImporter
class Command(BaseShpShpImporter):
"""
Imports the Polling Station data from Wokingham Council
"""
council_id = 'E06000037'
districts_name = 'polling_districts'
stations_name = 'polling_places.shp'
def district_record_to_dict(self, record):
return {
'internal_council_id': record[0],
'name': record[2],
}
def station_record_to_dict(self, record):
return {
'internal_council_id': record[4],
'postcode' : record[5].split(',')[-1],
'address' : "\n".join(record[5].split(',')[:-1]),
}
| Refactor West Berks to use new BaseShpShpImporter | Refactor West Berks to use new BaseShpShpImporter
| Python | bsd-3-clause | chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | ---
+++
@@ -2,22 +2,21 @@
Import Wokingham Polling stations
"""
-from data_collection.management.commands import BaseShpImporter, import_polling_station_shapefiles
+from data_collection.management.commands import BaseShpShpImporter
-class Command(BaseShpImporter):
+class Command(BaseShpShpImporter):
"""
Imports the Polling Station data from Wokingham Council
"""
council_id = 'E06000037'
districts_name = 'polling_districts'
stations_name = 'polling_places.shp'
-
- def district_record_to_dict(self, record):
+
+ def district_record_to_dict(self, record):
return {
'internal_council_id': record[0],
'name': record[2],
}
-
def station_record_to_dict(self, record):
return {
@@ -25,6 +24,3 @@
'postcode' : record[5].split(',')[-1],
'address' : "\n".join(record[5].split(',')[:-1]),
}
-
- def import_polling_stations(self):
- import_polling_station_shapefiles(self) |
a08483b5fc55556b46c08e988ac297b1dffaed48 | app/utils/utilities.py | app/utils/utilities.py | from re import search
from flask import g
from flask_restplus import abort
from flask_httpauth import HTTPBasicAuth
from app.models.user import User
from instance.config import Config
auth = HTTPBasicAuth()
def validate_email(email):
''' Method to check that a valid email is provided '''
email_re = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"
return True if search(email_re, email) else False
@auth.verify_token
def verify_token(token=None):
''' Method to verify token '''
token = request.headers.get('x-access-token')
user_id = User.verify_authentication_token(token)
if user_id:
g.current_user = User.query.filter_by(id=user.id).first()
return True
return False
| from re import search
from flask import g, request
from flask_httpauth import HTTPTokenAuth
from app.models.user import User
auth = HTTPTokenAuth(scheme='Token')
def validate_email(email):
''' Method to check that a valid email is provided '''
email_re = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"
return True if search(email_re, email) else False
@auth.verify_token
def verify_token(token=None):
''' Method to verify token '''
token = request.headers.get('x-access-token')
user_id = User.verify_authentication_token(token)
if user_id:
g.current_user = User.query.filter_by(id=user_id).first()
return True
return False
| Implement HTTPTokenAuth Store user data in global | Implement HTTPTokenAuth
Store user data in global
| Python | mit | Elbertbiggs360/buckelist-api | ---
+++
@@ -1,15 +1,13 @@
from re import search
-from flask import g
-from flask_restplus import abort
-from flask_httpauth import HTTPBasicAuth
+from flask import g, request
+from flask_httpauth import HTTPTokenAuth
from app.models.user import User
-from instance.config import Config
-auth = HTTPBasicAuth()
+auth = HTTPTokenAuth(scheme='Token')
def validate_email(email):
- ''' Method to check that a valid email is provided '''
+ ''' Method to check that a valid email is provided '''
email_re = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"
return True if search(email_re, email) else False
@@ -19,6 +17,6 @@
token = request.headers.get('x-access-token')
user_id = User.verify_authentication_token(token)
if user_id:
- g.current_user = User.query.filter_by(id=user.id).first()
+ g.current_user = User.query.filter_by(id=user_id).first()
return True
return False |
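For context on the switch above: flask_httpauth's HTTPTokenAuth parses the token out of the Authorization header and hands it to the verify_token callback, so protected views only need the login_required decorator. A self-contained sketch, assuming Flask and flask_httpauth are installed (the token table is a hypothetical stand-in for a real lookup):

from flask import Flask, g
from flask_httpauth import HTTPTokenAuth

app = Flask(__name__)
auth = HTTPTokenAuth(scheme='Token')

TOKENS = {'secret-token-1': 'alice'}  # hypothetical token store

@auth.verify_token
def verify_token(token):
    # Called with the value parsed from "Authorization: Token <value>".
    user = TOKENS.get(token)
    if user is None:
        return False
    g.current_user = user
    return True

@app.route('/private')
@auth.login_required
def private():
    return 'hello, %s' % g.current_user

Note that the module in this commit re-reads a custom 'x-access-token' header inside the callback instead of using the parsed token argument, so the scheme argument is effectively unused there.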
e2e494c30b2305b736423ce903103c3b7a459270 | app/template/rest.py | app/template/rest.py | from flask import Blueprint
from flask import (jsonify)
from sqlalchemy.exc import DataError
from sqlalchemy.orm.exc import NoResultFound
from app.dao.templates_dao import get_model_templates
from app.schemas import (template_schema, templates_schema)
template = Blueprint('template', __name__)
# I am going to keep these for admin like operations
# Permissions should restrict who can access this endpoint
# TODO auth to be added.
@template.route('/<int:template_id>', methods=['GET'])
@template.route('/', methods=['GET'])
def get_template(template_id=None):
try:
templates = get_model_templates(template_id=template_id)
except DataError:
return jsonify(result="error", message="Invalid template id"), 400
except NoResultFound:
return jsonify(result="error", message="Template not found"), 404
if isinstance(templates, list):
data, errors = templates_schema.dump(templates)
else:
data, errors = template_schema.dump(templates)
if errors:
return jsonify(result="error", message=str(errors))
return jsonify(data=data)
| from flask import Blueprint
from flask import (jsonify)
from sqlalchemy.exc import DataError
from sqlalchemy.orm.exc import NoResultFound
from app.dao.templates_dao import get_model_templates
from app.schemas import (template_schema, templates_schema)
template = Blueprint('template', __name__)
# I am going to keep these for admin like operations
# Permissions should restrict who can access this endpoint
# TODO auth to be added.
@template.route('/<int:template_id>', methods=['GET'])
@template.route('', methods=['GET'])
def get_template(template_id=None):
try:
templates = get_model_templates(template_id=template_id)
except DataError:
return jsonify(result="error", message="Invalid template id"), 400
except NoResultFound:
return jsonify(result="error", message="Template not found"), 404
if isinstance(templates, list):
data, errors = templates_schema.dump(templates)
else:
data, errors = template_schema.dump(templates)
if errors:
return jsonify(result="error", message=str(errors))
return jsonify(data=data)
| Remove trailing slash from template url | Remove trailing slash from template url
| Python | mit | alphagov/notifications-api,alphagov/notifications-api | ---
+++
@@ -13,7 +13,7 @@
# Permissions should restrict who can access this endpoint
# TODO auth to be added.
@template.route('/<int:template_id>', methods=['GET'])
[email protected]('/', methods=['GET'])
[email protected]('', methods=['GET'])
def get_template(template_id=None):
try:
templates = get_model_templates(template_id=template_id) |
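Background on the one-character change above: with the blueprint registered under url_prefix='/template', a rule of '/' yields the URL '/template/', and a request to '/template' is answered with a redirect instead of being served directly; registering the rule as '' maps the prefix itself. A small sketch of the same setup with a hypothetical handler:

from flask import Blueprint, Flask

template = Blueprint('template', __name__)

@template.route('')  # matches /template exactly, no redirect needed
def index():
    return 'ok'

app = Flask(__name__)
app.register_blueprint(template, url_prefix='/template')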
d267b3a52e28b31f4a9d5d99f4ae0742b0f8cca7 | admin/meetings/urls.py | admin/meetings/urls.py | from __future__ import absolute_import
from django.conf.urls import url
from admin.meetings import views
urlpatterns = [
url(r'^$', views.MeetingListView.as_view(), name='list'),
url(r'^create/$', views.MeetingCreateFormView.as_view(), name='create'),
url(r'^(?P<endpoint>[a-zA-Z0-9]+)/$', views.MeetingFormView.as_view(),
name='detail'),
]
| from __future__ import absolute_import
from django.conf.urls import url
from admin.meetings import views
urlpatterns = [
url(r'^$', views.MeetingListView.as_view(), name='list'),
url(r'^create/$', views.MeetingCreateFormView.as_view(), name='create'),
url(r'^(?P<endpoint>[a-zA-Z0-9_]+)/$', views.MeetingFormView.as_view(),
name='detail'),
]
| Allow underscores in meeting endpoints | Allow underscores in meeting endpoints
See https://www.flowdock.com/app/cos/osf-dev/threads/oX7kE29lhFd8quQWt-hbFpyVriH
| Python | apache-2.0 | caseyrollins/osf.io,binoculars/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,cslzchen/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,chrisseto/osf.io,chennan47/osf.io,cslzchen/osf.io,chrisseto/osf.io,mfraezz/osf.io,aaxelb/osf.io,binoculars/osf.io,felliott/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,erinspace/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,sloria/osf.io,aaxelb/osf.io,cslzchen/osf.io,icereval/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,saradbowman/osf.io,leb2dg/osf.io,baylee-d/osf.io,icereval/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,crcresearch/osf.io,laurenrevere/osf.io,crcresearch/osf.io,mattclark/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,baylee-d/osf.io,felliott/osf.io,chrisseto/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,aaxelb/osf.io,mfraezz/osf.io,adlius/osf.io,TomBaxter/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,binoculars/osf.io,crcresearch/osf.io,felliott/osf.io,leb2dg/osf.io,sloria/osf.io,adlius/osf.io,leb2dg/osf.io,erinspace/osf.io,caneruguz/osf.io,caseyrollins/osf.io,mattclark/osf.io,mfraezz/osf.io,chennan47/osf.io,adlius/osf.io,caseyrollins/osf.io,TomBaxter/osf.io,felliott/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,adlius/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,icereval/osf.io | ---
+++
@@ -7,6 +7,6 @@
urlpatterns = [
url(r'^$', views.MeetingListView.as_view(), name='list'),
url(r'^create/$', views.MeetingCreateFormView.as_view(), name='create'),
- url(r'^(?P<endpoint>[a-zA-Z0-9]+)/$', views.MeetingFormView.as_view(),
+ url(r'^(?P<endpoint>[a-zA-Z0-9_]+)/$', views.MeetingFormView.as_view(),
name='detail'),
] |
463e0ab2a77734cf6787d9cb788a57e7dd53ff06 | games/admin.py | games/admin.py | from django.contrib import admin
from .models import Game, Framework, Release, Asset
class GameAdmin(admin.ModelAdmin):
pass
class FrameworkAdmin(admin.ModelAdmin):
pass
class ReleaseAdmin(admin.ModelAdmin):
pass
class AssetAdmin(admin.ModelAdmin):
list_display = ['__unicode__', 'release']
admin.site.register(Game, GameAdmin)
admin.site.register(Release, ReleaseAdmin)
admin.site.register(Framework, FrameworkAdmin)
admin.site.register(Asset, AssetAdmin)
| from django.contrib import admin
from .models import Game, Framework, Release, Asset
class GameAdmin(admin.ModelAdmin):
list_display = ['name', 'uuid', 'owner', 'framework', 'public']
class FrameworkAdmin(admin.ModelAdmin):
pass
class ReleaseAdmin(admin.ModelAdmin):
pass
class AssetAdmin(admin.ModelAdmin):
list_display = ['__unicode__', 'release']
admin.site.register(Game, GameAdmin)
admin.site.register(Release, ReleaseAdmin)
admin.site.register(Framework, FrameworkAdmin)
admin.site.register(Asset, AssetAdmin)
| Add fields to the game display in Admin | Add fields to the game display in Admin
| Python | mit | stackmachine/bearweb,stackmachine/bearweb,stackmachine/bearweb,stackmachine/bearweb,stackmachine/bearweb,stackmachine/bearweb | ---
+++
@@ -4,7 +4,7 @@
class GameAdmin(admin.ModelAdmin):
- pass
+ list_display = ['name', 'uuid', 'owner', 'framework', 'public']
class FrameworkAdmin(admin.ModelAdmin): |
0a336447546442ab5d48716223713135a4812adf | get_problem.py | get_problem.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from bs4 import BeautifulSoup
from requests import get, codes
def match_soup_class(target, mode='class'):
def do_match(tag):
classes = tag.get(mode, [])
return all(c in classes for c in target)
return do_match
def main():
if len(sys.argv) == 1:
p = 1
else:
p = int(sys.argv[1])
url = 'https://projecteuler.net/problem=%d' % p
r = get(url)
if r.status_code != codes.ok:
print('[url request failed] ', url)
return
soup = BeautifulSoup(r.text, 'html.parser')
for content in soup.find_all(match_soup_class(['problem_content'])):
print(content.text)
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from bs4 import BeautifulSoup
from requests import get, codes
def match_soup_class(target, mode='class'):
def do_match(tag):
classes = tag.get(mode, [])
return all(c in classes for c in target)
return do_match
def main():
if len(sys.argv) == 1:
p = 1
else:
p = int(sys.argv[1])
url = 'https://projecteuler.net/problem=%d' % p
r = get(url)
if r.status_code != codes.ok:
print('[url request failed] ', url)
return
soup = BeautifulSoup(r.text, 'html.parser')
print("'''")
print('Problem %d' % p)
for content in soup.find_all(match_soup_class(['problem_content'])):
print(content.text)
print("'''")
if __name__ == '__main__':
main()
| ADD comment for python file | ADD comment for python file
| Python | mit | byung-u/ProjectEuler | ---
+++
@@ -25,9 +25,11 @@
print('[url request failed] ', url)
return
soup = BeautifulSoup(r.text, 'html.parser')
+ print("'''")
+ print('Problem %d' % p)
for content in soup.find_all(match_soup_class(['problem_content'])):
print(content.text)
-
+ print("'''")
if __name__ == '__main__':
main() |
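The match_soup_class helper above builds a predicate for BeautifulSoup's find_all: a tag matches only when every requested class is present in its class list. A self-contained check with illustrative markup:

from bs4 import BeautifulSoup

def match_soup_class(target, mode='class'):
    def do_match(tag):
        classes = tag.get(mode, [])
        return all(c in classes for c in target)
    return do_match

html = '<div class="problem_content extra">x</div><div class="other">y</div>'
soup = BeautifulSoup(html, 'html.parser')
hits = soup.find_all(match_soup_class(['problem_content']))
assert [tag.text for tag in hits] == ['x']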
42b4837570fd936c5a7593026fc4868c38d4b09d | base/management/commands/revision_count.py | base/management/commands/revision_count.py | # -*- encoding: utf-8 -*-
from django.core.management.base import BaseCommand
from django.apps import apps
# from reversion import revisions as reversion
from reversion.models import Version
from reversion.errors import RegistrationError
class Command(BaseCommand):
help = "Count reversion records for each model"
def handle(self, *args, **options):
total_count = 0
print_pattern = "{:<15} {:<30s} {:>10d}"
prev_app = None
for model in sorted(
apps.get_models(),
key=lambda mod: mod.__module__ + '.' + mod.__name__):
app_name = model._meta.app_label
model_name = model.__name__
try:
qs = Version.objects.get_for_model(model)
count = qs.count()
total_count += count
if prev_app and prev_app != app_name:
print()
print (print_pattern.format(
app_name if prev_app != app_name else "",
model_name, count
))
prev_app = app_name
except RegistrationError:
# model is not registered with reversion ignore
pass
print ()
print (print_pattern.format("Total Records", "", total_count))
| # -*- encoding: utf-8 -*-
from django.core.management.base import BaseCommand
from django.apps import apps
# from reversion import revisions as reversion
from reversion.models import Version
from reversion.errors import RegistrationError
class Command(BaseCommand):
help = "Count reversion records for each model"
def handle(self, *args, **options):
total_count = 0
print_pattern = "{:<15} {:<30s} {:>10d}"
title_pattern = "{:<15} {:<30s} {:>10s}"
self.stdout.write(title_pattern.format("App", "Model", "Revisions"))
self.stdout.write(title_pattern.format("===", "=====", "========="))
prev_app = None
for model in sorted(
apps.get_models(),
key=lambda mod: mod.__module__ + '.' + mod.__name__):
app_name = model._meta.app_label
model_name = model.__name__
try:
qs = Version.objects.get_for_model(model)
count = qs.count()
total_count += count
if prev_app and prev_app != app_name:
self.stdout.write("")
self.stdout.write(print_pattern.format(
app_name if prev_app != app_name else "",
model_name, count
))
prev_app = app_name
except RegistrationError:
# model is not registered with reversion ignore
pass
self.stdout.write("")
self.stdout.write(print_pattern.format("Total Records", "", total_count))
| Add titles to columns and use write instead of print | Add titles to columns and use write instead of print
| Python | apache-2.0 | pkimber/base,pkimber/base,pkimber/base,pkimber/base | ---
+++
@@ -15,6 +15,9 @@
def handle(self, *args, **options):
total_count = 0
print_pattern = "{:<15} {:<30s} {:>10d}"
+ title_pattern = "{:<15} {:<30s} {:>10s}"
+ self.stdout.write(title_pattern.format("App", "Model", "Revisions"))
+ self.stdout.write(title_pattern.format("===", "=====", "========="))
prev_app = None
for model in sorted(
apps.get_models(),
@@ -27,8 +30,8 @@
count = qs.count()
total_count += count
if prev_app and prev_app != app_name:
- print()
- print (print_pattern.format(
+ self.stdout.write("")
+ self.stdout.write(print_pattern.format(
app_name if prev_app != app_name else "",
model_name, count
))
@@ -36,5 +39,5 @@
except RegistrationError:
# model is not registered with reversion ignore
pass
- print ()
- print (print_pattern.format("Total Records", "", total_count))
+ self.stdout.write("")
+ self.stdout.write(print_pattern.format("Total Records", "", total_count)) |
c0a7bc2aa1b174af74a9f606c358a66f77f98ca9 | src/foremast/utils/get_vpc_id.py | src/foremast/utils/get_vpc_id.py | """Get VPC ID."""
import logging
import requests
from ..consts import API_URL
from ..exceptions import SpinnakerVPCIDNotFound, SpinnakerVPCNotFound
LOG = logging.getLogger(__name__)
def get_vpc_id(account, region):
"""Get vpc id.
Args:
account (str): AWS account name.
region (str): Region name, e.g. us-east-1.
Returns:
str: ID for the requested _account_ in _region_.
"""
url = '{0}/vpcs'.format(API_URL)
response = requests.get(url)
LOG.debug('VPC response:\n%s', response.text)
if not response.ok:
LOG.error(response.text)
raise SpinnakerVPCNotFound(response.text)
for vpc in response.json():
LOG.debug('VPC: %(name)s, %(account)s, %(region)s => %(id)s', vpc)
if all([vpc['name'] == 'vpc', vpc['account'] == account, vpc[
'region'] == region]):
LOG.info('Found VPC ID for %s in %s: %s', account, region,
vpc['id'])
return vpc['id']
else:
raise SpinnakerVPCIDNotFound(response.text)
| """Get VPC ID."""
import logging
import requests
from ..consts import API_URL
from ..exceptions import SpinnakerVPCIDNotFound, SpinnakerVPCNotFound
LOG = logging.getLogger(__name__)
def get_vpc_id(account, region):
"""Get vpc id.
Args:
account (str): AWS account name.
region (str): Region name, e.g. us-east-1.
Returns:
str: ID for the requested _account_ in _region_.
"""
url = '{0}/vpcs'.format(API_URL)
response = requests.get(url)
if not response.ok:
raise SpinnakerVPCNotFound(response.text)
vpcs = response.json()
vpc_id = ''
for vpc in vpcs:
LOG.debug('VPC: %(name)s, %(account)s, %(region)s => %(id)s', vpc)
if all([
vpc['name'] == 'vpc', vpc['account'] == account, vpc[
'region'] == region
]):
LOG.info('Found VPC ID for %s in %s: %s', account, region,
vpc['id'])
vpc_id = vpc['id']
break
else:
LOG.fatal('VPC list: %s', vpcs)
raise SpinnakerVPCIDNotFound('No VPC available for {0} [{1}].'.format(
account, region))
return vpc_id
| Use more concise error message for VPCs | fix: Use more concise error message for VPCs
See also: PSOBAT-1197
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast | ---
+++
@@ -22,18 +22,25 @@
url = '{0}/vpcs'.format(API_URL)
response = requests.get(url)
- LOG.debug('VPC response:\n%s', response.text)
-
if not response.ok:
- LOG.error(response.text)
raise SpinnakerVPCNotFound(response.text)
- for vpc in response.json():
+ vpcs = response.json()
+
+ vpc_id = ''
+ for vpc in vpcs:
LOG.debug('VPC: %(name)s, %(account)s, %(region)s => %(id)s', vpc)
- if all([vpc['name'] == 'vpc', vpc['account'] == account, vpc[
- 'region'] == region]):
+ if all([
+ vpc['name'] == 'vpc', vpc['account'] == account, vpc[
+ 'region'] == region
+ ]):
LOG.info('Found VPC ID for %s in %s: %s', account, region,
vpc['id'])
- return vpc['id']
+ vpc_id = vpc['id']
+ break
else:
- raise SpinnakerVPCIDNotFound(response.text)
+ LOG.fatal('VPC list: %s', vpcs)
+ raise SpinnakerVPCIDNotFound('No VPC available for {0} [{1}].'.format(
+ account, region))
+
+ return vpc_id |
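The control flow above leans on Python's for/else: the else suite runs only when the loop finishes without hitting break, which is exactly the "no VPC matched" case. A tiny standalone example of the idiom:

def first_even(numbers):
    for n in numbers:
        if n % 2 == 0:
            found = n
            break
    else:
        # Loop exhausted without a break, i.e. nothing matched.
        raise ValueError('no even number in %r' % (numbers,))
    return found

assert first_even([1, 3, 4, 5]) == 4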
9eaba3b531d26a61ada7aabd45bc76e68d2375ec | pyoracc/atf/atffile.py | pyoracc/atf/atffile.py | from .atflex import AtfLexer
from .atfyacc import AtfParser
from mako.template import Template
class AtfFile(object):
template = Template("${text.serialize()}")
def __init__(self, content):
self.content = content
if content[-1] != '\n':
content += "\n"
lexer = AtfLexer().lexer
parser = AtfParser().parser
self.text = parser.parse(content, lexer=lexer)
def __str__(self):
return AtfFile.template.render_unicode(**vars(self))
def serialize(self):
return AtfFile.template.render_unicode(**vars(self))
def _debug_lex_and_yac_file(file, debug=0):
import codecs
text = codecs.open(file, encoding='utf-8-sig').read()
from pyoracc.atf.atffile import AtfLexer
lexer = AtfLexer(debug=debug).lexer
lexer.input(text)
for tok in lexer:
print(tok)
print("Lexed file")
lexer = AtfLexer().lexer
parser = AtfParser().parser
parser.parse(text, lexer=lexer)
print("Parsed file")
| from .atflex import AtfLexer
from .atfyacc import AtfParser
from mako.template import Template
class AtfFile(object):
template = Template("${text.serialize()}")
def __init__(self, content):
self.content = content
if content[-1] != '\n':
content += "\n"
lexer = AtfLexer().lexer
parser = AtfParser().parser
self.text = parser.parse(content, lexer=lexer)
def __str__(self):
return AtfFile.template.render_unicode(**vars(self))
def serialize(self):
return AtfFile.template.render_unicode(**vars(self))
def _debug_lex_and_yac_file(file, debug=0, skipinvalid=False):
import codecs
text = codecs.open(file, encoding='utf-8-sig').read()
from pyoracc.atf.atffile import AtfLexer
lexer = AtfLexer(debug=debug, skipinvalid=skipinvalid).lexer
lexer.input(text)
for tok in lexer:
print(tok)
print("Lexed file")
lexer = AtfLexer().lexer
parser = AtfParser().parser
parser.parse(text, lexer=lexer)
print("Parsed file")
| Add skip invalid to debug | Add skip invalid to debug
| Python | mit | UCL/pyoracc | ---
+++
@@ -22,11 +22,11 @@
return AtfFile.template.render_unicode(**vars(self))
-def _debug_lex_and_yac_file(file, debug=0):
+def _debug_lex_and_yac_file(file, debug=0, skipinvalid=False):
import codecs
text = codecs.open(file, encoding='utf-8-sig').read()
from pyoracc.atf.atffile import AtfLexer
- lexer = AtfLexer(debug=debug).lexer
+ lexer = AtfLexer(debug=debug, skipinvalid=skipinvalid).lexer
lexer.input(text)
for tok in lexer:
print(tok) |
8b7aa0a540c7927b53adf6368e9cb8476816d941 | asciibooth/statuses.py | asciibooth/statuses.py | # encoding: UTF-8
import random
from . import config
def sampler(source):
def reshuffle():
copy = list(source)
random.shuffle(copy)
return copy
stack = reshuffle()
lastitem = ''
while True:
try:
item = stack.pop()
if item == lastitem:
item = stack.pop()
yield item
lastitem = item
except IndexError:
stack = reshuffle()
continue
def incremental_chance(increment=0.01, start=0.5):
current_chance = start
while True:
r = random.random()
success = (r < current_chance)
if success:
current_chance = start
else:
current_chance += increment
yield success
def status_generator():
random_status = sampler(config.TWEET_MESSAGES)
show_status = incremental_chance(start=0, increment=0.25)
fixed = config.TWEET_FIXED
while True:
status = ''
if next(show_status):
status = next(random_status) + " "
yield "{status}{fixed}".format(status=status, fixed=fixed)
if __name__ == '__main__':
gen = status_generator()
for i in range(0, 20):
print(next(gen))
| # encoding: UTF-8
import random
from . import config
def sampler(source):
def reshuffle():
copy = list(source)
random.shuffle(copy)
return copy
stack = reshuffle()
lastitem = ''
while True:
try:
item = stack.pop()
if item == lastitem:
item = stack.pop()
yield item
lastitem = item
except IndexError:
stack = reshuffle()
continue
def incremental_chance(start=0.5, increment=0.01):
current_chance = start
while True:
r = random.random()
success = (r < current_chance)
if success:
current_chance = start
else:
current_chance += increment
yield success
def status_generator():
random_status = sampler(config.TWEET_MESSAGES)
show_status = incremental_chance(start=config.TWEET_CHANCE_INITIAL, increment=config.TWEET_CHANCE_INCREMENT)
fixed = config.TWEET_FIXED
while True:
status = ''
if next(show_status):
status = next(random_status) + " "
yield "{status}{fixed}".format(status=status, fixed=fixed)
if __name__ == '__main__':
gen = status_generator()
for i in range(0, 20):
print(next(gen))
| Add configuration options for randomness | Add configuration options for randomness
| Python | cc0-1.0 | jnv/asciibooth,jnv/asciibooth | ---
+++
@@ -22,7 +22,7 @@
stack = reshuffle()
continue
-def incremental_chance(increment=0.01, start=0.5):
+def incremental_chance(start=0.5, increment=0.01):
current_chance = start
while True:
@@ -37,7 +37,7 @@
def status_generator():
random_status = sampler(config.TWEET_MESSAGES)
- show_status = incremental_chance(start=0, increment=0.25)
+ show_status = incremental_chance(start=config.TWEET_CHANCE_INITIAL, increment=config.TWEET_CHANCE_INCREMENT)
fixed = config.TWEET_FIXED
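For intuition about the values this commit moves into config: with start=0 and increment=0.25, the success probability over consecutive failed draws climbs 0.0, 0.25, 0.5, 0.75, 1.0, so at latest every fifth status carries a random message. A deterministic sketch of that ramp (randomness removed, literals illustrative):

probabilities = []
chance = 0.0
while chance <= 1.0:
    probabilities.append(chance)
    chance += 0.25  # stand-in for config.TWEET_CHANCE_INCREMENT

assert probabilities == [0.0, 0.25, 0.5, 0.75, 1.0]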
|
0d023a51283d477e4b3d02059361b003a91134e0 | jaspyx/scope.py | jaspyx/scope.py | class Scope(object):
tmp_index = 0
def __init__(self, parent=None):
self.parent = parent
self.prefix = []
self.declarations = {}
self.globals = set()
self.inherited = True
def prefixed(self, name):
return '.'.join(self.prefix + [name])
def declare(self, name, var=True):
self.declarations[name] = var
def get_scope(self, name, inherit=False):
if name in self.declarations and (not inherit or self.inherited):
return self
elif self.parent is not None:
return self.parent.get_scope(name, True)
else:
return None
def declare_global(self, name):
self.globals.add(name)
def is_global(self, name):
return name in self.globals
def get_global_scope(self):
if self.parent:
return self.parent.get_global_scope()
else:
return self
@classmethod
def alloc_temp(cls):
cls.tmp_index += 1
return '__jpx_tmp_%i' % cls.tmp_index
| class Scope(object):
def __init__(self, parent=None):
self.parent = parent
self.prefix = []
self.declarations = {}
self.globals = set()
self.inherited = True
def prefixed(self, name):
return '.'.join(self.prefix + [name])
def declare(self, name, var=True):
self.declarations[name] = var
def get_scope(self, name, inherit=False):
if name in self.declarations and (not inherit or self.inherited):
return self
elif self.parent is not None:
return self.parent.get_scope(name, True)
else:
return None
def declare_global(self, name):
self.globals.add(name)
def is_global(self, name):
return name in self.globals
def get_global_scope(self):
if self.parent:
return self.parent.get_global_scope()
else:
return self
| Remove temp var allocation code. | Remove temp var allocation code.
| Python | mit | ztane/jaspyx,iksteen/jaspyx | ---
+++
@@ -1,6 +1,4 @@
class Scope(object):
- tmp_index = 0
-
def __init__(self, parent=None):
self.parent = parent
self.prefix = []
@@ -33,8 +31,3 @@
return self.parent.get_global_scope()
else:
return self
-
- @classmethod
- def alloc_temp(cls):
- cls.tmp_index += 1
- return '__jpx_tmp_%i' % cls.tmp_index |
4524b88eef8a46d40c4d353c3561401ac3689878 | bookmarks/urls.py | bookmarks/urls.py | from django.conf.urls import patterns, url
# for voting
from voting.views import vote_on_object
from bookmarks.models import Bookmark
urlpatterns = patterns('',
url(r'^$', 'bookmarks.views.bookmarks', name="all_bookmarks"),
url(r'^your_bookmarks/$', 'bookmarks.views.your_bookmarks', name="your_bookmarks"),
url(r'^add/$', 'bookmarks.views.add', name="add_bookmark"),
url(r'^(\d+)/delete/$', 'bookmarks.views.delete', name="delete_bookmark_instance"),
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
vote_on_object, dict(
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html',
allow_xmlhttprequest=True)),
)
| from django.conf.urls import patterns, url
from django.views.decorators.csrf import csrf_exempt
# for voting
from voting.views import vote_on_object
from bookmarks.models import Bookmark
urlpatterns = patterns('',
url(r'^$', 'bookmarks.views.bookmarks', name="all_bookmarks"),
url(r'^your_bookmarks/$', 'bookmarks.views.your_bookmarks', name="your_bookmarks"),
url(r'^add/$', 'bookmarks.views.add', name="add_bookmark"),
url(r'^(\d+)/delete/$', 'bookmarks.views.delete', name="delete_bookmark_instance"),
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
csrf_exempt(vote_on_object), dict(
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html',
allow_xmlhttprequest=True)),
)
| Disable csrf checks for voting | Disable csrf checks for voting
| Python | mit | incuna/incuna-bookmarks,incuna/incuna-bookmarks | ---
+++
@@ -1,4 +1,5 @@
from django.conf.urls import patterns, url
+from django.views.decorators.csrf import csrf_exempt
# for voting
from voting.views import vote_on_object
@@ -12,7 +13,7 @@
# for voting
(r'^(?P<object_id>\d+)/(?P<direction>up|down|clear)vote/?$',
- vote_on_object, dict(
+ csrf_exempt(vote_on_object), dict(
model=Bookmark,
template_object_name='bookmark',
template_name='kb/link_confirm_vote.html', |
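Because vote_on_object ships with the third-party voting package, the CSRF exemption is applied at the URLconf rather than by editing the view's source; csrf_exempt returns a wrapper whose csrf_exempt attribute tells CsrfViewMiddleware to skip the check. A small self-contained illustration with a stand-in view:

from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt

def vendor_view(request):  # stand-in for voting.views.vote_on_object
    return HttpResponse('ok')

wrapped = csrf_exempt(vendor_view)
assert wrapped.csrf_exempt is True  # flag consulted by CsrfViewMiddleware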
c2b0f66d5760d61444b4909e40c45993780cd473 | examples/champion.py | examples/champion.py | import cassiopeia as cass
from cassiopeia.core import Champion
def test_cass():
#annie = Champion(name="Annie", region="NA")
annie = Champion(name="Annie")
print(annie.name)
print(annie.title)
print(annie.title)
for spell in annie.spells:
print(spell.name, spell.keywords)
print(annie.info.difficulty)
print(annie.passive.name)
#print(annie.recommended_itemsets[0].item_sets[0].items)
print(annie.free_to_play)
print(annie._Ghost__all_loaded)
print(annie)
return
print()
#ziggs = cass.get_champion(region="NA", "Ziggs")
ziggs = cass.get_champion("Renekton")
print(ziggs.name)
print(ziggs.region)
#print(ziggs.recommended_itemset[0].item_sets[0].items)
print(ziggs.free_to_play)
for spell in ziggs.spells:
for var in spell.variables:
print(spell.name, var)
print(ziggs._Ghost__all_loaded)
if __name__ == "__main__":
test_cass()
| import cassiopeia as cass
from cassiopeia.core import Champion
def test_cass():
#annie = Champion(name="Annie", region="NA")
annie = Champion(name="Annie")
print(annie.name)
print(annie.title)
print(annie.title)
for spell in annie.spells:
print(spell.name, spell.keywords)
print(annie.info.difficulty)
print(annie.passive.name)
#print(annie.recommended_itemsets[0].item_sets[0].items)
print(annie.free_to_play)
print(annie._Ghost__all_loaded)
print(annie)
print()
#ziggs = cass.get_champion(region="NA", "Ziggs")
ziggs = cass.get_champion("Ziggs")
print(ziggs.name)
print(ziggs.region)
#print(ziggs.recommended_itemset[0].item_sets[0].items)
print(ziggs.free_to_play)
for spell in ziggs.spells:
for var in spell.variables:
print(spell.name, var)
print(ziggs._Ghost__all_loaded)
if __name__ == "__main__":
test_cass()
| Remove `return`, get Ziggs instead of Renekton, since we're saving as Ziggs | Remove `return`, get Ziggs instead of Renekton, since we're saving as Ziggs
| Python | mit | robrua/cassiopeia,meraki-analytics/cassiopeia,10se1ucgo/cassiopeia | ---
+++
@@ -16,12 +16,11 @@
print(annie.free_to_play)
print(annie._Ghost__all_loaded)
print(annie)
- return
print()
#ziggs = cass.get_champion(region="NA", "Ziggs")
- ziggs = cass.get_champion("Renekton")
+ ziggs = cass.get_champion("Ziggs")
print(ziggs.name)
print(ziggs.region)
#print(ziggs.recommended_itemset[0].item_sets[0].items) |
f22eff612427dc5f530858bb47326d69b48aa68a | darchan/urls.py | darchan/urls.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
'darchan.views',
url(r'^view_matrix/$', 'v_view_last_matrix', name='view_last_matrix'),
url(r'^view_matrix/(?P<builder_id>\d+)/(?P<depth>\d+)/$', 'v_view_matrix',
name='view_matrix'),
url(r'^generate_matrix/$', 'v_generate_matrix', name='generate_matrix'),
url(r'^download_csv/(?P<builder_id>\d+)/(?P<depth>\d+)/$',
'v_download_csv', name='download_csv'),
)
| # -*- coding: utf-8 -*-
# from __future__ import unicode_literals
from django.conf.urls import url
from darchan import views
urlpatterns = [
url(r'^view_matrix/$',
views.v_view_last_matrix, name='view_last_matrix'),
url(r'^view_matrix/(?P<builder_id>\d+)/(?P<depth>\d+)/$',
views.v_view_matrix, name='view_matrix'),
url(r'^generate_matrix/$',
views.v_generate_matrix, name='generate_matrix'),
url(r'^download_csv/(?P<builder_id>\d+)/(?P<depth>\d+)/$',
views.v_download_csv, name='download_csv'),
]
| Update support to Django 1.9 | Update support to Django 1.9
| Python | mpl-2.0 | Pawamoy/django-archan,Pawamoy/django-archan,Pawamoy/django-archan | ---
+++
@@ -1,13 +1,15 @@
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-from django.conf.urls import patterns, url
+# from __future__ import unicode_literals
+from django.conf.urls import url
+from darchan import views
-urlpatterns = patterns(
- 'darchan.views',
- url(r'^view_matrix/$', 'v_view_last_matrix', name='view_last_matrix'),
- url(r'^view_matrix/(?P<builder_id>\d+)/(?P<depth>\d+)/$', 'v_view_matrix',
- name='view_matrix'),
- url(r'^generate_matrix/$', 'v_generate_matrix', name='generate_matrix'),
+urlpatterns = [
+ url(r'^view_matrix/$',
+ views.v_view_last_matrix, name='view_last_matrix'),
+ url(r'^view_matrix/(?P<builder_id>\d+)/(?P<depth>\d+)/$',
+ views.v_view_matrix, name='view_matrix'),
+ url(r'^generate_matrix/$',
+ views.v_generate_matrix, name='generate_matrix'),
url(r'^download_csv/(?P<builder_id>\d+)/(?P<depth>\d+)/$',
- 'v_download_csv', name='download_csv'),
-)
+ views.v_download_csv, name='download_csv'),
+] |
7e2d6cfa6b6a536d1df6e2d2d523a4bb4094f5eb | src/poliastro/plotting/misc.py | src/poliastro/plotting/misc.py | from poliastro.bodies import (
Earth,
Jupiter,
Mars,
Mercury,
Neptune,
Saturn,
Uranus,
Venus,
)
from poliastro.plotting.core import OrbitPlotter2D, OrbitPlotter3D
from poliastro.twobody import Orbit
def plot_solar_system(outer=True, epoch=None, use_3d=False):
"""
Plots the whole solar system in one single call.
.. versionadded:: 0.9.0
Parameters
------------
outer : bool, optional
Whether to print the outer Solar System, default to True.
epoch: ~astropy.time.Time, optional
Epoch value of the plot, default to J2000.
"""
bodies = [Mercury, Venus, Earth, Mars]
if outer:
bodies.extend([Jupiter, Saturn, Uranus, Neptune])
if use_3d:
op = OrbitPlotter3D()
else:
op = OrbitPlotter2D()
for body in bodies:
orb = Orbit.from_body_ephem(body, epoch)
op.plot(orb, label=str(body))
# Sets frame to the orbit of the Earth by default
# TODO: Wait until https://github.com/poliastro/poliastro/issues/316
# op.set_frame(*Orbit.from_body_ephem(Earth, epoch).pqw())
return op
| from typing import Union
from poliastro.bodies import (
Earth,
Jupiter,
Mars,
Mercury,
Neptune,
Saturn,
Uranus,
Venus,
)
from poliastro.plotting.core import OrbitPlotter2D, OrbitPlotter3D
from poliastro.twobody import Orbit
def plot_solar_system(outer=True, epoch=None, use_3d=False):
"""
Plots the whole solar system in one single call.
.. versionadded:: 0.9.0
Parameters
------------
outer : bool, optional
Whether to print the outer Solar System, default to True.
epoch : ~astropy.time.Time, optional
Epoch value of the plot, default to J2000.
use_3d : bool, optional
Produce 3D plot, default to False.
"""
bodies = [Mercury, Venus, Earth, Mars]
if outer:
bodies.extend([Jupiter, Saturn, Uranus, Neptune])
if use_3d:
op = OrbitPlotter3D() # type: Union[OrbitPlotter3D, OrbitPlotter2D]
else:
op = OrbitPlotter2D()
op.set_frame(*Orbit.from_body_ephem(Earth, epoch).pqw()) # type: ignore
for body in bodies:
orb = Orbit.from_body_ephem(body, epoch)
op.plot(orb, label=str(body))
return op
| Set frame only when using 2D | Set frame only when using 2D
| Python | mit | Juanlu001/poliastro,Juanlu001/poliastro,Juanlu001/poliastro,poliastro/poliastro | ---
+++
@@ -1,3 +1,5 @@
+from typing import Union
+
from poliastro.bodies import (
Earth,
Jupiter,
@@ -22,24 +24,24 @@
------------
outer : bool, optional
Whether to print the outer Solar System, default to True.
- epoch: ~astropy.time.Time, optional
+ epoch : ~astropy.time.Time, optional
Epoch value of the plot, default to J2000.
+ use_3d : bool, optional
+ Produce 3D plot, default to False.
+
"""
bodies = [Mercury, Venus, Earth, Mars]
if outer:
bodies.extend([Jupiter, Saturn, Uranus, Neptune])
if use_3d:
- op = OrbitPlotter3D()
+ op = OrbitPlotter3D() # type: Union[OrbitPlotter3D, OrbitPlotter2D]
else:
op = OrbitPlotter2D()
+ op.set_frame(*Orbit.from_body_ephem(Earth, epoch).pqw()) # type: ignore
for body in bodies:
orb = Orbit.from_body_ephem(body, epoch)
op.plot(orb, label=str(body))
- # Sets frame to the orbit of the Earth by default
- # TODO: Wait until https://github.com/poliastro/poliastro/issues/316
- # op.set_frame(*Orbit.from_body_ephem(Earth, epoch).pqw())
-
return op |
40347e45646aa57c9181cb289dfa88a3b3eb3396 | experiment/models.py | experiment/models.py | from django.db import models
from experiment_session.models import ExperimentSession
from django.core.validators import MinValueValidator
class Experiment(models.Model):
LIGHTOFF_FIXED = 'fixed'
LIGHTOFF_WAITING = 'waiting'
_LIGHTOFF_CHOICES = (
(LIGHTOFF_FIXED, 'Fixed'),
(LIGHTOFF_WAITING, 'Waiting')
)
AUDIO_NONE = 'none'
AUDIO_BEEP = 'beep'
_AUDIO_CHOICES = (
(AUDIO_NONE, 'None'),
(AUDIO_BEEP, 'Audible beep on error')
)
name = models.CharField(unique=True, max_length=255)
lightoffmode = models.CharField(
choices=_LIGHTOFF_CHOICES,
max_length=30
)
lightofftimeout = models.IntegerField(validators=(MinValueValidator(0),))
audiomode = models.CharField(
choices=_AUDIO_CHOICES,
max_length=30
)
repeatscount = models.IntegerField(
validators=(
MinValueValidator(1),
)
)
createdon = models.DateTimeField(auto_now_add=True, editable=False)
traininglength = models.IntegerField(validators=(MinValueValidator(0),))
instructions = models.CharField(max_length=10000, default='')
def __str__(self):
return self.name
| from django.db import models
from experiment_session.models import ExperimentSession
from django.core.validators import MinValueValidator
class Experiment(models.Model):
LIGHTOFF_FIXED = 'fixed'
LIGHTOFF_WAITING = 'waiting'
_LIGHTOFF_CHOICES = (
(LIGHTOFF_FIXED, 'Fixed'),
(LIGHTOFF_WAITING, 'Waiting')
)
AUDIO_NONE = 'none'
AUDIO_BEEP = 'beep'
_AUDIO_CHOICES = (
(AUDIO_NONE, 'None'),
(AUDIO_BEEP, 'Audible beep on error')
)
name = models.CharField(unique=True, max_length=255)
lightoffmode = models.CharField(
choices=_LIGHTOFF_CHOICES,
max_length=30
)
lightofftimeout = models.IntegerField(validators=(MinValueValidator(0),))
audiomode = models.CharField(
choices=_AUDIO_CHOICES,
max_length=30
)
repeatscount = models.IntegerField(
validators=(
MinValueValidator(1),
)
)
createdon = models.DateTimeField(auto_now_add=True, editable=False)
traininglength = models.IntegerField(validators=(MinValueValidator(0),))
instructions = models.CharField(max_length=10000, blank=True)
def __str__(self):
return self.name
| Allow empty strings as instructions | Allow empty strings as instructions
| Python | mit | piotrb5e3/1023alternative-backend | ---
+++
@@ -42,7 +42,7 @@
traininglength = models.IntegerField(validators=(MinValueValidator(0),))
- instructions = models.CharField(max_length=10000, default='')
+ instructions = models.CharField(max_length=10000, blank=True)
def __str__(self):
return self.name |
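The distinction behind this one-word change: for a Django CharField, default='' only supplies a value when none is given, while blank=True is the validation switch that lets forms and full_clean accept an explicitly empty string. A hypothetical model showing the validating variant:

from django.db import models

class Note(models.Model):
    # blank=True lets '' pass form and model validation; CharField
    # already falls back to '' when no value is provided, so no
    # explicit default is required.
    body = models.CharField(max_length=10000, blank=True)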
04944ccd83e924fed6b351a6073d837a5ce639e9 | sevenbridges/models/compound/price_breakdown.py | sevenbridges/models/compound/price_breakdown.py | import six
from sevenbridges.meta.resource import Resource
from sevenbridges.meta.fields import StringField
class Breakdown(Resource):
"""
Breakdown resource contains price breakdown by storage and computation.
"""
storage = StringField(read_only=True)
computation = StringField(read_only=True)
def __str__(self):
return six.text_type(
'<Breakdown: storage={storage}, computation={computation}>'.format(
storage=self.storage, computation=self.computation
)
)
| import six
from sevenbridges.meta.resource import Resource
from sevenbridges.meta.fields import StringField
class Breakdown(Resource):
"""
Breakdown resource contains price breakdown by storage and computation.
"""
storage = StringField(read_only=True)
computation = StringField(read_only=True)
data_transfer = StringField(read_only=True)
def __str__(self):
if self.data_transfer:
return six.text_type(
'<Breakdown: storage={storage}, computation={computation}, '
'data_transfer={data_transfer}>'.format(
storage=self.storage, computation=self.computation,
data_transfer=self.data_transfer
)
)
return six.text_type(
'<Breakdown: storage={storage}, computation={computation}>'.format(
storage=self.storage, computation=self.computation
)
)
| Add data_transfer to price breakdown | Add data_transfer to price breakdown
| Python | apache-2.0 | sbg/sevenbridges-python | ---
+++
@@ -10,8 +10,17 @@
"""
storage = StringField(read_only=True)
computation = StringField(read_only=True)
+ data_transfer = StringField(read_only=True)
def __str__(self):
+ if self.data_transfer:
+ return six.text_type(
+ '<Breakdown: storage={storage}, computation={computation}, '
+ 'data_transfer={data_transfer}>'.format(
+ storage=self.storage, computation=self.computation,
+ data_transfer=self.data_transfer
+ )
+ )
return six.text_type(
'<Breakdown: storage={storage}, computation={computation}>'.format(
storage=self.storage, computation=self.computation |
4257910deffdb624c0c8faf325933fb5e775d9e6 | furious/_pkg_meta.py | furious/_pkg_meta.py | version_info = (1, 4, 0)
version = '.'.join(map(str, version_info))
| version_info = (1, 5, 0)
version = '.'.join(map(str, version_info))
| Update the version to 1.5.0 to prepare for next release | Update the version to 1.5.0 to prepare for next release
Update the _pkg_meta version to 1.5.0 to prepare for the next release.
| Python | apache-2.0 | andreleblanc-wf/furious,Workiva/furious,andreleblanc-wf/furious,Workiva/furious,beaulyddon-wf/furious,beaulyddon-wf/furious | ---
+++
@@ -1,2 +1,2 @@
-version_info = (1, 4, 0)
+version_info = (1, 5, 0)
version = '.'.join(map(str, version_info)) |
9b0d5796c1e48a3bf294971dc129499876936a36 | send2trash/plat_osx.py | send2trash/plat_osx.py | # Copyright 2017 Virgil Dupras
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
from platform import mac_ver
from sys import version_info
# If macOS is 11.0 or newer try to use the pyobjc version to get around #51
# NOTE: pyobjc only supports python >= 3.6
if version_info >= (3, 6) and int(mac_ver()[0].split(".", 1)[0]) >= 11:
try:
from .plat_osx_pyobjc import send2trash
except ImportError:
# Try to fall back to ctypes version, although likely problematic still
from .plat_osx_ctypes import send2trash
else:
# Just use the old version otherwise
from .plat_osx_ctypes import send2trash # noqa: F401
| # Copyright 2017 Virgil Dupras
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
from platform import mac_ver
from sys import version_info
# NOTE: version of pyobjc only supports python >= 3.6 and 10.9+
macos_ver = tuple(int(part) for part in mac_ver()[0].split("."))
if version_info >= (3, 6) and macos_ver >= (10, 9):
try:
from .plat_osx_pyobjc import send2trash
except ImportError:
# Try to fall back to ctypes version, although likely problematic still
from .plat_osx_ctypes import send2trash
else:
# Just use the old version otherwise
from .plat_osx_ctypes import send2trash # noqa: F401
| Change conditional for macos pyobjc usage | Change conditional for macos pyobjc usage
macOS 11.x will occasionally identify as 10.16, since there was no real
reason to prevent on all supported platforms allow.
| Python | bsd-3-clause | hsoft/send2trash | ---
+++
@@ -7,9 +7,9 @@
from platform import mac_ver
from sys import version_info
-# If macOS is 11.0 or newer try to use the pyobjc version to get around #51
-# NOTE: pyobjc only supports python >= 3.6
-if version_info >= (3, 6) and int(mac_ver()[0].split(".", 1)[0]) >= 11:
+# NOTE: version of pyobjc only supports python >= 3.6 and 10.9+
+macos_ver = tuple(int(part) for part in mac_ver()[0].split("."))
+if version_info >= (3, 6) and macos_ver >= (10, 9):
try:
from .plat_osx_pyobjc import send2trash
except ImportError: |
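The tuple conversion above is doing real work: platform.mac_ver() can report Big Sur as '10.16' when running in compatibility mode, and raw string comparison is lexicographic, so '10.10' would sort below '10.9'. Comparing tuples of ints handles both cases, as a quick sketch shows:

def parse_ver(version_string):
    return tuple(int(part) for part in version_string.split('.'))

assert parse_ver('10.10') > parse_ver('10.9')  # numeric, as intended
assert parse_ver('10.16') >= (10, 9)           # Big Sur compatibility id
assert '10.10' < '10.9'                        # the lexicographic trap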
965ff5b8fb83891a04c1601ceb3dc11f255dcb53 | web/__init__.py | web/__init__.py | import os
from flask import Flask
__version__ = '0.1.5'
def create_app(name=__name__):
app = Flask(name)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['DB_URI']
from web.main import main_module
from web.news import news_module
app.register_blueprint(main_module, url_prefix='/')
app.register_blueprint(news_module, url_prefix='/news')
from web.news.models import db
db.init_app(app)
return app
app = create_app()
if __name__ == '__main__':
host = os.environ.get('HOST', '0.0.0.0')
port = int(os.environ.get('PORT', 8005))
debug = bool(os.environ.get('DEBUG', False))
app.run(host=host, port=port, debug=debug)
| import os
from flask import Flask
__version__ = '0.1.5'
def create_app(name=__name__):
app = Flask(name)
from web.main import main_module
app.register_blueprint(main_module, url_prefix='/')
return app
app = create_app()
if __name__ == '__main__':
host = os.environ.get('HOST', '0.0.0.0')
port = int(os.environ.get('PORT', 8005))
debug = bool(os.environ.get('DEBUG', False))
app.run(host=host, port=port, debug=debug)
| Remove all traces of the news module | Remove all traces of the news module
| Python | bsd-3-clause | suminb/tldr-web,suminb/tldr-web,suminb/tldr-web | ---
+++
@@ -8,16 +8,9 @@
def create_app(name=__name__):
app = Flask(name)
- app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['DB_URI']
from web.main import main_module
- from web.news import news_module
-
app.register_blueprint(main_module, url_prefix='/')
- app.register_blueprint(news_module, url_prefix='/news')
-
- from web.news.models import db
- db.init_app(app)
return app
|
c09b468583c97d7831478119614b231be0d24afa | scripts/generate_input_syntax.py | scripts/generate_input_syntax.py | #!/usr/bin/env python
import sys, os
# get the location of this script
app_path = os.path.abspath(os.path.dirname(sys.argv[0]))
# this script is actually in the scripts subdirectory, so go up a level
app_path += '/..'
# Set the name of the application here and moose directory relative to the application
app_name = 'falcon'
MOOSE_DIR = app_path + '/../moose'
# See if MOOSE_DIR is already in the environment instead
if os.environ.has_key("MOOSE_DIR"):
MOOSE_DIR = os.environ['MOOSE_DIR']
sys.path.append(MOOSE_DIR + '/scripts/syntaxHTML')
import genInputFileSyntaxHTML
# this will automatically copy the documentation to the base directory
# in a folder named syntax
genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, MOOSE_DIR)
| #!/usr/bin/env python
import sys, os
# get the location of this script
app_path = os.path.abspath(os.path.dirname(sys.argv[0]))
# Set the name of the application here and moose directory relative to the application
app_name = 'falcon'
MOOSE_DIR = os.path.abspath(os.path.join(app_path, '..', '..', 'moose'))
FRAMEWORK_DIR = os.path.abspath(os.path.join(app_path, '..', '..', 'moose', 'framework'))
#### See if MOOSE_DIR is already in the environment instead
if os.environ.has_key("MOOSE_DIR"):
MOOSE_DIR = os.environ['MOOSE_DIR']
FRAMEWORK_DIR = os.path.join(MOOSE_DIR, 'framework')
if os.environ.has_key("FRAMEWORK_DIR"):
FRAMEWORK_DIR = os.environ['FRAMEWORK_DIR']
sys.path.append(FRAMEWORK_DIR + '/scripts/syntaxHTML')
import genInputFileSyntaxHTML
# this will automatically copy the documentation to the base directory
# in a folder named syntax
genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, FRAMEWORK_DIR)
| Update scripts to reflect new MOOSE_DIR definition | Update scripts to reflect new MOOSE_DIR definition
r25009
| Python | lgpl-2.1 | idaholab/falcon,aeslaughter/falcon,idaholab/falcon,aeslaughter/falcon,idaholab/falcon,idaholab/falcon,aeslaughter/falcon | ---
+++
@@ -3,19 +3,22 @@
# get the location of this script
app_path = os.path.abspath(os.path.dirname(sys.argv[0]))
-# this script is actually in the scripts subdirectory, so go up a level
-app_path += '/..'
# Set the name of the application here and moose directory relative to the application
app_name = 'falcon'
-MOOSE_DIR = app_path + '/../moose'
-# See if MOOSE_DIR is already in the environment instead
+
+MOOSE_DIR = os.path.abspath(os.path.join(app_path, '..', '..', 'moose'))
+FRAMEWORK_DIR = os.path.abspath(os.path.join(app_path, '..', '..', 'moose', 'framework'))
+#### See if MOOSE_DIR is already in the environment instead
if os.environ.has_key("MOOSE_DIR"):
MOOSE_DIR = os.environ['MOOSE_DIR']
+ FRAMEWORK_DIR = os.path.join(MOOSE_DIR, 'framework')
+if os.environ.has_key("FRAMEWORK_DIR"):
+ FRAMEWORK_DIR = os.environ['FRAMEWORK_DIR']
-sys.path.append(MOOSE_DIR + '/scripts/syntaxHTML')
+sys.path.append(FRAMEWORK_DIR + '/scripts/syntaxHTML')
import genInputFileSyntaxHTML
# this will automatically copy the documentation to the base directory
# in a folder named syntax
-genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, MOOSE_DIR)
+genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, FRAMEWORK_DIR) |
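Two notes on the pattern above: the computed paths are only defaults that environment variables may override, and dict.has_key() is Python 2 only; on Python 3 the same check is spelled with the in operator or os.environ.get. A compact sketch of the override logic under those assumptions:

import os

app_path = os.path.abspath(os.path.dirname(__file__))
moose_dir = os.environ.get(
    'MOOSE_DIR', os.path.abspath(os.path.join(app_path, '..', '..', 'moose')))
framework_dir = os.environ.get(
    'FRAMEWORK_DIR', os.path.join(moose_dir, 'framework'))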
6dbd72af13f017d9b1681da49f60aaf69f0a9e41 | tests/transformer_test_case.py | tests/transformer_test_case.py | class TransformerTestCase(object):
def get_pattern_for_spec(self, patterns, spec):
for pattern in patterns:
if pattern.search(spec):
return pattern
| from spec2scl import settings
from spec2scl import specfile
class TransformerTestCase(object):
def make_prep(self, spec):
# just create one of settings.RUNTIME_SECTIONS, so that we can test all the matching
return '%prep\n' + spec
def get_pattern_for_spec(self, handler, spec_text):
spec = specfile.Specfile(spec_text)
for s_name, s_text in spec.sections:
for i, pattern in enumerate(handler.matches):
if pattern.search(s_text) and s_name in handler.sections[i]:
return pattern
| Improve our custom test case | Improve our custom test case
- create a make_prep method that allows quick creation of prep section from anything for good testing of custom transformers (that usually don't transform header section)
- improve get_pattern_for_spec with section checking
| Python | mit | mbooth101/spec2scl,sclorg/spec2scl | ---
+++
@@ -1,6 +1,15 @@
+from spec2scl import settings
+from spec2scl import specfile
+
class TransformerTestCase(object):
- def get_pattern_for_spec(self, patterns, spec):
- for pattern in patterns:
- if pattern.search(spec):
- return pattern
+ def make_prep(self, spec):
+ # just create one of settings.RUNTIME_SECTIONS, so that we can test all the matching
+ return '%prep\n' + spec
+ def get_pattern_for_spec(self, handler, spec_text):
+ spec = specfile.Specfile(spec_text)
+ for s_name, s_text in spec.sections:
+ for i, pattern in enumerate(handler.matches):
+ if pattern.search(s_text) and s_name in handler.sections[i]:
+ return pattern
+ |
4de9bee656041c9cfcd91ec61d294460f6427d77 | lib/database.py | lib/database.py |
class Database:
def __init__(self, db):
self.db = db
self.cursor = db.cursor()
def disconnect(self):
self.cursor.close()
self.db.close()
def query(self, sql):
self.cursor.execute(sql)
return self.cursor.fetchall()
def insert(self, sql):
self.cursor.execute(sql)
self.db.commit()
| import pymysql
class Database:
def __init__(self, db):
self.db = db
self.cursor = db.cursor()
def disconnect(self):
self.cursor.close()
self.db.close()
def query(self, sql):
try:
self.cursor.execute(sql)
return self.cursor.fetchall()
except pymysql.OperationalError:
self.db.ping()
self.cursor.execute(sql)
return self.cursor.fetchall()
def insert(self, sql):
try:
self.cursor.execute(sql)
self.db.commit()
except pymysql.OperationalError:
self.db.ping()
self.cursor.execute(sql)
self.db.commit()
| Reconnect if the connection times out. | Reconnect if the connection times out.
| Python | mit | aquaticpond/pyqodbc | ---
+++
@@ -1,3 +1,4 @@
+import pymysql
class Database:
def __init__(self, db):
@@ -9,11 +10,19 @@
self.db.close()
def query(self, sql):
- self.cursor.execute(sql)
- return self.cursor.fetchall()
+ try:
+ self.cursor.execute(sql)
+ return self.cursor.fetchall()
+ except pymysql.OperationalError:
+ self.db.ping()
+ self.cursor.execute(sql)
+ return self.cursor.fetchall()
def insert(self, sql):
- self.cursor.execute(sql)
- self.db.commit()
-
-
+ try:
+ self.cursor.execute(sql)
+ self.db.commit()
+ except pymysql.OperationalError:
+ self.db.ping()
+ self.cursor.execute(sql)
+ self.db.commit() |
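The query and insert methods above now duplicate the ping-and-retry dance; one way to factor it out, sketched under the assumption of the same pymysql connection object, is a small retry decorator:

import pymysql

def with_reconnect(method):
    def wrapper(self, sql):
        try:
            return method(self, sql)
        except pymysql.OperationalError:
            self.db.ping()  # re-establishes the dropped connection
            return method(self, sql)
    return wrapper

class Database:
    def __init__(self, db):
        self.db = db
        self.cursor = db.cursor()

    @with_reconnect
    def query(self, sql):
        self.cursor.execute(sql)
        return self.cursor.fetchall()

    @with_reconnect
    def insert(self, sql):
        self.cursor.execute(sql)
        self.db.commit()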
8657f7aef8944eae718cabaaa7dfd25d2ec95960 | conditions/__init__.py | conditions/__init__.py | from .conditions import *
from .exceptions import *
from .fields import *
from .lists import *
from .types import *
| from .conditions import Condition, CompareCondition
from .exceptions import UndefinedConditionError, InvalidConditionError
from .fields import ConditionsWidget, ConditionsFormField, ConditionsField
from .lists import CondList, CondAllList, CondAnyList, eval_conditions
from .types import conditions_from_module
__all__ = [
'Condition', 'CompareCondition', 'UndefinedConditionError', 'InvalidConditionError', 'ConditionsWidget',
'ConditionsFormField', 'ConditionsField', 'CondList', 'CondAllList', 'CondAnyList', 'eval_conditions',
'conditions_from_module',
]
| Replace star imports with explicit imports | PEP8: Replace star imports with explicit imports
| Python | isc | RevolutionTech/django-conditions,RevolutionTech/django-conditions,RevolutionTech/django-conditions | ---
+++
@@ -1,5 +1,12 @@
-from .conditions import *
-from .exceptions import *
-from .fields import *
-from .lists import *
-from .types import *
+from .conditions import Condition, CompareCondition
+from .exceptions import UndefinedConditionError, InvalidConditionError
+from .fields import ConditionsWidget, ConditionsFormField, ConditionsField
+from .lists import CondList, CondAllList, CondAnyList, eval_conditions
+from .types import conditions_from_module
+
+
+__all__ = [
+ 'Condition', 'CompareCondition', 'UndefinedConditionError', 'InvalidConditionError', 'ConditionsWidget',
+ 'ConditionsFormField', 'ConditionsField', 'CondList', 'CondAllList', 'CondAnyList', 'eval_conditions',
+ 'conditions_from_module',
+] |
7d79e6f0404b04ababaca3d8c50b1e682fd64222 | chainer/initializer.py | chainer/initializer.py | import typing as tp # NOQA
from chainer import types # NOQA
from chainer import utils
class Initializer(object):
"""Initializes array.
It initializes the given array.
Attributes:
dtype: Data type specifier. It is for type check in ``__call__``
function.
"""
def __init__(self, dtype=None):
# type: (tp.Optional[types.DTypeSpec]) -> None
self.dtype = dtype # type: types.DTypeSpec
def __call__(self, array):
# type: (types.NdArray) -> None
"""Initializes given array.
This method destructively changes the value of array.
The derived class is required to implement this method.
The algorithms used to make the new values depend on the
concrete derived classes.
Args:
array (:ref:`ndarray`):
An array to be initialized by this initializer.
"""
raise NotImplementedError()
# Original code forked from MIT licensed keras project
# https://github.com/fchollet/keras/blob/master/keras/initializations.py
def get_fans(shape):
if not isinstance(shape, tuple):
raise ValueError('shape must be tuple')
if len(shape) < 2:
raise ValueError('shape must be of length >= 2: shape={}', shape)
receptive_field_size = utils.size_of_shape(shape[2:])
fan_in = shape[1] * receptive_field_size
fan_out = shape[0] * receptive_field_size
return fan_in, fan_out
| import typing as tp # NOQA
from chainer import types # NOQA
from chainer import utils
class Initializer(object):
"""Initializes array.
It initializes the given array.
Attributes:
dtype: Data type specifier. It is for type check in ``__call__``
function.
"""
def __init__(self, dtype=None):
# type: (tp.Optional[types.DTypeSpec]) -> None
self.dtype = dtype # type: types.DTypeSpec
def __call__(self, array):
# type: (types.NdArray) -> None
"""Initializes given array.
This method destructively changes the value of array.
The derived class is required to implement this method.
The algorithms used to make the new values depend on the
concrete derived classes.
Args:
array (:ref:`ndarray`):
An array to be initialized by this initializer.
"""
raise NotImplementedError()
# Original code forked from MIT licensed keras project
# https://github.com/fchollet/keras/blob/master/keras/initializations.py
def get_fans(shape):
if not isinstance(shape, tuple):
raise ValueError(
'shape must be tuple. Actual type: {}'.format(type(shape)))
if len(shape) < 2:
raise ValueError(
'shape must be of length >= 2. Actual shape: {}'.format(shape))
receptive_field_size = utils.size_of_shape(shape[2:])
fan_in = shape[1] * receptive_field_size
fan_out = shape[0] * receptive_field_size
return fan_in, fan_out
| Fix error messages in get_fans | Fix error messages in get_fans
| Python | mit | niboshi/chainer,tkerola/chainer,niboshi/chainer,keisuke-umezawa/chainer,okuta/chainer,chainer/chainer,wkentaro/chainer,niboshi/chainer,okuta/chainer,okuta/chainer,wkentaro/chainer,pfnet/chainer,okuta/chainer,hvy/chainer,wkentaro/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,chainer/chainer,chainer/chainer,hvy/chainer,wkentaro/chainer,chainer/chainer,niboshi/chainer,hvy/chainer,hvy/chainer | ---
+++
@@ -42,10 +42,12 @@
def get_fans(shape):
if not isinstance(shape, tuple):
- raise ValueError('shape must be tuple')
+ raise ValueError(
+ 'shape must be tuple. Actual type: {}'.format(type(shape)))
if len(shape) < 2:
- raise ValueError('shape must be of length >= 2: shape={}', shape)
+ raise ValueError(
+ 'shape must be of length >= 2. Actual shape: {}'.format(shape))
receptive_field_size = utils.size_of_shape(shape[2:])
fan_in = shape[1] * receptive_field_size |
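As a rough usage sketch of the get_fans above (assuming a Conv2D-style weight shape), showing the fan arithmetic and the clearer errors:

# shape = (out_channels, in_channels, kh, kw); receptive field = 3 * 3 = 9
fan_in, fan_out = get_fans((16, 3, 3, 3))
assert (fan_in, fan_out) == (3 * 9, 16 * 9)
# get_fans([16, 3]) now reports the offending type in its ValueError,
# and get_fans((16,)) reports the offending shape.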
302d9797b7ccb46e7b9575513c0a2c5461e156a5 | yvs/set_pref.py | yvs/set_pref.py | # yvs.set_pref
# coding=utf-8
from __future__ import unicode_literals
import json
import sys
import yvs.shared as shared
# Parse pref set data from the given JSON string
def parse_pref_set_data_str(pref_set_data_str):
pref_set_data = json.loads(
pref_set_data_str)['alfredworkflow']['variables']
return [pref_set_data[key] for key in
['pref_id', 'pref_name', 'value_id', 'value_name']]
# Set the YouVersion Suggest preference with the given key
def set_pref(pref_id, value_id):
user_prefs = shared.get_user_prefs()
user_prefs[pref_id] = value_id
# If new language is set, ensure that preferred version is updated also
if pref_id == 'language':
bible = shared.get_bible_data(language_id=value_id)
user_prefs['version'] = bible['default_version']
shared.clear_cache()
shared.set_user_prefs(user_prefs)
def main(pref_set_data_str):
pref_id, pref_name, value_id, value_name = parse_pref_set_data_str(
pref_set_data_str)
set_pref(pref_id, value_id)
print(pref_set_data_str.encode('utf-8'))
if __name__ == '__main__':
main(sys.argv[1].decode('utf-8'))
| # yvs.set_pref
# coding=utf-8
from __future__ import unicode_literals
import json
import sys
import yvs.shared as shared
# Parse pref set data from the given JSON string
def parse_pref_set_data_str(pref_set_data_str):
pref_set_data = json.loads(
pref_set_data_str)['alfredworkflow']['variables']
return [pref_set_data[key] for key in
('pref_id', 'pref_name', 'value_id', 'value_name')]
# Set the YouVersion Suggest preference with the given key
def set_pref(pref_id, value_id):
user_prefs = shared.get_user_prefs()
user_prefs[pref_id] = value_id
# If new language is set, ensure that preferred version is updated also
if pref_id == 'language':
bible = shared.get_bible_data(language_id=value_id)
user_prefs['version'] = bible['default_version']
shared.clear_cache()
shared.set_user_prefs(user_prefs)
def main(pref_set_data_str):
pref_id, pref_name, value_id, value_name = parse_pref_set_data_str(
pref_set_data_str)
set_pref(pref_id, value_id)
print(pref_set_data_str.encode('utf-8'))
if __name__ == '__main__':
main(sys.argv[1].decode('utf-8'))
| Switch to tuple for pref set data key list | Switch to tuple for pref set data key list
| Python | mit | caleb531/youversion-suggest,caleb531/youversion-suggest | ---
+++
@@ -15,7 +15,7 @@
pref_set_data = json.loads(
pref_set_data_str)['alfredworkflow']['variables']
return [pref_set_data[key] for key in
- ['pref_id', 'pref_name', 'value_id', 'value_name']]
+ ('pref_id', 'pref_name', 'value_id', 'value_name')]
# Set the YouVersion Suggest preference with the given key |
3786d778f583f96cb4dce37a175d2c460a020724 | cnxauthoring/events.py | cnxauthoring/events.py | # -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from pyramid.events import NewRequest
def add_cors_headers(request, response):
settings = request.registry.settings
acac = settings['cors.access_control_allow_credentials']
acao = settings['cors.access_control_allow_origin'].split()
acah = settings['cors.access_control_allow_headers']
acam = settings['cors.access_control_allow_methods']
if acac:
response.headerlist.append(
('Access-Control-Allow-Credentials', acac))
if acao:
if request.host in acao:
response.headerlist.append(
('Access-Control-Allow-Origin', request.host))
else:
response.headerlist.append(
('Access-Control-Allow-Origin', acao[0]))
if acah:
response.headerlist.append(
('Access-Control-Allow-Headers', acah))
if acam:
response.headerlist.append(
('Access-Control-Allow-Methods', acam))
def new_request_subscriber(event):
request = event.request
request.add_response_callback(add_cors_headers)
def main(config):
config.add_subscriber(new_request_subscriber, NewRequest)
| # -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from pyramid.events import NewRequest
def add_cors_headers(request, response):
settings = request.registry.settings
acac = settings['cors.access_control_allow_credentials']
acao = settings['cors.access_control_allow_origin'].split()
acah = settings['cors.access_control_allow_headers']
acam = settings['cors.access_control_allow_methods']
if acac:
response.headerlist.append(
('Access-Control-Allow-Credentials', acac))
if acao:
if request.headers.get('Origin') in acao:
response.headerlist.append(
('Access-Control-Allow-Origin', request.headers.get('Origin')))
else:
response.headerlist.append(
('Access-Control-Allow-Origin', acao[0]))
if acah:
response.headerlist.append(
('Access-Control-Allow-Headers', acah))
if acam:
response.headerlist.append(
('Access-Control-Allow-Methods', acam))
def new_request_subscriber(event):
request = event.request
request.add_response_callback(add_cors_headers)
def main(config):
config.add_subscriber(new_request_subscriber, NewRequest)
| Fix Access-Control-Allow-Origin to return the request origin | Fix Access-Control-Allow-Origin to return the request origin
request.host is the host part of the request url. For example, if
webview is trying to access http://localhost:8080/users/profile,
request.host would be localhost:8080, not the page's origin. It's the
Origin field in the request headers that we should be matching.
| Python | agpl-3.0 | Connexions/cnx-authoring | ---
+++
@@ -18,9 +18,9 @@
response.headerlist.append(
('Access-Control-Allow-Credentials', acac))
if acao:
- if request.host in acao:
+ if request.headers.get('Origin') in acao:
response.headerlist.append(
- ('Access-Control-Allow-Origin', request.host))
+ ('Access-Control-Allow-Origin', request.headers.get('Origin')))
else:
response.headerlist.append(
('Access-Control-Allow-Origin', acao[0])) |
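A self-contained sketch (illustrative names, not part of the commit) of why the Origin header is the right value to echo back:

ALLOWED_ORIGINS = ['http://localhost:8000', 'https://app.example.com']

def allow_origin(request_headers):
    # Browsers send the page's origin in the Origin header; CORS requires
    # the server to echo an allowed value, not its own host.
    origin = request_headers.get('Origin')
    if origin in ALLOWED_ORIGINS:
        return origin
    return ALLOWED_ORIGINS[0]  # fallback, mirroring acao[0] above

assert allow_origin({'Origin': 'https://app.example.com'}) == 'https://app.example.com'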
7699c2922e6ac1c75b152028df53ea44ee55db11 | lib/version.py | lib/version.py | ELECTRUM_VERSION = "1.9.3" # version of the client package
PROTOCOL_VERSION = '0.6' # protocol version requested
SEED_VERSION = 6 # bump this every time the seed generation is modified
SEED_PREFIX = '01' # the hash of the mnemonic seed must begin with this
TRANSLATION_ID = 4183 # version of the wiki page
| ELECTRUM_VERSION = "1.9.3" # version of the client package
PROTOCOL_VERSION = '0.6' # protocol version requested
SEED_VERSION = 6 # bump this every time the seed generation is modified
SEED_PREFIX = '01' # the hash of the mnemonic seed must begin with this
TRANSLATION_ID = 4190 # version of the wiki page
| Add all missing French translation strings Wikia revision: 4190 | Add all missing French translation strings
Wikia revision: 4190
| Python | mit | dabura667/electrum,protonn/Electrum-Cash,procrasti/electrum,dashpay/electrum-dash,pknight007/electrum-vtc,neocogent/electrum,pooler/electrum-ltc,cryptapus/electrum,protonn/Electrum-Cash,dabura667/electrum,romanz/electrum,cryptapus/electrum-myr,dashpay/electrum-dash,fyookball/electrum,argentumproject/electrum-arg,lbryio/lbryum,digitalbitbox/electrum,dabura667/electrum,fyookball/electrum,argentumproject/electrum-arg,spesmilo/electrum,spesmilo/electrum,FairCoinTeam/electrum-fair,imrehg/electrum,spesmilo/electrum,molecular/electrum,vertcoin/electrum-vtc,dashpay/electrum-dash,vialectrum/vialectrum,cryptapus/electrum-uno,vertcoin/electrum-vtc,aasiutin/electrum,FairCoinTeam/electrum-fair,imrehg/electrum,digitalbitbox/electrum,wakiyamap/electrum-mona,imrehg/electrum,argentumproject/electrum-arg,vertcoin/electrum-vtc,pooler/electrum-ltc,fireduck64/electrum,cryptapus/electrum,dashpay/electrum-dash,aasiutin/electrum,procrasti/electrum,fujicoin/electrum-fjc,FairCoinTeam/electrum-fair,vertcoin/electrum-vtc,wakiyamap/electrum-mona,fireduck64/electrum,imrehg/electrum,kyuupichan/electrum,cryptapus/electrum-uno,protonn/Electrum-Cash,cryptapus/electrum-myr,asfin/electrum,fujicoin/electrum-fjc,romanz/electrum,procrasti/electrum,pknight007/electrum-vtc,neocogent/electrum,neocogent/electrum,cryptapus/electrum-uno,asfin/electrum,fireduck64/electrum,wakiyamap/electrum-mona,pknight007/electrum-vtc,molecular/electrum,pooler/electrum-ltc,kyuupichan/electrum,pooler/electrum-ltc,molecular/electrum,fyookball/electrum,procrasti/electrum,molecular/electrum,lbryio/lbryum,aasiutin/electrum,pknight007/electrum-vtc,cryptapus/electrum-uno,FairCoinTeam/electrum-fair,cryptapus/electrum-myr,wakiyamap/electrum-mona,protonn/Electrum-Cash,cryptapus/electrum-myr,cryptapus/electrum,romanz/electrum,argentumproject/electrum-arg,fireduck64/electrum,asfin/electrum,digitalbitbox/electrum,dabura667/electrum,vialectrum/vialectrum,vialectrum/vialectrum,kyuupichan/electrum,spesmilo/electrum,fujicoin/electrum-fjc,digitalbitbox/electrum,aasiutin/electrum | ---
+++
@@ -2,4 +2,4 @@
PROTOCOL_VERSION = '0.6' # protocol version requested
SEED_VERSION = 6 # bump this every time the seed generation is modified
SEED_PREFIX = '01' # the hash of the mnemonic seed must begin with this
-TRANSLATION_ID = 4183 # version of the wiki page
+TRANSLATION_ID = 4190 # version of the wiki page |
a4b0830b7336694dacc822077c2ce6901be4929b | widgy/contrib/widgy_mezzanine/search_indexes.py | widgy/contrib/widgy_mezzanine/search_indexes.py | from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
| from haystack import indexes
from widgy.contrib.widgy_mezzanine import get_widgypage_model
from widgy.templatetags.widgy_tags import render_root
from widgy.utils import html_to_plaintext
from .signals import widgypage_pre_index
WidgyPage = get_widgypage_model()
class PageIndex(indexes.SearchIndex, indexes.Indexable):
title = indexes.CharField(model_attr='title')
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs):
widgypage_pre_index.send(sender=self)
return super(PageIndex, self).full_prepare(*args, **kwargs)
def get_model(self):
return WidgyPage
def index_queryset(self, using=None):
return self.get_model().objects.published()
def prepare_text(self, obj):
html = render_root({}, obj, 'root_node')
content = html_to_plaintext(html)
keywords = ' '.join(self.prepare_keywords(obj))
return ' '.join([obj.title, keywords, obj.description,
content])
def prepare_keywords(self, obj):
return [unicode(k) for k in obj.keywords.all()]
| Index the URL of the WidgyPage. | Index the URL of the WidgyPage.
This way, you don't have to fetch the page object when you want to put a
link in the search results.
| Python | apache-2.0 | j00bar/django-widgy,j00bar/django-widgy,j00bar/django-widgy | ---
+++
@@ -14,6 +14,7 @@
date = indexes.DateTimeField(model_attr='publish_date')
description = indexes.CharField(model_attr='description')
keywords = indexes.MultiValueField()
+ get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
text = indexes.CharField(document=True)
def full_prepare(self, *args, **kwargs): |
46741fdbda00a8b1574dfdf0689c8a26454d28f6 | actions/cloudbolt_plugins/aws/poll_for_init_complete.py | actions/cloudbolt_plugins/aws/poll_for_init_complete.py | import sys
import time
from infrastructure.models import Server
from jobs.models import Job
TIMEOUT = 600
def is_reachable(server):
"""
:type server: Server
"""
instance_id = server.ec2serverinfo.instance_id
ec2_region = server.ec2serverinfo.ec2_region
rh = server.resource_handler.cast()
rh.connect_ec2(ec2_region)
wc = rh.resource_technology.work_class
instance = wc.get_instance(instance_id)
conn = instance.connection
status = conn.get_all_instance_status(instance_id)
return True if status[0].instance_status.details[u'reachability'] == u'passed' else False
def run(job, logger=None):
assert isinstance(job, Job)
assert job.type == u'provision'
server = job.server_set.first()
timeout = time.time() + TIMEOUT
while True:
if is_reachable(server):
job.set_progress("EC2 instance is reachable.")
break
elif time.time() > timeout:
job.set_progress("Waited {} seconds. Continuing...".format(TIMEOUT))
break
else:
time.sleep(2)
return "", "", ""
if __name__ == '__main__':
if len(sys.argv) != 2:
print ' Usage: {} <job_id>'.format(sys.argv[0])
sys.exit(1)
print run(Job.objects.get(id=sys.argv[1]))
| import time
from jobs.models import Job
TIMEOUT = 600
def is_reachable(server):
instance_id = server.ec2serverinfo.instance_id
ec2_region = server.ec2serverinfo.ec2_region
rh = server.resource_handler.cast()
rh.connect_ec2(ec2_region)
wc = rh.resource_technology.work_class
instance = wc.get_instance(instance_id)
status = instance.connection.get_all_instance_status(instance_id)
return True if status[0].instance_status.details[u'reachability'] == u'passed' else False
def run(job, logger=None, **kwargs):
assert isinstance(job, Job) and job.type == u'provision'
server = job.server_set.first()
timeout = time.time() + TIMEOUT
while True:
if is_reachable(server):
job.set_progress("EC2 instance is reachable.")
break
elif time.time() > timeout:
job.set_progress("Waited {} seconds. Continuing...".format(TIMEOUT))
break
else:
time.sleep(2)
return "", "", ""
| Clean up poll for init complete script | Clean up poll for init complete script
| Python | apache-2.0 | CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge | ---
+++
@@ -1,16 +1,11 @@
-import sys
import time
-from infrastructure.models import Server
from jobs.models import Job
TIMEOUT = 600
def is_reachable(server):
- """
- :type server: Server
- """
instance_id = server.ec2serverinfo.instance_id
ec2_region = server.ec2serverinfo.ec2_region
@@ -19,14 +14,12 @@
wc = rh.resource_technology.work_class
instance = wc.get_instance(instance_id)
- conn = instance.connection
- status = conn.get_all_instance_status(instance_id)
+ status = instance.connection.get_all_instance_status(instance_id)
return True if status[0].instance_status.details[u'reachability'] == u'passed' else False
-def run(job, logger=None):
- assert isinstance(job, Job)
- assert job.type == u'provision'
+def run(job, logger=None, **kwargs):
+ assert isinstance(job, Job) and job.type == u'provision'
server = job.server_set.first()
timeout = time.time() + TIMEOUT
@@ -42,11 +35,3 @@
time.sleep(2)
return "", "", ""
-
-
-if __name__ == '__main__':
- if len(sys.argv) != 2:
- print ' Usage: {} <job_id>'.format(sys.argv[0])
- sys.exit(1)
-
- print run(Job.objects.get(id=sys.argv[1])) |
fd77039104175a4b5702b46b21a2fa223676ddf4 | bowser/Database.py | bowser/Database.py | import json
import redis
class Database(object):
def __init__(self):
self.redis = redis.StrictRedis(host='redis', port=6379, db=0)
def set_data_of_server_channel(self, server, channel, data):
self.redis.hmset(server, {channel: json.dumps(data)})
def fetch_data_of_server_channel(self, server, channel):
data = self.redis.hget(server, channel)
json_data = json.loads(data.decode('utf-8'))
return json_data
| import json
import redis
class Database(object):
def __init__(self):
self.redis = redis.StrictRedis(host='redis', port=6379, db=0)
def set_data_of_server_channel(self, server, channel, data):
self.redis.hmset(server, {channel: json.dumps(data)})
def fetch_data_of_server_channel(self, server, channel):
data = self.redis.hget(server, channel)
if data is None:
raise KeyError
json_data = json.loads(data.decode('utf-8'))
return json_data
| Raise KeyErrors for missing data in redis | fix: Raise KeyErrors for missing data in redis
| Python | mit | kevinkjt2000/discord-minecraft-server-status | ---
+++
@@ -11,5 +11,7 @@
def fetch_data_of_server_channel(self, server, channel):
data = self.redis.hget(server, channel)
+ if data is None:
+ raise KeyError
json_data = json.loads(data.decode('utf-8'))
return json_data |
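The guard can be exercised without a live redis; a minimal sketch of the same None-handling (the helper name is illustrative):

import json

def decode_or_raise(raw):
    if raw is None:          # redis hget returns None for missing fields
        raise KeyError
    return json.loads(raw.decode('utf-8'))

assert decode_or_raise(b'{"motd": "hi"}') == {'motd': 'hi'}
try:
    decode_or_raise(None)
except KeyError:
    pass  # callers now see a dict-like miss instead of an AttributeError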
3f166b110d4e8623966ca29c71445973da4876f9 | armstrong/hatband/forms.py | armstrong/hatband/forms.py | from django import forms
from django.db import models
from . import widgets
RICH_TEXT_DBFIELD_OVERRIDES = {
models.TextField: {'widget': widgets.RichTextWidget},
}
class BackboneFormMixin(object):
class Media:
js = (
'hatband/js/jquery-1.6.2.min.js',
'hatband/js/underscore.js',
'hatband/js/backbone.js',
'hatband/js/backbone-inline-base.js')
class OrderableGenericKeyLookupForm(BackboneFormMixin, forms.ModelForm):
class Meta:
widgets = {
"content_type": forms.HiddenInput(),
"object_id": widgets.GenericKeyWidget(),
"order": forms.HiddenInput(),
}
| from django import forms
from django.conf import settings
from django.db import models
from . import widgets
RICH_TEXT_DBFIELD_OVERRIDES = {
models.TextField: {'widget': widgets.RichTextWidget},
}
class BackboneFormMixin(object):
if getattr(settings, "ARMSTRONG_ADMIN_PROVIDE_STATIC", True):
class Media:
js = (
'hatband/js/jquery-1.6.2.min.js',
'hatband/js/underscore.js',
'hatband/js/backbone.js',
'hatband/js/backbone-inline-base.js')
class OrderableGenericKeyLookupForm(BackboneFormMixin, forms.ModelForm):
class Meta:
widgets = {
"content_type": forms.HiddenInput(),
"object_id": widgets.GenericKeyWidget(),
"order": forms.HiddenInput(),
}
| Make it possible to turn off admin JS | Make it possible to turn off admin JS
| Python | apache-2.0 | armstrong/armstrong.hatband,armstrong/armstrong.hatband,texastribune/armstrong.hatband,armstrong/armstrong.hatband,texastribune/armstrong.hatband,texastribune/armstrong.hatband | ---
+++
@@ -1,4 +1,5 @@
from django import forms
+from django.conf import settings
from django.db import models
from . import widgets
@@ -9,12 +10,13 @@
class BackboneFormMixin(object):
- class Media:
- js = (
- 'hatband/js/jquery-1.6.2.min.js',
- 'hatband/js/underscore.js',
- 'hatband/js/backbone.js',
- 'hatband/js/backbone-inline-base.js')
+ if getattr(settings, "ARMSTRONG_ADMIN_PROVIDE_STATIC", True):
+ class Media:
+ js = (
+ 'hatband/js/jquery-1.6.2.min.js',
+ 'hatband/js/underscore.js',
+ 'hatband/js/backbone.js',
+ 'hatband/js/backbone-inline-base.js')
class OrderableGenericKeyLookupForm(BackboneFormMixin, forms.ModelForm): |
e369824a1bd337e9245d010b93734832af4e0376 | cetacean/response.py | cetacean/response.py | #!/usr/bin/env python
# encoding: utf-8
import json
import re
from .resource import Resource
class Response(Resource):
"""Represents an HTTP response that is hopefully a HAL document."""
def __init__(self, response):
"""Pass it a Requests response object.
:response: A response object from the Requests library.
"""
self._response = response
self._hal_regex = re.compile(r"application/hal\+json")
self._parsed_hal = None
def is_hal(self):
"""Test if a response was a HAL document or not.
:returns: True or False
"""
return bool(self._hal_regex.match(self._response.headers['content-type']))
@property
def _hal(self):
"""Returns the parsed HAL body of the response
:returns: A parsed HAL body (dicts and lists) or an empty dictionary.
"""
if self._parsed_hal != None: return self._parsed_hal
self._parsed_hal = self._parse_hal()
return self._parsed_hal
def _parse_hal(self):
"""Parses the JSON body of the response.
:returns: A parsed JSON body (dicts and lists) or an empty dictionary.
"""
if not self.is_hal(): return {}
try:
return json.loads(self._response.content)
except ValueError, e:
return {}
| #!/usr/bin/env python
# encoding: utf-8
import json
import re
from .resource import Resource
class Response(Resource):
"""Represents an HTTP response that is hopefully a HAL document."""
_hal_regex = re.compile(r"application/hal\+json")
def __init__(self, response):
"""Pass it a Requests response object.
:response: A response object from the Requests library.
"""
self._response = response
self._parsed_hal = None
def is_hal(self):
"""Test if a response was a HAL document or not.
:returns: True or False
"""
return bool(self._hal_regex.match(self._response.headers['content-type']))
@property
def _hal(self):
"""Returns the parsed HAL body of the response
:returns: A parsed HAL body (dicts and lists) or an empty dictionary.
"""
if self._parsed_hal != None: return self._parsed_hal
self._parsed_hal = self._parse_hal()
return self._parsed_hal
def _parse_hal(self):
"""Parses the JSON body of the response.
:returns: A parsed JSON body (dicts and lists) or an empty dictionary.
"""
if not self.is_hal(): return {}
try:
return json.loads(self._response.content)
except ValueError, e:
return {}
| Move _hal_regex to class scope. | Move _hal_regex to class scope.
| Python | mit | nanorepublica/cetacean-python,benhamill/cetacean-python | ---
+++
@@ -10,6 +10,8 @@
"""Represents an HTTP response that is hopefully a HAL document."""
+ _hal_regex = re.compile(r"application/hal\+json")
+
def __init__(self, response):
"""Pass it a Requests response object.
@@ -17,7 +19,6 @@
"""
self._response = response
- self._hal_regex = re.compile(r"application/hal\+json")
self._parsed_hal = None
def is_hal(self): |
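The design point: a class attribute is evaluated once at class-definition time and shared by every instance, so the regex is no longer recompiled per Response. A standalone illustration (not the library's API):

import re

class Sniffer(object):
    _hal_regex = re.compile(r'application/hal\+json')  # compiled once

    def is_hal(self, content_type):
        return bool(self._hal_regex.match(content_type))

a, b = Sniffer(), Sniffer()
assert a._hal_regex is b._hal_regex  # shared object, no per-instance cost
assert b.is_hal('application/hal+json; charset=utf-8')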
46db910f9b9a150b785ea3b36a9e4f73db326d78 | loader.py | loader.py | from etl import get_local_handles, ingest_feeds, CSV_ETL_CLASSES
from local import LocalConfig
from interface import Marcotti
if __name__ == "__main__":
settings = LocalConfig()
marcotti = Marcotti(settings)
with marcotti.create_session() as sess:
for entity, etl_class in CSV_ETL_CLASSES:
data_file = settings.CSV_DATA[entity]
if data_file is None:
continue
if entity in ['Salaries', 'Partials', 'FieldStats', 'GkStats', 'LeaguePoints']:
params = (sess, settings.COMPETITION_NAME, settings.SEASON_NAME)
else:
params = (sess,)
ingest_feeds(get_local_handles, settings.CSV_DATA_DIR, data_file, etl_class(*params))
| import os
import logging
from etl import get_local_handles, ingest_feeds, CSV_ETL_CLASSES
from local import LocalConfig
from interface import Marcotti
LOG_FORMAT = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s: %(message)s')
ch = logging.FileHandler(os.path.join(LocalConfig().LOG_DIR, 'marcotti.log'))
ch.setLevel(logging.INFO)
ch.setFormatter(LOG_FORMAT)
logger = logging.getLogger('loader')
logger.setLevel(logging.INFO)
logger.addHandler(ch)
if __name__ == "__main__":
settings = LocalConfig()
marcotti = Marcotti(settings)
logger.info("Data ingestion start")
with marcotti.create_session() as sess:
for entity, etl_class in CSV_ETL_CLASSES:
data_file = settings.CSV_DATA[entity]
if data_file is None:
logger.info("Skipping ingestion into %s data model", entity)
else:
if type(data_file) is list:
data_file = os.path.join(*data_file)
logger.info("Ingesting %s into %s data model",
os.path.join(settings.CSV_DATA_DIR, data_file), entity)
if entity in ['Salaries', 'Partials', 'FieldStats', 'GkStats', 'LeaguePoints']:
params = (sess, settings.COMPETITION_NAME, settings.SEASON_NAME)
else:
params = (sess,)
ingest_feeds(get_local_handles, settings.CSV_DATA_DIR, data_file, etl_class(*params))
logger.info("Data ingestion complete")
| Add logging messages to data ingestion tool | Add logging messages to data ingestion tool
| Python | mit | soccermetrics/marcotti-mls | ---
+++
@@ -1,18 +1,38 @@
+import os
+import logging
+
from etl import get_local_handles, ingest_feeds, CSV_ETL_CLASSES
from local import LocalConfig
from interface import Marcotti
+LOG_FORMAT = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s: %(message)s')
+ch = logging.FileHandler(os.path.join(LocalConfig().LOG_DIR, 'marcotti.log'))
+ch.setLevel(logging.INFO)
+ch.setFormatter(LOG_FORMAT)
+
+logger = logging.getLogger('loader')
+logger.setLevel(logging.INFO)
+logger.addHandler(ch)
+
+
if __name__ == "__main__":
settings = LocalConfig()
marcotti = Marcotti(settings)
+ logger.info("Data ingestion start")
with marcotti.create_session() as sess:
for entity, etl_class in CSV_ETL_CLASSES:
data_file = settings.CSV_DATA[entity]
if data_file is None:
- continue
- if entity in ['Salaries', 'Partials', 'FieldStats', 'GkStats', 'LeaguePoints']:
- params = (sess, settings.COMPETITION_NAME, settings.SEASON_NAME)
+ logger.info("Skipping ingestion into %s data model", entity)
else:
- params = (sess,)
- ingest_feeds(get_local_handles, settings.CSV_DATA_DIR, data_file, etl_class(*params))
+ if type(data_file) is list:
+ data_file = os.path.join(*data_file)
+ logger.info("Ingesting %s into %s data model",
+ os.path.join(settings.CSV_DATA_DIR, data_file), entity)
+ if entity in ['Salaries', 'Partials', 'FieldStats', 'GkStats', 'LeaguePoints']:
+ params = (sess, settings.COMPETITION_NAME, settings.SEASON_NAME)
+ else:
+ params = (sess,)
+ ingest_feeds(get_local_handles, settings.CSV_DATA_DIR, data_file, etl_class(*params))
+ logger.info("Data ingestion complete") |
dfc6b2d2d8cda75349dfab33d9639b5ea24cc520 | contentcuration/contentcuration/ricecooker_versions.py | contentcuration/contentcuration/ricecooker_versions.py | import xmlrpclib
from socket import gaierror
VERSION_OK = "0.5.13"
try:
pypi = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
VERSION_OK = pypi.package_releases('ricecooker')[0]
except gaierror:
pass
VERSION_OK_MESSAGE = "Ricecooker v{} is up-to-date."
VERSION_SOFT_WARNING = "0.5.6"
VERSION_SOFT_WARNING_MESSAGE = "You are using Ricecooker v{}, however v{} is available. You should consider upgrading your Ricecooker."
VERSION_HARD_WARNING = "0.3.13"
VERSION_HARD_WARNING_MESSAGE = "Ricecooker v{} is deprecated. Any channels created with this version will be unlinked with any future upgrades. You are strongly recommended to upgrade to v{}."
VERSION_ERROR = None
VERSION_ERROR_MESSAGE = "Ricecooker v{} is no longer compatible. You must upgrade to v{} to continue."
| import xmlrpclib
from socket import gaierror, error
VERSION_OK = "0.6.0"
try:
pypi = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
VERSION_OK = pypi.package_releases('ricecooker')[0]
except (gaierror, error):
pass
VERSION_OK_MESSAGE = "Ricecooker v{} is up-to-date."
VERSION_SOFT_WARNING = "0.5.6"
VERSION_SOFT_WARNING_MESSAGE = "You are using Ricecooker v{}, however v{} is available. You should consider upgrading your Ricecooker."
VERSION_HARD_WARNING = "0.3.13"
VERSION_HARD_WARNING_MESSAGE = "Ricecooker v{} is deprecated. Any channels created with this version will be unlinked with any future upgrades. You are strongly recommended to upgrade to v{}."
VERSION_ERROR = None
VERSION_ERROR_MESSAGE = "Ricecooker v{} is no longer compatible. You must upgrade to v{} to continue."
| Add error handling to reduce dependency on pypi | Add error handling to reduce dependency on pypi
| Python | mit | DXCanas/content-curation,DXCanas/content-curation,jayoshih/content-curation,jayoshih/content-curation,jayoshih/content-curation,jayoshih/content-curation,fle-internal/content-curation,fle-internal/content-curation,fle-internal/content-curation,fle-internal/content-curation,DXCanas/content-curation,DXCanas/content-curation | ---
+++
@@ -1,12 +1,12 @@
import xmlrpclib
-from socket import gaierror
+from socket import gaierror, error
-VERSION_OK = "0.5.13"
+VERSION_OK = "0.6.0"
try:
pypi = xmlrpclib.ServerProxy('https://pypi.python.org/pypi')
VERSION_OK = pypi.package_releases('ricecooker')[0]
-except gaierror:
+except (gaierror, error):
pass
VERSION_OK_MESSAGE = "Ricecooker v{} is up-to-date." |
6a83ff3a2d1aca0a3663a36ca9502d3d86ea2a93 | pirx/base.py | pirx/base.py | class Settings(object):
def __init__(self):
self._settings = {}
def __setattr__(self, name, value):
if name.startswith('_'):
super(Settings, self).__setattr__(name, value)
else:
self._settings[name] = value
def write(self):
for name, value in self._settings.iteritems():
print '%s = %s' % (name.upper(), value.__repr__())
| import collections
class Settings(object):
def __init__(self):
self._settings = collections.OrderedDict()
def __setattr__(self, name, value):
if name.startswith('_'):
super(Settings, self).__setattr__(name, value)
else:
self._settings[name] = value
def write(self):
for name, value in self._settings.iteritems():
print '%s = %s' % (name.upper(), value.__repr__())
| Store settings with the OrderedDict | Store settings with the OrderedDict
| Python | mit | piotrekw/pirx | ---
+++
@@ -1,6 +1,9 @@
+import collections
+
+
class Settings(object):
def __init__(self):
- self._settings = {}
+ self._settings = collections.OrderedDict()
def __setattr__(self, name, value):
if name.startswith('_'): |
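A short sketch of what the OrderedDict buys here: on the Python 2 interpreters this code targets, plain dicts do not preserve insertion order, so the written settings would not be deterministic.

import collections

settings = collections.OrderedDict()
settings['debug'] = True
settings['database_name'] = 'app'
settings['secret_key'] = 's3cret'
# Iteration follows insertion order, keeping generated files diff-friendly.
assert list(settings) == ['debug', 'database_name', 'secret_key']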
3188b77e07bc060cfdd3db8eadbf0fbe1e34b72e | orlo/config.py | orlo/config.py | from __future__ import print_function
import ConfigParser
__author__ = 'alforbes'
config = ConfigParser.ConfigParser()
config.add_section('main')
config.set('main', 'debug_mode', 'false')
config.set('main', 'propagate_exceptions', 'true')
config.set('main', 'time_format', '%Y-%m-%dT%H:%M:%SZ')
config.set('main', 'time_zone', 'UTC')
config.set('main', 'strict_slashes', 'false')
config.set('main', 'base_url', 'http://localhost:5000')
config.add_section('db')
config.set('db', 'uri', 'postgres://orlo:password@localhost:5432/orlo')
config.set('db', 'echo_queries', 'false')
config.add_section('logging')
config.set('logging', 'debug', 'false')
config.set('logging', 'file', 'disabled')
config.read('/etc/orlo/orlo.ini')
config.add_section('deploy_shell')
config.set('deploy_shell', 'command_path', '/vagrant/deployer.rb')
| from __future__ import print_function
import ConfigParser
__author__ = 'alforbes'
config = ConfigParser.ConfigParser()
config.add_section('main')
config.set('main', 'debug_mode', 'false')
config.set('main', 'propagate_exceptions', 'true')
config.set('main', 'time_format', '%Y-%m-%dT%H:%M:%SZ')
config.set('main', 'time_zone', 'UTC')
config.set('main', 'strict_slashes', 'false')
config.set('main', 'base_url', 'http://localhost:5000')
config.add_section('db')
config.set('db', 'uri', 'postgres://orlo:password@localhost:5432/orlo')
config.set('db', 'echo_queries', 'false')
config.add_section('logging')
config.set('logging', 'debug', 'false')
config.set('logging', 'file', 'disabled')
config.read('/etc/orlo/orlo.ini')
config.add_section('deploy_shell')
config.set('deploy_shell', 'command_path', '../deployer.rb')
| Use relative path for deployer.rb | Use relative path for deployer.rb
| Python | mit | eBayClassifiedsGroup/orlo,al4/sponge,al4/orlo,al4/sponge,al4/sponge,eBayClassifiedsGroup/orlo,eBayClassifiedsGroup/sponge,eBayClassifiedsGroup/sponge,eBayClassifiedsGroup/sponge,al4/orlo | ---
+++
@@ -23,5 +23,5 @@
config.read('/etc/orlo/orlo.ini')
config.add_section('deploy_shell')
-config.set('deploy_shell', 'command_path', '/vagrant/deployer.rb')
+config.set('deploy_shell', 'command_path', '../deployer.rb')
|
6cfc9de7fe8fd048a75845a69bdeefc7c742bae4 | oneall/django_oneall/management/commands/emaillogin.py | oneall/django_oneall/management/commands/emaillogin.py | # -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from ...auth import EmailTokenAuthBackend
class Command(BaseCommand):
help = "E-mail login without sending the actual e-mail."
def add_arguments(self, parser):
parser.add_argument('email', type=str)
def handle(self, email, **options):
if '@' not in email:
self.stderr.write("Failed. E-mail is mandatory.")
return 1
query_string = EmailTokenAuthBackend().issue(email)
self.stdout.write("Complete login with: %s?%s" % (reverse('oneall-login'), query_string))
| # -*- coding: utf-8 -*-
from django.core.mail import EmailMessage
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from ...auth import EmailTokenAuthBackend
class Command(BaseCommand):
help = "Issues an e-mail login token."
def add_arguments(self, parser):
parser.add_argument('-s', '--send', dest='send', action='store_true',
help="Actually e-mail the token instead of only displaying it.")
parser.add_argument('email', type=str)
def handle(self, email, send, **options):
if '@' not in email:
self.stderr.write("Failed. E-mail is mandatory.")
return
query_string = EmailTokenAuthBackend().issue(email)
msg = "Complete login with: %s?%s" % (reverse('oneall-login'), query_string)
self.stdout.write(msg)
if send:
mail = EmailMessage()
mail.to = [email]
mail.subject = 'Login Test'
mail.body = msg
try:
sent = mail.send()
self.stdout.write("Sent %d message." % sent)
except ConnectionError as e:
self.stderr.write(str(e))
| Add the possibility of testing SMTP from the command-line. | Add the possibility of testing SMTP from the command-line.
| Python | mit | leandigo/django-oneall,ckot/django-oneall,leandigo/django-oneall,ckot/django-oneall | ---
+++
@@ -1,4 +1,5 @@
# -*- coding: utf-8 -*-
+from django.core.mail import EmailMessage
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
@@ -6,14 +7,27 @@
class Command(BaseCommand):
- help = "E-mail login without sending the actual e-mail."
+ help = "Issues an e-mail login token."
def add_arguments(self, parser):
+ parser.add_argument('-s', '--send', dest='send', action='store_true',
+ help="Actually e-mail the token instead of only displaying it.")
parser.add_argument('email', type=str)
- def handle(self, email, **options):
+ def handle(self, email, send, **options):
if '@' not in email:
self.stderr.write("Failed. E-mail is mandatory.")
- return 1
+ return
query_string = EmailTokenAuthBackend().issue(email)
- self.stdout.write("Complete login with: %s?%s" % (reverse('oneall-login'), query_string))
+ msg = "Complete login with: %s?%s" % (reverse('oneall-login'), query_string)
+ self.stdout.write(msg)
+ if send:
+ mail = EmailMessage()
+ mail.to = [email]
+ mail.subject = 'Login Test'
+ mail.body = msg
+ try:
+ sent = mail.send()
+ self.stdout.write("Sent %d message." % sent)
+ except ConnectionError as e:
+ self.stderr.write(str(e)) |
7a936665eff8a6a8f6889334ad2238cbfcded18b | member.py | member.py | import requests
from credentials import label_id
from gmailauth import refresh
access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:3d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
j = r.json()
messages = []
if 'messages' in j:
messages.extend(j['messages'])
# return messages
message_ids = []
for item in messages:
message_ids.append(item['id'])
return message_ids
print(list_messages(headers))
def get_message(headers, identity):
params = {'id': identity, format: 'metadata'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
print(r.status_code, r.reason)
h = j['payload']
subject = ''
for header in h['headers']:
if header['name'] == 'Subject':
subject = header['value']
break
print(subject)
for item in list_messages(headers):
get_message(headers, item)
# get_message(headers, list_messages(headers))
| import requests
from base64 import urlsafe_b64decode
from credentials import label_id, url1, url2
from gmailauth import refresh
# access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:2d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
j = r.json()
messages = []
if 'messages' in j:
messages.extend(j['messages'])
# return messages
message_ids = []
for item in messages:
message_ids.append(item['id'])
return message_ids
def get_message(headers, identity):
params = {'id': identity, 'format': 'raw'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
raw = j['raw']
d = urlsafe_b64decode(raw)
p = d.decode()
s = p.find('https')
l = len(p)
print(p[s:l])
print('----------')
return(p[s:l])
# for item in list_messages(headers):
# get_message(headers, item)
| Return the order details URL from email body. | Return the order details URL from email body.
There is currently no Agile API method that will return the order
details for an activity so the URL from the email must be used in
conjunction with a web scraper to get the relevant details.
| Python | mit | deadlyraptor/reels | ---
+++
@@ -1,13 +1,14 @@
import requests
-from credentials import label_id
+from base64 import urlsafe_b64decode
+from credentials import label_id, url1, url2
from gmailauth import refresh
-access_token = refresh()
+# access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
- params = {'labelIds': label_id, 'q': 'newer_than:3d'}
+ params = {'labelIds': label_id, 'q': 'newer_than:2d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
@@ -21,24 +22,20 @@
message_ids.append(item['id'])
return message_ids
-print(list_messages(headers))
-
def get_message(headers, identity):
- params = {'id': identity, format: 'metadata'}
+ params = {'id': identity, 'format': 'raw'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
- print(r.status_code, r.reason)
- h = j['payload']
- subject = ''
- for header in h['headers']:
- if header['name'] == 'Subject':
- subject = header['value']
- break
- print(subject)
+ raw = j['raw']
+ d = urlsafe_b64decode(raw)
+ p = d.decode()
+ s = p.find('https')
+ l = len(p)
+ print(p[s:l])
+ print('----------')
+ return(p[s:l])
-for item in list_messages(headers):
- get_message(headers, item)
-
-# get_message(headers, list_messages(headers))
+# for item in list_messages(headers):
+# get_message(headers, item) |
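The decode-and-slice step in get_message can be checked in isolation; a minimal round-trip sketch with a fabricated payload:

from base64 import urlsafe_b64encode, urlsafe_b64decode

raw = urlsafe_b64encode(b'Order confirmed! Details: https://example.com/orders/42')
body = urlsafe_b64decode(raw).decode()
start = body.find('https')
assert body[start:] == 'https://example.com/orders/42'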
2cf6318c18620064898cdf5e940a3fc732d976f8 | src/lighter/hipchat.py | src/lighter/hipchat.py | import logging, urllib2
from lighter.util import merge, build_request
class HipChat(object):
def __init__(self, url, token):
self._url = url or 'https://api.hipchat.com'
self._token = token
self._rooms = []
self._sender = 'Lighter'
self._message_attribs = {
'from': 'Lighter',
'color': 'green',
'notify': True,
'message_format': 'html'
}
def rooms(self, ids):
self._rooms = ids
return self
def notify(self, message):
for room in self._rooms:
self._call('/v2/room/%s/notification' % room, merge({'message': message}, self._message_attribs))
def _call(self, endpoint, data):
if self._url is None or self._token is None:
logging.debug('HipChat is not enabled')
return
try:
url = self._url.rstrip('/') + '/' + endpoint + '?auth_token=' + self._token
logging.debug('Calling HipChat endpoint %s', endpoint)
response = urllib2.urlopen(build_request(url, data, {}, 'POST'))
content = response.read()
except urllib2.URLError, e:
logging.warn(str(e))
return {}
| import logging, urllib2
from lighter.util import merge, build_request
class HipChat(object):
def __init__(self, url, token):
self._url = url or 'https://api.hipchat.com'
self._token = token
self._rooms = []
self._sender = 'Lighter'
self._message_attribs = {
'color': 'green',
'notify': True,
'message_format': 'html'
}
def rooms(self, ids):
self._rooms = ids
return self
def notify(self, message):
for room in self._rooms:
self._call('/v2/room/%s/notification' % room, merge({'message': message}, self._message_attribs))
def _call(self, endpoint, data):
if self._url is None or self._token is None:
logging.debug('HipChat is not enabled')
return
try:
url = self._url.rstrip('/') + '/' + endpoint + '?auth_token=' + self._token
logging.debug('Calling HipChat endpoint %s', endpoint)
response = urllib2.urlopen(build_request(url, data, {}, 'POST'))
content = response.read()
except urllib2.URLError, e:
logging.warn(str(e))
return {}
| Remove extra label for user | Remove extra label for user
| Python | mit | meltwater/lighter,meltwater/lighter | ---
+++
@@ -8,7 +8,6 @@
self._rooms = []
self._sender = 'Lighter'
self._message_attribs = {
- 'from': 'Lighter',
'color': 'green',
'notify': True,
'message_format': 'html' |
ed11fa0ebc365b8a7b0f31c8b09bf23b891e44b6 | discover_tests.py | discover_tests.py | """
Simple auto test discovery.
From http://stackoverflow.com/a/17004409
"""
import os
import sys
import unittest
def additional_tests():
setup_file = sys.modules['__main__'].__file__
setup_dir = os.path.abspath(os.path.dirname(setup_file))
return unittest.defaultTestLoader.discover(setup_dir)
| """
Simple auto test discovery.
From http://stackoverflow.com/a/17004409
"""
import os
import sys
import unittest
if not hasattr(unittest.defaultTestLoader, 'discover'):
import unittest2 as unittest
def additional_tests():
setup_file = sys.modules['__main__'].__file__
setup_dir = os.path.abspath(os.path.dirname(setup_file))
return unittest.defaultTestLoader.discover(setup_dir)
| Allow test discovery on Py26 with unittest2 | Allow test discovery on Py26 with unittest2
| Python | mit | QuLogic/python-future,michaelpacer/python-future,PythonCharmers/python-future,QuLogic/python-future,krischer/python-future,PythonCharmers/python-future,krischer/python-future,michaelpacer/python-future | ---
+++
@@ -7,6 +7,9 @@
import sys
import unittest
+if not hasattr(unittest.defaultTestLoader, 'discover'):
+ import unittest2 as unittest
+
def additional_tests():
setup_file = sys.modules['__main__'].__file__
setup_dir = os.path.abspath(os.path.dirname(setup_file)) |
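The hasattr check is plain feature detection; a hedged sketch of the same idiom (the unittest2 backport is only needed on Python 2.6):

import unittest
if not hasattr(unittest.defaultTestLoader, 'discover'):
    import unittest2 as unittest  # backport supplies discover()

suite = unittest.defaultTestLoader.discover('.')  # works either way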
63f04662f5ca22443ab6080f559ac898302cf103 | tests/integration/conftest.py | tests/integration/conftest.py | def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests that require a redeploy are run after
# tests that don't need a redeploy.
final_list = []
on_redeploy_tests = []
for item in items:
if item.get_marker('on_redeploy') is not None:
on_redeploy_tests.append(item)
else:
final_list.append(item)
final_list.extend(on_redeploy_tests)
items[:] = final_list
| DEPLOY_TEST_BASENAME = 'test_features.py'
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests that require a redeploy are run after
# tests that don't need a redeploy.
start, end = _get_start_end_index(DEPLOY_TEST_BASENAME, items)
marked = []
unmarked = []
for item in items[start:end]:
if item.get_marker('on_redeploy') is not None:
marked.append(item)
else:
unmarked.append(item)
items[start:end] = unmarked + marked
def _get_start_end_index(basename, items):
# precondition: all the tests for test_features.py are
# in a contiguous range. This is the case because pytest
# will group all tests in a module together.
matched = [item.fspath.basename == basename for item in items]
return (
matched.index(True),
len(matched) - list(reversed(matched)).index(True)
)
| Reorder redeploy tests within a single module | Reorder redeploy tests within a single module
The original code for on_redeploy was making the
assumption that there was only one integration test file.
When test_package.py was added, the tests always failed
because the redeploy tests were run *after* the package tests
which messed with the module scope fixtures.
Now we ensure we only reorder tests within test_features.py.
| Python | apache-2.0 | awslabs/chalice | ---
+++
@@ -1,12 +1,26 @@
+DEPLOY_TEST_BASENAME = 'test_features.py'
+
+
def pytest_collection_modifyitems(session, config, items):
# Ensure that all tests that require a redeploy are run after
# tests that don't need a redeploy.
- final_list = []
- on_redeploy_tests = []
- for item in items:
+ start, end = _get_start_end_index(DEPLOY_TEST_BASENAME, items)
+ marked = []
+ unmarked = []
+ for item in items[start:end]:
if item.get_marker('on_redeploy') is not None:
- on_redeploy_tests.append(item)
+ marked.append(item)
else:
- final_list.append(item)
- final_list.extend(on_redeploy_tests)
- items[:] = final_list
+ unmarked.append(item)
+ items[start:end] = unmarked + marked
+
+
+def _get_start_end_index(basename, items):
+ # precondition: all the tests for test_features.py are
+ # in a contiguous range. This is the case because pytest
+ # will group all tests in a module together.
+ matched = [item.fspath.basename == basename for item in items]
+ return (
+ matched.index(True),
+ len(matched) - list(reversed(matched)).index(True)
+ ) |
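A worked example of the index arithmetic in _get_start_end_index, using bare basenames in place of pytest items:

basenames = ['test_package.py', 'test_features.py', 'test_features.py', 'test_other.py']
matched = [b == 'test_features.py' for b in basenames]
start = matched.index(True)                               # 1
end = len(matched) - list(reversed(matched)).index(True)  # 4 - 1 = 3
assert (start, end) == (1, 3)
# items[1:3] is exactly the contiguous test_features.py block, so the
# reorder touches nothing else.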
859a82df7804f181ffc12052a4af59ab52a44560 | test/goldstandard/benchmark_confirm.py | test/goldstandard/benchmark_confirm.py | import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
| import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
while True:
line_read = opened_file.readline()
# print line_read
if not line_read:
print "EXITING"
break
count = count + 1
if count%100 == 0:
print "starting patent", count
| Set up reading lines from file, print count for testing purposes | Set up reading lines from file, print count for testing purposes
| Python | bsd-2-clause | yngcan/patentprocessor,nikken1/patentprocessor,funginstitute/patentprocessor,nikken1/patentprocessor,yngcan/patentprocessor,yngcan/patentprocessor,funginstitute/patentprocessor,funginstitute/patentprocessor,nikken1/patentprocessor | ---
+++
@@ -28,3 +28,15 @@
count = 0
errors = 0
success = 0
+
+ while True:
+
+ line_read = opened_file.readline()
+ # print line_read
+
+ if not line_read:
+ print "EXITING"
+ break
+ count = count + 1
+ if count%100 == 0:
+ print "starting patent", count |
7a0c0e6ed56e847b7b6300c1a0b4a427f26b296d | app/PRESUBMIT.py | app/PRESUBMIT.py | #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
| #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
| Make all changes to app/ run on all trybot platforms, not just the big three. Anyone who's changing a header here may break the chromeos build. | Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: dd90618784b6a4b323ea0c23a071cb1c9e6f2ac7@51000 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
| Python | bsd-3-clause | wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser,wistoch/meego-app-browser | ---
+++
@@ -23,3 +23,6 @@
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
+
+def GetPreferredTrySlaves():
+ return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac'] |
d42314b323aa0f8c764d72a5ebebc0e7d5ac88f3 | nova/api/openstack/compute/schemas/v3/create_backup.py | nova/api/openstack/compute/schemas/v3/create_backup.py | # Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.validation import parameter_types
create_backup = {
'type': 'object',
'properties': {
'create_backup': {
'type': 'object',
'properties': {
'name': parameter_types.name,
'backup_type': {
'type': 'string',
'enum': ['daily', 'weekly'],
},
'rotation': {
'type': ['integer', 'string'],
'pattern': '^[0-9]+$',
'minimum': 0,
},
'metadata': {
'type': 'object',
}
},
'required': ['name', 'backup_type', 'rotation'],
'additionalProperties': False,
},
},
'required': ['create_backup'],
'additionalProperties': False,
}
| # Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.validation import parameter_types
create_backup = {
'type': 'object',
'properties': {
'create_backup': {
'type': 'object',
'properties': {
'name': parameter_types.name,
'backup_type': {
'type': 'string',
},
'rotation': {
'type': ['integer', 'string'],
'pattern': '^[0-9]+$',
'minimum': 0,
},
'metadata': {
'type': 'object',
}
},
'required': ['name', 'backup_type', 'rotation'],
'additionalProperties': False,
},
},
'required': ['create_backup'],
'additionalProperties': False,
}
| Remove param check for backup type on v2.1 API | Remove param check for backup type on v2.1 API
The backup type is only used by glance, so nova check it make
no sense; currently we have daily and weekly as only valid param
but someone may add 'monthly' as param. nova should allow it
and delegate the error. This patch removes check on v2.1 API.
Change-Id: I59bbc0f589c8c280eb8cd87aa279898fffaeab7a
Closes-Bug: #1361490
| Python | apache-2.0 | devendermishrajio/nova,affo/nova,projectcalico/calico-nova,whitepages/nova,klmitch/nova,jianghuaw/nova,cernops/nova,Stavitsky/nova,fnordahl/nova,blueboxgroup/nova,CEG-FYP-OpenStack/scheduler,Francis-Liu/animated-broccoli,j-carpentier/nova,joker946/nova,hanlind/nova,rajalokan/nova,zhimin711/nova,silenceli/nova,ruslanloman/nova,isyippee/nova,akash1808/nova_test_latest,BeyondTheClouds/nova,belmiromoreira/nova,yatinkumbhare/openstack-nova,mmnelemane/nova,BeyondTheClouds/nova,JioCloud/nova_test_latest,mmnelemane/nova,mikalstill/nova,double12gzh/nova,sebrandon1/nova,cloudbase/nova-virtualbox,phenoxim/nova,devendermishrajio/nova_test_latest,NeCTAR-RC/nova,JioCloud/nova,apporc/nova,ruslanloman/nova,jianghuaw/nova,rahulunair/nova,whitepages/nova,adelina-t/nova,blueboxgroup/nova,noironetworks/nova,alaski/nova,adelina-t/nova,rahulunair/nova,felixma/nova,Juniper/nova,iuliat/nova,alexandrucoman/vbox-nova-driver,Tehsmash/nova,dims/nova,orbitfp7/nova,tealover/nova,yosshy/nova,mahak/nova,CEG-FYP-OpenStack/scheduler,JianyuWang/nova,cernops/nova,sebrandon1/nova,akash1808/nova_test_latest,Juniper/nova,varunarya10/nova_test_latest,Juniper/nova,double12gzh/nova,devendermishrajio/nova_test_latest,tudorvio/nova,BeyondTheClouds/nova,felixma/nova,alexandrucoman/vbox-nova-driver,cyx1231st/nova,CloudServer/nova,projectcalico/calico-nova,iuliat/nova,openstack/nova,mahak/nova,vmturbo/nova,eonpatapon/nova,jeffrey4l/nova,cloudbase/nova,rajalokan/nova,yosshy/nova,vmturbo/nova,nikesh-mahalka/nova,mandeepdhami/nova,mgagne/nova,mahak/nova,TwinkleChawla/nova,CloudServer/nova,belmiromoreira/nova,thomasem/nova,shail2810/nova,devendermishrajio/nova,JioCloud/nova_test_latest,NeCTAR-RC/nova,jeffrey4l/nova,cloudbase/nova-virtualbox,openstack/nova,zhimin711/nova,gooddata/openstack-nova,zzicewind/nova,Metaswitch/calico-nova,joker946/nova,LoHChina/nova,cyx1231st/nova,jianghuaw/nova,ted-gould/nova,raildo/nova,zaina/nova,ted-gould/nova,zaina/nova,petrutlucian94/nova,jianghuaw/nova,phenoxim/nova,mandeepdhami/nova,Yusuke1987/openstack_template,rajalokan/nova,cloudbase/nova,vmturbo/nova,affo/nova,akash1808/nova,gooddata/openstack-nova,bgxavier/nova,Stavitsky/nova,rajalokan/nova,MountainWei/nova,bgxavier/nova,fnordahl/nova,scripnichenko/nova,hanlind/nova,yatinkumbhare/openstack-nova,sebrandon1/nova,Francis-Liu/animated-broccoli,watonyweng/nova,barnsnake351/nova,JioCloud/nova,TwinkleChawla/nova,raildo/nova,bigswitch/nova,Tehsmash/nova,varunarya10/nova_test_latest,CCI-MOC/nova,shail2810/nova,gooddata/openstack-nova,dims/nova,zzicewind/nova,vmturbo/nova,tealover/nova,mgagne/nova,alvarolopez/nova,eonpatapon/nova,barnsnake351/nova,klmitch/nova,mikalstill/nova,rahulunair/nova,apporc/nova,edulramirez/nova,openstack/nova,isyippee/nova,tudorvio/nova,Metaswitch/calico-nova,JianyuWang/nova,kimjaejoong/nova,hanlind/nova,kimjaejoong/nova,bigswitch/nova,tangfeixiong/nova,petrutlucian94/nova,klmitch/nova,gooddata/openstack-nova,scripnichenko/nova,MountainWei/nova,cernops/nova,orbitfp7/nova,takeshineshiro/nova,klmitch/nova,edulramirez/nova,akash1808/nova,j-carpentier/nova,tangfeixiong/nova,silenceli/nova,nikesh-mahalka/nova,dawnpower/nova,CCI-MOC/nova,takeshineshiro/nova,LoHChina/nova,Juniper/nova,alaski/nova,noironetworks/nova,alvarolopez/nova,Yusuke1987/openstack_template,watonyweng/nova,thomasem/nova,cloudbase/nova,mikalstill/nova,dawnpower/nova | ---
+++
@@ -24,7 +24,6 @@
'name': parameter_types.name,
'backup_type': {
'type': 'string',
- 'enum': ['daily', 'weekly'],
},
'rotation': {
'type': ['integer', 'string'], |
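A minimal sketch of the relaxed validation above. The standalone `jsonschema` package stands in for Nova's own validators (an assumption — Nova wires these schemas into its API layer), and the schema fragment is reconstructed from the diff: with the enum gone, 'monthly' passes schema validation and rejecting unsupported types is delegated downstream.

```python
# Sketch only: `jsonschema` stands in for Nova's API validation machinery.
import jsonschema

create_backup = {
    'type': 'object',
    'properties': {
        'backup_type': {'type': 'string'},            # enum constraint removed
        'rotation': {'type': ['integer', 'string']},
    },
    'required': ['backup_type', 'rotation'],
}

# 'monthly' now passes; deciding whether it is supported is left to the
# layer that actually performs the backup.
jsonschema.validate({'backup_type': 'monthly', 'rotation': 2}, create_backup)
print('monthly accepted by the relaxed schema')
```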
720c6dbf9831b2b2ff701d0ca88303189583b9c4 | opps/api/__init__.py | opps/api/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
try:
method = getattr(request, request.method)
except:
method = request.GET
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.http import HttpResponse
from django.contrib.auth import authenticate
from piston.handler import BaseHandler as Handler
from opps.api.models import ApiKey
class BaseHandler(Handler):
def read(self, request):
base = self.model.objects
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
def appendModel(Model, Filters):
m = Model.objects.filter(**Filters)
l = []
for i in m:
l.append(i.__dict__)
return l
class ApiKeyAuthentication(object):
def __init__(self, auth_func=authenticate, method=['GET']):
self.auth_func = auth_func
self.method = method
def is_authenticated(self, request):
if request.method == 'GET' and 'GET' in self.method:
return True
try:
method = getattr(request, request.method)
except:
method = request.GET
try:
ApiKey.objects.get(
user__username=method.get('api_username'),
key=method.get('api_key'))
except ApiKey.DoesNotExist:
return False
return True
def challenge(self):
resp = HttpResponse("Authorization Required")
resp.status_code = 401
return resp
 | Add appendModel method to api BaseHandler | Add appendModel method to api BaseHandler
| Python | mit | YACOWS/opps,opps/opps,jeanmask/opps,opps/opps,williamroot/opps,YACOWS/opps,williamroot/opps,opps/opps,YACOWS/opps,williamroot/opps,williamroot/opps,YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps | ---
+++
@@ -15,6 +15,13 @@
if request.GET.items():
return base.filter(**request.GET.dict())
return base.all()
+
+ def appendModel(Model, Filters):
+ m = Model.objects.filter(**Filters)
+ l = []
+ for i in m:
+ l.append(i.__dict__)
+ return l
class ApiKeyAuthentication(object): |
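A Django-free sketch of what the new `appendModel` helper does — materialise a filtered set of objects as a list of instance `__dict__`s. `FakeEntry` is a stand-in for a Django model instance, and the filter is reduced to attribute equality:

```python
# Stand-in objects instead of Django model instances (assumption: the real
# helper receives a Model class plus filter kwargs).
class FakeEntry:
    def __init__(self, **fields):
        self.__dict__.update(fields)

def append_model(objects, **filters):
    """Return matching objects as plain dicts, mirroring appendModel."""
    matches = (o for o in objects
               if all(getattr(o, k, None) == v for k, v in filters.items()))
    return [o.__dict__ for o in matches]

entries = [FakeEntry(slug='a', published=True),
           FakeEntry(slug='b', published=False)]
print(append_model(entries, published=True))
# -> [{'slug': 'a', 'published': True}]
```

On a real queryset, `list(Model.objects.filter(**filters).values())` gives the same plain dicts without the private `_state` key that a model instance's `__dict__` carries along.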
09c3e765075b7207ca09116330496cf56472d099 | talks/settings_test.py | talks/settings_test.py | from talks.settings import *
INSTALLED_APPS += ('django_nose',)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
DEBUG = True
RAVEN_CONFIG = {}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
API_OX_PLACES_URL = '/static/mock/oxpoints.json'
TOPICS_URL = '/static/mock/topics.json?'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
'oxpoints': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
'topics': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
}
DEBUG = True
TEMPLATE_DEBUG = DEBUG
| from talks.settings import *
INSTALLED_APPS += ('django_nose',)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
DEBUG = True
RAVEN_CONFIG = {}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
API_OX_PLACES_URL = '/static/mock/oxpoints.json'
API_OX_DATES_URL = API_OX_PLACES_URL # faking the response for dates
TOPICS_URL = '/static/mock/topics.json?'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
'oxpoints': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
'topics': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
}
DEBUG = True
TEMPLATE_DEBUG = DEBUG
| Fix the tests by mocking the response | Fix the tests by mocking the response
| Python | apache-2.0 | ox-it/talks.ox,ox-it/talks.ox,ox-it/talks.ox | ---
+++
@@ -18,6 +18,7 @@
)
API_OX_PLACES_URL = '/static/mock/oxpoints.json'
+API_OX_DATES_URL = API_OX_PLACES_URL # faking the response for dates
TOPICS_URL = '/static/mock/topics.json?'
CACHES = { |
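The fix above keeps tests off the network by pointing `API_OX_DATES_URL` at the same static fixture already used for places. A self-contained sketch of that pattern — swapping a URL setting for a local fixture under test; the settings object and the live endpoint here are made up:

```python
import types
from unittest import mock

# Hypothetical stand-in for a Django settings module.
settings = types.SimpleNamespace(
    API_OX_DATES_URL='https://api.example.org/dates',  # imaginary live endpoint
)

def dates_endpoint():
    return settings.API_OX_DATES_URL

# Under test, repoint the setting at a static fixture so no network call
# can ever be made.
with mock.patch.object(settings, 'API_OX_DATES_URL',
                       '/static/mock/oxpoints.json'):
    assert dates_endpoint() == '/static/mock/oxpoints.json'

print('dates endpoint served from a local fixture during the test')
```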
54c4e434276b242de56529e63bb6c5c61d891412 | indico/modules/events/surveys/tasks.py | indico/modules/events/surveys/tasks.py | # This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from celery.schedules import crontab
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.events.surveys.models.surveys import Survey
@celery.periodic_task(name='survey_start_notifications', run_every=crontab(minute='*/30'))
def send_start_notifications():
opened_surveys = Survey.find_all(~Survey.is_deleted, ~Survey.start_notification_sent, Survey.has_started,
Survey.notifications_enabled)
try:
for survey in opened_surveys:
survey.send_start_notification()
finally:
db.session.commit()
| # This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from celery.schedules import crontab
from indico.core.celery import celery
from indico.core.db import db
from indico.modules.events.surveys.models.surveys import Survey
@celery.periodic_task(name='survey_start_notifications', run_every=crontab(minute='*/30'))
def send_start_notifications():
active_surveys = Survey.find_all(Survey.is_active, ~Survey.start_notification_sent, Survey.notifications_enabled)
try:
for survey in active_surveys:
survey.send_start_notification()
finally:
db.session.commit()
| Use safer condition for survey start notification | Use safer condition for survey start notification
| Python | mit | mvidalgarcia/indico,ThiefMaster/indico,pferreir/indico,indico/indico,mic4ael/indico,DirkHoffmann/indico,ThiefMaster/indico,indico/indico,ThiefMaster/indico,indico/indico,mic4ael/indico,indico/indico,OmeGak/indico,OmeGak/indico,pferreir/indico,DirkHoffmann/indico,mic4ael/indico,DirkHoffmann/indico,OmeGak/indico,DirkHoffmann/indico,mic4ael/indico,mvidalgarcia/indico,pferreir/indico,ThiefMaster/indico,mvidalgarcia/indico,mvidalgarcia/indico,pferreir/indico,OmeGak/indico | ---
+++
@@ -25,10 +25,9 @@
@celery.periodic_task(name='survey_start_notifications', run_every=crontab(minute='*/30'))
def send_start_notifications():
- opened_surveys = Survey.find_all(~Survey.is_deleted, ~Survey.start_notification_sent, Survey.has_started,
- Survey.notifications_enabled)
+ active_surveys = Survey.find_all(Survey.is_active, ~Survey.start_notification_sent, Survey.notifications_enabled)
try:
- for survey in opened_surveys:
+ for survey in active_surveys:
survey.send_start_notification()
finally:
db.session.commit() |
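Why the consolidated condition is safer: `Survey.is_active` also excludes surveys that have already ended, which the old `~is_deleted AND has_started` combination did not. A pure-Python sketch (a dataclass stands in for the SQLAlchemy model, and the exact meaning of `is_active` is an assumption based on the commit message):

```python
from dataclasses import dataclass

@dataclass
class FakeSurvey:
    is_deleted: bool
    has_started: bool
    has_ended: bool
    start_notification_sent: bool = False
    notifications_enabled: bool = True

    @property
    def is_active(self):
        # Assumed meaning of Indico's property: not deleted and open *now*.
        return not self.is_deleted and self.has_started and not self.has_ended

def old_pick(s):
    return (not s.is_deleted and s.has_started
            and not s.start_notification_sent and s.notifications_enabled)

def new_pick(s):
    return (s.is_active
            and not s.start_notification_sent and s.notifications_enabled)

ended = FakeSurvey(is_deleted=False, has_started=True, has_ended=True)
print(old_pick(ended), new_pick(ended))  # True False - old would notify anyway
```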
fff234587be9b63270b345345f607df381031bdc | opendebates/tests/test_context_processors.py | opendebates/tests/test_context_processors.py | import urlparse
from django.test import TestCase, override_settings
from mock import patch, Mock
from opendebates.context_processors import global_vars
from opendebates.tests.factories import SubmissionFactory
class NumberOfVotesTest(TestCase):
def test_number_of_votes(self):
mock_request = Mock()
with patch('opendebates.utils.cache') as mock_cache:
mock_cache.get.return_value = 2
context = global_vars(mock_request)
self.assertEqual(2, int(context['NUMBER_OF_VOTES']))
class ThemeTests(TestCase):
def setUp(self):
self.idea = SubmissionFactory()
@override_settings(SITE_THEME={'HASHTAG': 'TestHashtag'})
def test_email_url(self):
email_url = self.idea.email_url()
fields = urlparse.parse_qs(urlparse.urlparse(email_url).query)
self.assertTrue('subject' in fields, fields)
self.assertTrue('#TestHashtag' in fields['subject'][0], fields['subject'][0])
| import urlparse
from django.test import TestCase, override_settings
from mock import patch, Mock
from opendebates.context_processors import global_vars
from opendebates.tests.factories import SubmissionFactory
class NumberOfVotesTest(TestCase):
def test_number_of_votes(self):
mock_request = Mock()
with patch('opendebates.utils.cache') as mock_cache:
mock_cache.get.return_value = 2
context = global_vars(mock_request)
self.assertEqual(2, int(context['NUMBER_OF_VOTES']))
class ThemeTests(TestCase):
def setUp(self):
self.idea = SubmissionFactory()
@override_settings(SITE_THEME={
'EMAIL_SUBJECT': 'THE EMAIL SUBJECT',
'EMAIL_BODY': 'THE EMAIL BODY\nAND SECOND LINE',
})
def test_email_url(self):
email_url = self.idea.email_url()
fields = urlparse.parse_qs(urlparse.urlparse(email_url).query)
self.assertTrue('subject' in fields, fields)
self.assertEqual('THE EMAIL SUBJECT', fields['subject'][0], fields['subject'][0])
self.assertEqual('THE EMAIL BODY\nAND SECOND LINE', fields['body'][0], fields['body'][0])
| Fix test_email_url() after changes to email templating for sharing emails | Fix test_email_url() after changes to email templating for sharing emails
| Python | apache-2.0 | caktus/django-opendebates,caktus/django-opendebates,caktus/django-opendebates,ejucovy/django-opendebates,ejucovy/django-opendebates,ejucovy/django-opendebates,caktus/django-opendebates,ejucovy/django-opendebates | ---
+++
@@ -21,9 +21,13 @@
def setUp(self):
self.idea = SubmissionFactory()
- @override_settings(SITE_THEME={'HASHTAG': 'TestHashtag'})
+ @override_settings(SITE_THEME={
+ 'EMAIL_SUBJECT': 'THE EMAIL SUBJECT',
+ 'EMAIL_BODY': 'THE EMAIL BODY\nAND SECOND LINE',
+ })
def test_email_url(self):
email_url = self.idea.email_url()
fields = urlparse.parse_qs(urlparse.urlparse(email_url).query)
self.assertTrue('subject' in fields, fields)
- self.assertTrue('#TestHashtag' in fields['subject'][0], fields['subject'][0])
+ self.assertEqual('THE EMAIL SUBJECT', fields['subject'][0], fields['subject'][0])
+ self.assertEqual('THE EMAIL BODY\nAND SECOND LINE', fields['body'][0], fields['body'][0]) |
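The updated assertions read the templated subject and body back out of the share URL with `parse_qs`. The test file itself is Python 2 (`import urlparse`); here is the same round trip as a runnable Python 3 sketch with a made-up mailto URL:

```python
from urllib.parse import parse_qs, urlencode, urlparse

# Hypothetical share URL, built the way email_url() plausibly builds one.
query = urlencode({'subject': 'THE EMAIL SUBJECT',
                   'body': 'THE EMAIL BODY\nAND SECOND LINE'})
email_url = 'mailto:?' + query

fields = parse_qs(urlparse(email_url).query)
assert fields['subject'][0] == 'THE EMAIL SUBJECT'
assert fields['body'][0] == 'THE EMAIL BODY\nAND SECOND LINE'
print(fields)
```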
d0461fa033bdca4fffeff718219f8b71123449d7 | pskb_website/models/__init__.py | pskb_website/models/__init__.py | """
Public model API
"""
from .article import search_for_article
from .article import get_available_articles
from .article import read_article
from .article import save_article
from .article import delete_article
from .article import branch_article
from .article import branch_or_save_article
from .article import get_articles_for_author
from .article import get_public_articles_for_author
from .article import save_article_meta_data
from .article import find_article_by_title
from .article import change_article_stack
from .file import read_file
from .file import read_redirects
from .file import update_article_listing
from .file import published_articles
from .file import in_review_articles
from .file import draft_articles
from .user import find_user
from .email_list import add_subscriber
from .image import save_image
from .lib import to_json
| """
Public model API
"""
from .article import search_for_article
from .article import get_available_articles
from .article import read_article
from .article import save_article
from .article import delete_article
from .article import branch_article
from .article import branch_or_save_article
from .article import get_articles_for_author
from .article import get_public_articles_for_author
from .article import find_article_by_title
from .article import change_article_stack
from .file import read_file
from .file import read_redirects
from .file import update_article_listing
from .user import find_user
from .email_list import add_subscriber
from .image import save_image
from .lib import to_json
| Remove some functions from exported model API that are not used outside model layer | Remove some functions from exported model API that are not used outside model layer
- Just some refactoring to trim down the number of things exported that aren't
necessary at this time.
| Python | agpl-3.0 | paulocheque/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms,paulocheque/guides-cms,paulocheque/guides-cms,pluralsight/guides-cms | ---
+++
@@ -11,16 +11,12 @@
from .article import branch_or_save_article
from .article import get_articles_for_author
from .article import get_public_articles_for_author
-from .article import save_article_meta_data
from .article import find_article_by_title
from .article import change_article_stack
from .file import read_file
from .file import read_redirects
from .file import update_article_listing
-from .file import published_articles
-from .file import in_review_articles
-from .file import draft_articles
from .user import find_user
|
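The commit above trims the package's public surface by deleting re-exports from `__init__.py`. A single-module sketch of the same idea, with `__all__` making the exported contract explicit (module and function names are illustrative, not pskb's real API):

```python
# public_api.py -- illustrative module, not the real pskb package.
__all__ = ['read_article', 'save_article']   # the exported contract

def read_article(title):
    return {'title': title}

def save_article(article):
    return True

def _draft_articles():
    # Internal helper: the leading underscore plus omission from __all__
    # keeps it out of `from public_api import *`.
    return []

print(__all__)
```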
3a4ed7370455467240647a15a236c18012e9a5f1 | Problem054/Python/solution_1.py | Problem054/Python/solution_1.py | from collections import Counter
hands = [line.split(' ') for line in open('p054_poker.txt')]
values = {'2': 2, '3': 3, '4': 4, '5': 5, '6': 6, '7': 7, '8': 8, '9': 9, 'T': 10, 'J': 11, 'Q': 12, 'K': 13, 'A': 14}
straights = [(v, v - 1, v - 2, v - 3, v - 4) for v in range(14, 5, -1)]
ranks = [(1, 1, 1, 1, 1), (2, 1, 1, 1), (2, 2, 1), (3, 1, 1), (3, 2), (4, 1)]
def calculate_rank(hand):
score = list(zip(*sorted(((v, values[k]) for k, v in Counter(x[0] for x in hand).items()), reverse=True)))
score[0] = ranks.index(score[0])
if len(set(card[1] for card in hand)) == 1:
score[0] = 5 # flush
if score[1] in straights:
score[0] = 4 # straight
return score
p1_wins = sum(calculate_rank(hand[:5]) > calculate_rank(hand[5:]) for hand in hands)
print(f'{p1_wins}')
| from collections import Counter
hands = [line.split(' ') for line in open('p054_poker.txt')]
values = {'2': 2, '3': 3, '4': 4, '5': 5, '6': 6, '7': 7, '8': 8, '9': 9, 'T': 10, 'J': 11, 'Q': 12, 'K': 13, 'A': 14}
straights = [(v, v - 1, v - 2, v - 3, v - 4) for v in range(14, 5, -1)]
ranks = [(1, 1, 1, 1, 1), (2, 1, 1, 1), (2, 2, 1), (3, 1, 1), (3, 2), (4, 1)]
def calculate_rank(hand):
score = list(zip(*sorted(((v, values[k]) for k, v in Counter(x[0] for x in hand).items()), reverse=True)))
score[0] = ranks.index(score[0])
if len(set(card[1] for card in hand)) == 1:
score[0] = 5 # flush
if score[1] in straights:
score[0] = 4 # straight
return score
p1_wins = sum(calculate_rank(hand[:5]) > calculate_rank(hand[5:]) for hand in hands)
print(p1_wins)
| Make it compatible with Python 3.5+ | Make it compatible with Python 3.5+
Remove unnecessary usage of f-strings. | Python | mit | DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler,DestructHub/ProjectEuler | ---
+++
@@ -17,4 +17,4 @@
p1_wins = sum(calculate_rank(hand[:5]) > calculate_rank(hand[5:]) for hand in hands)
-print(f'{p1_wins}')
+print(p1_wins) |
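`print(f'{p1_wins}')` is a SyntaxError on Python 3.5, and the formatting was doing nothing anyway, so the fix drops it. Where formatting is genuinely needed, `str.format` is the pre-3.6 spelling:

```python
p1_wins = 376  # sample value standing in for the computed result

print(p1_wins)                                    # no formatting needed here
print('Player 1 wins {} hands'.format(p1_wins))  # works on Python 2.7 and 3.x
# print(f'Player 1 wins {p1_wins} hands')        # SyntaxError before Python 3.6
```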
02ef7f864c437e92fd8f2734d8325cca843e6341 | test_loading.py | test_loading.py | import random
from dmp import dmp
users = ["adam", "ben", "chris", "denis", "eric"]
file_types = ["fastq", "fasta", "bam", "bed", "hdf5", "tsv", "wig", "pdb"]
data_types = ['RNA-seq', 'MNase-Seq', 'ChIP-seq', 'WGBS', 'HiC']
compressed = [None, 'gzip', 'zip']
da = dmp(test=True)
for i in xrange(10):
u = random.choice(users)
ft = random.choice(file_types)
dt = random.choice(data_types)
z = random.choice(compressed)
f = '/tmp/test/' + dt + '/test_' + str(i) + '.' + ft
file_id = da.set_file(u, f, ft, dt, 9606, z)
if dt == 'RNA-seq' and ft == 'fastq' and random.choice([0,1]) == 1:
f = '/tmp/test/' + dt + '/test_' + str(i) + '.bam'
da.set_file(u, f, 'bam', dt, 9606, None, [file_id])
for u in users:
results = da.get_files_by_user(u)
print u, len(results)
| import random
from dmp import dmp
users = ["adam", "ben", "chris", "denis", "eric"]
file_types = ["fastq", "fasta", "bam", "bed", "hdf5", "tsv", "wig", "pdb"]
data_types = ['RNA-seq', 'MNase-Seq', 'ChIP-seq', 'WGBS', 'HiC']
compressed = [None, 'gzip', 'zip']
da = dmp(test=False)
for i in xrange(10):
u = random.choice(users)
ft = random.choice(file_types)
dt = random.choice(data_types)
z = random.choice(compressed)
f = '/tmp/test/' + dt + '/test_' + str(i) + '.' + ft
file_id = da.set_file(u, f, ft, dt, 9606, z)
if dt == 'RNA-seq' and ft == 'fastq' and random.choice([0,1]) == 1:
f = '/tmp/test/' + dt + '/test_' + str(i) + '.bam'
da.set_file(u, f, 'bam', dt, 9606, None, [file_id])
for u in users:
results = da.get_files_by_user(u)
print u, len(results)
 | Use the provided MongoDB rather than mongomock | Use the provided MongoDB rather than mongomock
| Python | apache-2.0 | Multiscale-Genomics/mg-dm-api,Multiscale-Genomics/mg-dm-api | ---
+++
@@ -7,7 +7,7 @@
data_types = ['RNA-seq', 'MNase-Seq', 'ChIP-seq', 'WGBS', 'HiC']
compressed = [None, 'gzip', 'zip']
-da = dmp(test=True)
+da = dmp(test=False)
for i in xrange(10):
u = random.choice(users) |
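The `test` flag toggles between an in-memory mongomock client and the real server. The `dmp` internals are not shown in this record, so the constructor below is an assumption about its shape (requires mongomock and pymongo to run):

```python
# Sketch of a test-flag toggle between mongomock and a live MongoDB.
def make_client(test=False, host='localhost', port=27017):
    if test:
        import mongomock             # in-memory double, no server required
        return mongomock.MongoClient()
    import pymongo                   # real driver for the provided MongoDB
    return pymongo.MongoClient(host, port)

client = make_client(test=True)
client.db.files.insert_one({'user': 'adam', 'file_type': 'bam'})
print(client.db.files.count_documents({'user': 'adam'}))  # 1
```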
77d264bd25e0556eb3680b845de22b62d2ebd3e6 | bouncer/embed_detector.py | bouncer/embed_detector.py | import fnmatch
import re
from urllib.parse import urlparse
# Hardcoded URL patterns where client is assumed to be embedded.
#
# Only the hostname and path are included in the pattern. The path must be
# specified; use "example.com/*" to match all URLs on a particular domain.
#
# Patterns are shell-style wildcards ('*' matches any number of chars, '?'
# matches a single char).
PATTERNS = [
"h.readthedocs.io/*",
"web.hypothes.is/blog/*",
]
COMPILED_PATTERNS = [re.compile(fnmatch.translate(pat)) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the pattern list ``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
| import fnmatch
import re
from urllib.parse import urlparse
# Hardcoded URL patterns where client is assumed to be embedded.
#
# Only the hostname and path are included in the pattern. The path must be
# specified; use "example.com/*" to match all URLs on a particular domain.
#
# Patterns are shell-style wildcards ('*' matches any number of chars, '?'
# matches a single char).
PATTERNS = [
# Hypothesis websites.
"h.readthedocs.io/*",
"web.hypothes.is/blog/*",
# Publisher partners:
# American Psychological Organization.
"psycnet.apa.org/fulltext/*",
"awspntest.apa.org/fulltext/*",
]
COMPILED_PATTERNS = [re.compile(fnmatch.translate(pat)) for pat in PATTERNS]
def url_embeds_client(url):
"""
Test whether ``url`` is known to embed the client.
This currently just tests the URL against the pattern list ``PATTERNS``.
Only the hostname and path of the URL are tested. Returns false for non-HTTP
URLs.
:return: True if the URL matches a pattern.
"""
parsed_url = urlparse(url)
if not parsed_url.scheme.startswith("http"):
return False
path = parsed_url.path
if not path:
path = "/"
netloc_and_path = parsed_url.netloc + path
for pat in COMPILED_PATTERNS:
if pat.fullmatch(netloc_and_path):
return True
return False
| Add APA websites to URL patterns where client is known to be embedded. | Add APA websites to URL patterns where client is known to be embedded.
URL patterns provided by Kadidra McCloud at APA.
Fixes https://github.com/hypothesis/product-backlog/issues/814
| Python | bsd-2-clause | hypothesis/bouncer,hypothesis/bouncer,hypothesis/bouncer | ---
+++
@@ -10,8 +10,15 @@
# Patterns are shell-style wildcards ('*' matches any number of chars, '?'
# matches a single char).
PATTERNS = [
+ # Hypothesis websites.
"h.readthedocs.io/*",
"web.hypothes.is/blog/*",
+
+ # Publisher partners:
+
+ # American Psychological Organization.
+ "psycnet.apa.org/fulltext/*",
+ "awspntest.apa.org/fulltext/*",
]
COMPILED_PATTERNS = [re.compile(fnmatch.translate(pat)) for pat in PATTERNS] |
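A condensed, runnable check of the new APA patterns against the matching logic above (`fnmatch.translate` compiled to a regex, then `fullmatch` on netloc plus path); the example article path is made up:

```python
import fnmatch
import re
from urllib.parse import urlparse

patterns = [re.compile(fnmatch.translate(p))
            for p in ('psycnet.apa.org/fulltext/*',
                      'awspntest.apa.org/fulltext/*')]

def embeds_client(url):
    parsed = urlparse(url)
    if not parsed.scheme.startswith('http'):
        return False
    target = parsed.netloc + (parsed.path or '/')
    return any(pat.fullmatch(target) for pat in patterns)

print(embeds_client('https://psycnet.apa.org/fulltext/2019-01234-001'))  # True
print(embeds_client('https://psycnet.apa.org/search'))                   # False
```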
ab035185e2c2023280c29aa5239deac820ec873d | openprescribing/openprescribing/settings/e2etest.py | openprescribing/openprescribing/settings/e2etest.py | from __future__ import absolute_import
from .test import *
DATABASES = {
"default": {
"ENGINE": "django.contrib.gis.db.backends.postgis",
"NAME": utils.get_env_setting("E2E_DB_NAME"),
"USER": utils.get_env_setting("DB_USER"),
"PASSWORD": utils.get_env_setting("DB_PASS"),
"HOST": utils.get_env_setting("DB_HOST", "127.0.0.1"),
}
}
PIPELINE_METADATA_DIR = os.path.join(APPS_ROOT, "pipeline", "metadata")
PIPELINE_DATA_BASEDIR = os.path.join(APPS_ROOT, "pipeline", "e2e-test-data", "data", "")
PIPELINE_IMPORT_LOG_PATH = os.path.join(
APPS_ROOT, "pipeline", "e2e-test-data", "log.json"
)
SLACK_SENDING_ACTIVE = True
BQ_DEFAULT_TABLE_EXPIRATION_MS = 24 * 60 * 60 * 1000 # 24 hours
| from __future__ import absolute_import
from .test import *
DATABASES = {
"default": {
"ENGINE": "django.contrib.gis.db.backends.postgis",
"NAME": utils.get_env_setting("E2E_DB_NAME"),
"USER": utils.get_env_setting("DB_USER"),
"PASSWORD": utils.get_env_setting("DB_PASS"),
"HOST": utils.get_env_setting("DB_HOST", "127.0.0.1"),
}
}
PIPELINE_METADATA_DIR = os.path.join(APPS_ROOT, "pipeline", "metadata")
PIPELINE_DATA_BASEDIR = os.path.join(APPS_ROOT, "pipeline", "e2e-test-data", "data", "")
PIPELINE_IMPORT_LOG_PATH = os.path.join(
APPS_ROOT, "pipeline", "e2e-test-data", "log.json"
)
SLACK_SENDING_ACTIVE = True
BQ_DEFAULT_TABLE_EXPIRATION_MS = 24 * 60 * 60 * 1000 # 24 hours
# We want to use the real measure definitions, not the test ones!
MEASURE_DEFINITIONS_PATH = os.path.join(APPS_ROOT, "measure_definitions")
| Use real measure definitions in e2e tests | Use real measure definitions in e2e tests | Python | mit | ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc | ---
+++
@@ -22,3 +22,6 @@
SLACK_SENDING_ACTIVE = True
BQ_DEFAULT_TABLE_EXPIRATION_MS = 24 * 60 * 60 * 1000 # 24 hours
+
+# We want to use the real measure definitions, not the test ones!
+MEASURE_DEFINITIONS_PATH = os.path.join(APPS_ROOT, "measure_definitions") |
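The e2e settings module layers on top of the test settings (`from .test import *`) and then overrides individual names, so the real measure definitions win. A single-file sketch of that override order, with illustrative paths rather than the project's real ones:

```python
# Stand-in for "from .test import *" followed by module-level overrides.
test_settings = {
    'MEASURE_DEFINITIONS_PATH': '/app/tests/measure_definitions',
    'SLACK_SENDING_ACTIVE': False,
}

e2e_settings = dict(test_settings)      # inherit everything from test.py
e2e_settings.update(                    # later assignments win
    MEASURE_DEFINITIONS_PATH='/app/measure_definitions',  # real definitions
    SLACK_SENDING_ACTIVE=True,
)

print(e2e_settings['MEASURE_DEFINITIONS_PATH'])  # /app/measure_definitions
```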