commit
stringlengths 40
40
| old_file
stringlengths 5
117
| new_file
stringlengths 5
117
| old_contents
stringlengths 0
1.93k
| new_contents
stringlengths 19
3.3k
| subject
stringlengths 17
320
| message
stringlengths 18
3.28k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
42.4k
| completion
stringlengths 152
6.66k
| prompt
stringlengths 21
3.65k
|
---|---|---|---|---|---|---|---|---|---|---|---|
6c54fc230e8c889a2351f20b524382a5c6e29d1c | examples/apps.py | examples/apps.py | # coding: utf-8
import os
import sys
from pysuru import TsuruClient
TSURU_TARGET = os.environ.get('TSURU_TARGET', None)
TSURU_TOKEN = os.environ.get('TSURU_TOKEN', None)
if not TSURU_TARGET or not TSURU_TOKEN:
print('You must set TSURU_TARGET and TSURU_TOKEN.')
sys.exit(1)
api = TsuruClient(TSURU_TARGET, TSURU_TOKEN)
# List all apps that this token has access to
for app in api.apps:
print(app.name)
# Update one specific app
api.apps.update('my-awesome-app', {'description': 'My awesome app'})
# Get information for one app
app = App.get('my-awesome-app')
print('%s: %s' % (app.name, app.description))
# List all services instances for app
for service in app.services:
print('Service: %s' % service.name)
| # coding: utf-8
import os
import sys
from pysuru import TsuruClient
TSURU_TARGET = os.environ.get('TSURU_TARGET', None)
TSURU_TOKEN = os.environ.get('TSURU_TOKEN', None)
if not TSURU_TARGET or not TSURU_TOKEN:
print('You must set TSURU_TARGET and TSURU_TOKEN env variables.')
sys.exit(1)
# Creating TsuruClient instance
tsuru = TsuruClient(TSURU_TARGET, TSURU_TOKEN)
# List all apps that this user has access to
for app in tsuru.apps.list():
print('App: {}'.format(app.name))
# Get information for one app
app = tsuru.apps.get('my-awesome-app')
print('{app.name}: {app.description}'.format(app=app))
# Update specific app
tsuru.apps.update('my-awesome-app', {'description': 'My new awesome description'})
| Update examples to match docs | Update examples to match docs
Use the interface defined in the docs in the examples scripts.
| Python | mit | rcmachado/pysuru | <REPLACE_OLD> TSURU_TOKEN.')
<REPLACE_NEW> TSURU_TOKEN env variables.')
<REPLACE_END> <REPLACE_OLD> sys.exit(1)
api <REPLACE_NEW> sys.exit(1)
# Creating TsuruClient instance
tsuru <REPLACE_END> <REPLACE_OLD> token <REPLACE_NEW> user <REPLACE_END> <REPLACE_OLD> api.apps:
<REPLACE_NEW> tsuru.apps.list():
<REPLACE_END> <REPLACE_OLD> print(app.name)
# Update one specific app
api.apps.update('my-awesome-app', {'description': 'My awesome app'})
# <REPLACE_NEW> print('App: {}'.format(app.name))
# <REPLACE_END> <REPLACE_OLD> App.get('my-awesome-app')
print('%s: %s' % (app.name, app.description))
# List all services instances for app
for service in app.services:
print('Service: %s' % service.name)
<REPLACE_NEW> tsuru.apps.get('my-awesome-app')
print('{app.name}: {app.description}'.format(app=app))
# Update specific app
tsuru.apps.update('my-awesome-app', {'description': 'My new awesome description'})
<REPLACE_END> <|endoftext|> # coding: utf-8
import os
import sys
from pysuru import TsuruClient
TSURU_TARGET = os.environ.get('TSURU_TARGET', None)
TSURU_TOKEN = os.environ.get('TSURU_TOKEN', None)
if not TSURU_TARGET or not TSURU_TOKEN:
print('You must set TSURU_TARGET and TSURU_TOKEN env variables.')
sys.exit(1)
# Creating TsuruClient instance
tsuru = TsuruClient(TSURU_TARGET, TSURU_TOKEN)
# List all apps that this user has access to
for app in tsuru.apps.list():
print('App: {}'.format(app.name))
# Get information for one app
app = tsuru.apps.get('my-awesome-app')
print('{app.name}: {app.description}'.format(app=app))
# Update specific app
tsuru.apps.update('my-awesome-app', {'description': 'My new awesome description'})
| Update examples to match docs
Use the interface defined in the docs in the examples scripts.
# coding: utf-8
import os
import sys
from pysuru import TsuruClient
TSURU_TARGET = os.environ.get('TSURU_TARGET', None)
TSURU_TOKEN = os.environ.get('TSURU_TOKEN', None)
if not TSURU_TARGET or not TSURU_TOKEN:
print('You must set TSURU_TARGET and TSURU_TOKEN.')
sys.exit(1)
api = TsuruClient(TSURU_TARGET, TSURU_TOKEN)
# List all apps that this token has access to
for app in api.apps:
print(app.name)
# Update one specific app
api.apps.update('my-awesome-app', {'description': 'My awesome app'})
# Get information for one app
app = App.get('my-awesome-app')
print('%s: %s' % (app.name, app.description))
# List all services instances for app
for service in app.services:
print('Service: %s' % service.name)
|
98a4cd76ce9ecb81675ebaa29b249a8d80347e0d | zc-list.py | zc-list.py | #!/usr/bin/env python
import client_wrap
KEY_LONG = "key1"
DATA_LONG = 1024
KEY_DOUBLE = "key2"
DATA_DOUBLE = 100.53
KEY_STRING = "key3"
DATA_STRING = "test data"
def init_data(client):
client.WriteLong(KEY_LONG, DATA_LONG)
client.WriteDouble(KEY_DOUBLE, DATA_DOUBLE)
client.WriteString(KEY_STRING, DATA_STRING)
def check_data(client):
assert DATA_LONG == client.ReadLong(KEY_LONG)
assert DATA_DOUBLE == client.ReadDouble(KEY_DOUBLE)
assert DATA_STRING == client.ReadString(KEY_STRING)
def main():
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
init_data(client)
check_data(client)
if __name__ == "__main__":
main()
| #!/usr/bin/env python
import client_wrap
def main():
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
key_str = client.GetKeys()
keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
return
print keys
if __name__ == "__main__":
main()
| Implement displaying of the current key list | Implement displaying of the current key list
| Python | agpl-3.0 | ellysh/zero-cache-utils,ellysh/zero-cache-utils | <REPLACE_OLD> client_wrap
KEY_LONG = "key1"
DATA_LONG = 1024
KEY_DOUBLE = "key2"
DATA_DOUBLE = 100.53
KEY_STRING = "key3"
DATA_STRING = "test data"
def init_data(client):
client.WriteLong(KEY_LONG, DATA_LONG)
client.WriteDouble(KEY_DOUBLE, DATA_DOUBLE)
client.WriteString(KEY_STRING, DATA_STRING)
def check_data(client):
assert DATA_LONG == client.ReadLong(KEY_LONG)
assert DATA_DOUBLE == client.ReadDouble(KEY_DOUBLE)
assert DATA_STRING == client.ReadString(KEY_STRING)
def <REPLACE_NEW> client_wrap
def <REPLACE_END> <REPLACE_OLD> 0)
<REPLACE_NEW> 0)
<REPLACE_END> <REPLACE_OLD> init_data(client)
<REPLACE_NEW> key_str = client.GetKeys()
<REPLACE_END> <REPLACE_OLD> check_data(client)
if <REPLACE_NEW> keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
return
print keys
if <REPLACE_END> <|endoftext|> #!/usr/bin/env python
import client_wrap
def main():
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
key_str = client.GetKeys()
keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
return
print keys
if __name__ == "__main__":
main()
| Implement displaying of the current key list
#!/usr/bin/env python
import client_wrap
KEY_LONG = "key1"
DATA_LONG = 1024
KEY_DOUBLE = "key2"
DATA_DOUBLE = 100.53
KEY_STRING = "key3"
DATA_STRING = "test data"
def init_data(client):
client.WriteLong(KEY_LONG, DATA_LONG)
client.WriteDouble(KEY_DOUBLE, DATA_DOUBLE)
client.WriteString(KEY_STRING, DATA_STRING)
def check_data(client):
assert DATA_LONG == client.ReadLong(KEY_LONG)
assert DATA_DOUBLE == client.ReadDouble(KEY_DOUBLE)
assert DATA_STRING == client.ReadString(KEY_STRING)
def main():
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
init_data(client)
check_data(client)
if __name__ == "__main__":
main()
|
3fc94b4cffcfd08b439386fb2b01aa1e12fec6d5 | iati/core/tests/test_data.py | iati/core/tests/test_data.py | """A module containing tests for the library representation of IATI data."""
import iati.core.data
class TestDatasets(object):
"""A container for tests relating to Datasets"""
pass
| """A module containing tests for the library representation of IATI data."""
import iati.core.data
class TestDatasets(object):
"""A container for tests relating to Datasets"""
def test_dataset_no_params(self):
"""Test Dataset creation with no parameters."""
pass
def test_dataset_valid_xml_string(self):
"""Test Dataset creation with a valid XML string that is not IATI data."""
pass
def test_dataset_valid_iati_string(self):
"""Test Dataset creation with a valid IATI XML string."""
pass
def test_dataset_invalid_xml_string(self):
"""Test Dataset creation with a string that is not valid XML."""
pass
def test_dataset_tree(self):
"""Test Dataset creation with an etree that is not valid IATI data."""
pass
def test_dataset_iati_tree(self):
"""Test Dataset creation with a valid IATI etree."""
pass
def test_dataset_no_params_strict(self):
"""Test Dataset creation with no parameters.
Strict IATI checks are enabled.
"""
pass
def test_dataset_valid_xml_string_strict(self):
"""Test Dataset creation with a valid XML string that is not IATI data.
Strict IATI checks are enabled.
"""
pass
def test_dataset_valid_iati_string_strict(self):
"""Test Dataset creation with a valid IATI XML string.
Strict IATI checks are enabled.
"""
pass
def test_dataset_invalid_xml_string_strict(self):
"""Test Dataset creation with a string that is not valid XML.
Strict IATI checks are enabled.
"""
pass
def test_dataset_tree_strict(self):
"""Test Dataset creation with an etree that is not valid IATI data.
Strict IATI checks are enabled.
"""
pass
def test_dataset_iati_tree_strict(self):
"""Test Dataset creation with a valid IATI etree.
Strict IATI checks are enabled.
"""
pass
| Test stubs for dataset creation | Test stubs for dataset creation
| Python | mit | IATI/iati.core,IATI/iati.core | <INSERT> def test_dataset_no_params(self):
"""Test Dataset creation with no parameters."""
pass
def test_dataset_valid_xml_string(self):
"""Test Dataset creation with a valid XML string that is not IATI data."""
pass
def test_dataset_valid_iati_string(self):
"""Test Dataset creation with a valid IATI XML string."""
pass
def test_dataset_invalid_xml_string(self):
"""Test Dataset creation with a string that is not valid XML."""
pass
def test_dataset_tree(self):
"""Test Dataset creation with an etree that is not valid IATI data."""
pass
def test_dataset_iati_tree(self):
"""Test Dataset creation with a valid IATI etree."""
pass
def test_dataset_no_params_strict(self):
"""Test Dataset creation with no parameters.
Strict IATI checks are enabled.
"""
pass
def test_dataset_valid_xml_string_strict(self):
"""Test Dataset creation with a valid XML string that is not IATI data.
Strict IATI checks are enabled.
"""
pass
def test_dataset_valid_iati_string_strict(self):
"""Test Dataset creation with a valid IATI XML string.
Strict IATI checks are enabled.
"""
pass
def test_dataset_invalid_xml_string_strict(self):
"""Test Dataset creation with a string that is not valid XML.
Strict IATI checks are enabled.
"""
pass
def test_dataset_tree_strict(self):
"""Test Dataset creation with an etree that is not valid IATI data.
Strict IATI checks are enabled.
"""
pass
def test_dataset_iati_tree_strict(self):
"""Test Dataset creation with a valid IATI etree.
Strict IATI checks are enabled.
"""
<INSERT_END> <|endoftext|> """A module containing tests for the library representation of IATI data."""
import iati.core.data
class TestDatasets(object):
"""A container for tests relating to Datasets"""
def test_dataset_no_params(self):
"""Test Dataset creation with no parameters."""
pass
def test_dataset_valid_xml_string(self):
"""Test Dataset creation with a valid XML string that is not IATI data."""
pass
def test_dataset_valid_iati_string(self):
"""Test Dataset creation with a valid IATI XML string."""
pass
def test_dataset_invalid_xml_string(self):
"""Test Dataset creation with a string that is not valid XML."""
pass
def test_dataset_tree(self):
"""Test Dataset creation with an etree that is not valid IATI data."""
pass
def test_dataset_iati_tree(self):
"""Test Dataset creation with a valid IATI etree."""
pass
def test_dataset_no_params_strict(self):
"""Test Dataset creation with no parameters.
Strict IATI checks are enabled.
"""
pass
def test_dataset_valid_xml_string_strict(self):
"""Test Dataset creation with a valid XML string that is not IATI data.
Strict IATI checks are enabled.
"""
pass
def test_dataset_valid_iati_string_strict(self):
"""Test Dataset creation with a valid IATI XML string.
Strict IATI checks are enabled.
"""
pass
def test_dataset_invalid_xml_string_strict(self):
"""Test Dataset creation with a string that is not valid XML.
Strict IATI checks are enabled.
"""
pass
def test_dataset_tree_strict(self):
"""Test Dataset creation with an etree that is not valid IATI data.
Strict IATI checks are enabled.
"""
pass
def test_dataset_iati_tree_strict(self):
"""Test Dataset creation with a valid IATI etree.
Strict IATI checks are enabled.
"""
pass
| Test stubs for dataset creation
"""A module containing tests for the library representation of IATI data."""
import iati.core.data
class TestDatasets(object):
"""A container for tests relating to Datasets"""
pass
|
cf49e996f07a2fd7107b953369fdccdc850d51d8 | test_tws/test_EReader.py | test_tws/test_EReader.py | '''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = EReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
| '''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = self.parent.createReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
| Create EReader object using EClientSocket.createReader() | Create EReader object using EClientSocket.createReader() | Python | bsd-3-clause | kbluck/pytws,kbluck/pytws | <REPLACE_OLD> EReader(self.parent, <REPLACE_NEW> self.parent.createReader(self.parent, <REPLACE_END> <|endoftext|> '''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = self.parent.createReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
| Create EReader object using EClientSocket.createReader()
'''Unit test package for module "tws._EReader".'''
__copyright__ = "Copyright (c) 2008 Kevin J Bluck"
__version__ = "$Id$"
import unittest
from StringIO import StringIO
from tws import EClientSocket, EReader
from test_tws import mock_wrapper
class test_EReader(unittest.TestCase):
'''Test class "tws.EReader"'''
def setUp(self):
self.wrapper = mock_wrapper()
self.parent = EClientSocket(self.wrapper)
self.stream = StringIO()
self.reader = EReader(self.parent, self.stream)
def test_init(self):
self.assertTrue(EReader(self.parent, self.stream))
if __debug__:
self.assertRaises(AssertionError, EReader, 1, self.stream)
self.assertRaises(AssertionError, EReader, self.parent, 1)
|
ab5ebb50019add34333edb04cc96f7f55fce8d1c | src/toil/utils/__init__.py | src/toil/utils/__init__.py | from __future__ import absolute_import
from toil import version
import logging
logger = logging.getLogger(__name__)
def addBasicProvisionerOptions(parser):
parser.add_argument("--version", action='version', version=version)
parser.add_argument('-p', "--provisioner", dest='provisioner', choices=['aws', 'azure', 'gce'], required=False, default="aws",
help="The provisioner for cluster auto-scaling. Only aws is currently "
"supported")
try:
from toil.provisioners.aws import getCurrentAWSZone
currentZone = getCurrentAWSZone()
except ImportError:
currentZone = None
zoneString = currentZone if currentZone else 'No zone could be determined'
parser.add_argument('-z', '--zone', dest='zone', required=False, default=currentZone,
help="The AWS availability zone of the master. This parameter can also be "
"set via the TOIL_AWS_ZONE environment variable, or by the ec2_region_name "
"parameter in your .boto file, or derived from the instance metadata if "
"using this utility on an existing EC2 instance. "
"Currently: %s" % zoneString)
parser.add_argument("clusterName", help="The name that the cluster will be identifiable by. "
"Must be lowercase and may not contain the '_' "
"character.")
return parser
| from __future__ import absolute_import
from toil import version
import logging
import os
logger = logging.getLogger(__name__)
def addBasicProvisionerOptions(parser):
parser.add_argument("--version", action='version', version=version)
parser.add_argument('-p', "--provisioner", dest='provisioner', choices=['aws', 'azure', 'gce'], required=False, default="aws",
help="The provisioner for cluster auto-scaling. Only aws is currently "
"supported")
parser.add_argument('-z', '--zone', dest='zone', required=False, default=None,
help="The availability zone of the master. This parameter can also be set via the 'TOIL_X_ZONE' "
"environment variable, where X is AWS, GCE, or AZURE, or by the ec2_region_name parameter "
"in your .boto file, or derived from the instance metadata if using this utility on an "
"existing EC2 instance.")
parser.add_argument("clusterName", help="The name that the cluster will be identifiable by. "
"Must be lowercase and may not contain the '_' "
"character.")
return parser
def getZoneFromEnv(provisioner):
"""
Find the zone specified in an environment variable.
The user can specify zones in environment variables in leiu of writing them at the commandline every time.
Given a provisioner, this method will look for the stored value and return it.
:param str provisioner: One of the supported provisioners ('azure', 'aws', 'gce')
:rtype: str
:return: None or the value stored in a 'TOIL_X_ZONE' environment variable.
"""
return os.environ.get('TOIL_' + provisioner.upper() + '_ZONE')
| Remove default for zone, add method for searching for specified zone in environ vars. | Remove default for zone, add method for searching for specified zone in environ vars.
| Python | apache-2.0 | BD2KGenomics/slugflow,BD2KGenomics/slugflow | <REPLACE_OLD> logging
logger <REPLACE_NEW> logging
import os
logger <REPLACE_END> <DELETE> try:
from toil.provisioners.aws import getCurrentAWSZone
currentZone = getCurrentAWSZone()
except ImportError:
currentZone = None
zoneString = currentZone if currentZone else 'No zone could be determined'
<DELETE_END> <REPLACE_OLD> default=currentZone,
<REPLACE_NEW> default=None,
<REPLACE_END> <DELETE> AWS <DELETE_END> <REPLACE_OLD> "
"set <REPLACE_NEW> set <REPLACE_END> <REPLACE_OLD> TOIL_AWS_ZONE environment <REPLACE_NEW> 'TOIL_X_ZONE' "
"environment <REPLACE_END> <INSERT> where X is AWS, GCE, or AZURE, <INSERT_END> <REPLACE_OLD> "
"parameter in <REPLACE_NEW> parameter "
"in <REPLACE_END> <REPLACE_OLD> "
"using <REPLACE_NEW> using <REPLACE_END> <REPLACE_OLD> existing <REPLACE_NEW> "
"existing <REPLACE_END> <REPLACE_OLD> instance. "
"Currently: %s" % zoneString)
<REPLACE_NEW> instance.")
<REPLACE_END> <REPLACE_OLD> parser
<REPLACE_NEW> parser
def getZoneFromEnv(provisioner):
"""
Find the zone specified in an environment variable.
The user can specify zones in environment variables in leiu of writing them at the commandline every time.
Given a provisioner, this method will look for the stored value and return it.
:param str provisioner: One of the supported provisioners ('azure', 'aws', 'gce')
:rtype: str
:return: None or the value stored in a 'TOIL_X_ZONE' environment variable.
"""
return os.environ.get('TOIL_' + provisioner.upper() + '_ZONE')
<REPLACE_END> <|endoftext|> from __future__ import absolute_import
from toil import version
import logging
import os
logger = logging.getLogger(__name__)
def addBasicProvisionerOptions(parser):
parser.add_argument("--version", action='version', version=version)
parser.add_argument('-p', "--provisioner", dest='provisioner', choices=['aws', 'azure', 'gce'], required=False, default="aws",
help="The provisioner for cluster auto-scaling. Only aws is currently "
"supported")
parser.add_argument('-z', '--zone', dest='zone', required=False, default=None,
help="The availability zone of the master. This parameter can also be set via the 'TOIL_X_ZONE' "
"environment variable, where X is AWS, GCE, or AZURE, or by the ec2_region_name parameter "
"in your .boto file, or derived from the instance metadata if using this utility on an "
"existing EC2 instance.")
parser.add_argument("clusterName", help="The name that the cluster will be identifiable by. "
"Must be lowercase and may not contain the '_' "
"character.")
return parser
def getZoneFromEnv(provisioner):
"""
Find the zone specified in an environment variable.
The user can specify zones in environment variables in leiu of writing them at the commandline every time.
Given a provisioner, this method will look for the stored value and return it.
:param str provisioner: One of the supported provisioners ('azure', 'aws', 'gce')
:rtype: str
:return: None or the value stored in a 'TOIL_X_ZONE' environment variable.
"""
return os.environ.get('TOIL_' + provisioner.upper() + '_ZONE')
| Remove default for zone, add method for searching for specified zone in environ vars.
from __future__ import absolute_import
from toil import version
import logging
logger = logging.getLogger(__name__)
def addBasicProvisionerOptions(parser):
parser.add_argument("--version", action='version', version=version)
parser.add_argument('-p', "--provisioner", dest='provisioner', choices=['aws', 'azure', 'gce'], required=False, default="aws",
help="The provisioner for cluster auto-scaling. Only aws is currently "
"supported")
try:
from toil.provisioners.aws import getCurrentAWSZone
currentZone = getCurrentAWSZone()
except ImportError:
currentZone = None
zoneString = currentZone if currentZone else 'No zone could be determined'
parser.add_argument('-z', '--zone', dest='zone', required=False, default=currentZone,
help="The AWS availability zone of the master. This parameter can also be "
"set via the TOIL_AWS_ZONE environment variable, or by the ec2_region_name "
"parameter in your .boto file, or derived from the instance metadata if "
"using this utility on an existing EC2 instance. "
"Currently: %s" % zoneString)
parser.add_argument("clusterName", help="The name that the cluster will be identifiable by. "
"Must be lowercase and may not contain the '_' "
"character.")
return parser
|
01d35d13aaedea0ef87ae1d78ee1368e5e0f407c | corehq/apps/locations/management/commands/set_location_id.py | corehq/apps/locations/management/commands/set_location_id.py | from django.core.management.base import BaseCommand
from dimagi.utils.couch.database import iter_docs
from corehq.apps.users.models import CouchUser, CommCareUser
class Command(BaseCommand):
help = ''
def handle(self, *args, **options):
self.stdout.write("Population location_id field...\n")
relevant_ids = set([r['id'] for r in CouchUser.get_db().view(
'users/by_username',
reduce=False,
).all()])
to_save = []
for user_doc in iter_docs(CouchUser.get_db(), relevant_ids):
user = CommCareUser.get(user_doc['_id'])
if user._locations:
user_doc['location_id'] = user._locations[0]._id
to_save.append(user_doc)
if len(to_save) > 500:
CouchUser.get_db().bulk_save(to_save)
to_save = []
if to_save:
CouchUser.get_db().bulk_save(to_save)
| Move migration into main branch | Move migration into main branch
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq | <REPLACE_OLD> <REPLACE_NEW> from django.core.management.base import BaseCommand
from dimagi.utils.couch.database import iter_docs
from corehq.apps.users.models import CouchUser, CommCareUser
class Command(BaseCommand):
help = ''
def handle(self, *args, **options):
self.stdout.write("Population location_id field...\n")
relevant_ids = set([r['id'] for r in CouchUser.get_db().view(
'users/by_username',
reduce=False,
).all()])
to_save = []
for user_doc in iter_docs(CouchUser.get_db(), relevant_ids):
user = CommCareUser.get(user_doc['_id'])
if user._locations:
user_doc['location_id'] = user._locations[0]._id
to_save.append(user_doc)
if len(to_save) > 500:
CouchUser.get_db().bulk_save(to_save)
to_save = []
if to_save:
CouchUser.get_db().bulk_save(to_save)
<REPLACE_END> <|endoftext|> from django.core.management.base import BaseCommand
from dimagi.utils.couch.database import iter_docs
from corehq.apps.users.models import CouchUser, CommCareUser
class Command(BaseCommand):
help = ''
def handle(self, *args, **options):
self.stdout.write("Population location_id field...\n")
relevant_ids = set([r['id'] for r in CouchUser.get_db().view(
'users/by_username',
reduce=False,
).all()])
to_save = []
for user_doc in iter_docs(CouchUser.get_db(), relevant_ids):
user = CommCareUser.get(user_doc['_id'])
if user._locations:
user_doc['location_id'] = user._locations[0]._id
to_save.append(user_doc)
if len(to_save) > 500:
CouchUser.get_db().bulk_save(to_save)
to_save = []
if to_save:
CouchUser.get_db().bulk_save(to_save)
| Move migration into main branch
|
|
eed413229978523b41a637c68c34100a31270643 | scripts/TestHarness/testers/RavenUtils.py | scripts/TestHarness/testers/RavenUtils.py | import os
import subprocess
def inPython3():
return os.environ.get("CHECK_PYTHON3","0") == "1"
def checkForMissingModules():
missing = []
too_old = []
to_try = [("numpy",'numpy.version.version',"1.7"),
("h5py",'',''),
("scipy",'scipy.__version__',"0.12"),
("sklearn",'sklearn.__version__',"0.14"),
("matplotlib",'matplotlib.__version__',"1.4")]
for i,fv,ev in to_try:
if len(fv) > 0:
check = ';import sys; sys.exit(not '+fv+' >= "'+ev+'")'
else:
check = ''
if inPython3():
python = 'python3'
else:
python = 'python'
result = subprocess.call([python,'-c','import '+i])
if result != 0:
missing.append(i)
else:
result = subprocess.call([python,'-c','import '+i+check])
if result != 0:
too_old.append(i+" should be at least version "+ev)
return missing,too_old
| import os
import subprocess
def inPython3():
return os.environ.get("CHECK_PYTHON3","0") == "1"
def checkForMissingModules():
missing = []
too_old = []
to_try = [("numpy",'numpy.version.version',"1.7"),
("h5py",'',''),
("scipy",'scipy.__version__',"0.12"),
("sklearn",'sklearn.__version__',"0.14"),
("matplotlib",'matplotlib.__version__',"1.3")]
for i,fv,ev in to_try:
if len(fv) > 0:
check = ';import sys; sys.exit(not '+fv+' >= "'+ev+'")'
else:
check = ''
if inPython3():
python = 'python3'
else:
python = 'python'
result = subprocess.call([python,'-c','import '+i])
if result != 0:
missing.append(i)
else:
result = subprocess.call([python,'-c','import '+i+check])
if result != 0:
too_old.append(i+" should be at least version "+ev)
return missing,too_old
| Decrease the needed matplotlib to 1.3, to make it easier to get installed. | Decrease the needed matplotlib to 1.3, to make it easier to get installed.
| Python | apache-2.0 | joshua-cogliati-inl/raven,idaholab/raven,idaholab/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,idaholab/raven,idaholab/raven,idaholab/raven,idaholab/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,idaholab/raven | <REPLACE_OLD> ("matplotlib",'matplotlib.__version__',"1.4")]
<REPLACE_NEW> ("matplotlib",'matplotlib.__version__',"1.3")]
<REPLACE_END> <|endoftext|> import os
import subprocess
def inPython3():
return os.environ.get("CHECK_PYTHON3","0") == "1"
def checkForMissingModules():
missing = []
too_old = []
to_try = [("numpy",'numpy.version.version',"1.7"),
("h5py",'',''),
("scipy",'scipy.__version__',"0.12"),
("sklearn",'sklearn.__version__',"0.14"),
("matplotlib",'matplotlib.__version__',"1.3")]
for i,fv,ev in to_try:
if len(fv) > 0:
check = ';import sys; sys.exit(not '+fv+' >= "'+ev+'")'
else:
check = ''
if inPython3():
python = 'python3'
else:
python = 'python'
result = subprocess.call([python,'-c','import '+i])
if result != 0:
missing.append(i)
else:
result = subprocess.call([python,'-c','import '+i+check])
if result != 0:
too_old.append(i+" should be at least version "+ev)
return missing,too_old
| Decrease the needed matplotlib to 1.3, to make it easier to get installed.
import os
import subprocess
def inPython3():
return os.environ.get("CHECK_PYTHON3","0") == "1"
def checkForMissingModules():
missing = []
too_old = []
to_try = [("numpy",'numpy.version.version',"1.7"),
("h5py",'',''),
("scipy",'scipy.__version__',"0.12"),
("sklearn",'sklearn.__version__',"0.14"),
("matplotlib",'matplotlib.__version__',"1.4")]
for i,fv,ev in to_try:
if len(fv) > 0:
check = ';import sys; sys.exit(not '+fv+' >= "'+ev+'")'
else:
check = ''
if inPython3():
python = 'python3'
else:
python = 'python'
result = subprocess.call([python,'-c','import '+i])
if result != 0:
missing.append(i)
else:
result = subprocess.call([python,'-c','import '+i+check])
if result != 0:
too_old.append(i+" should be at least version "+ev)
return missing,too_old
|
a501b99fa60ca5118d2a0e0be4e8c2dff5bd385d | ci/check-benchmark.py | ci/check-benchmark.py | #!/usr/bin/env python3
import json
import sys
def run_compare(report):
with open(report) as f:
doc = json.load(f)
for testcase in doc:
measurements = testcase['measurements']
time = float(measurements[0]["time"])
if time < 0:
continue
if time > 0.05:
print("More than 5% performance decrease, considering it a failure")
sys.exit(2)
def main(argv):
if len(argv) != 2:
print(f'Usage: {argv[0]} <path-to-compare.json>')
sys.exit(1)
run_compare(argv[1])
if __name__ == '__main__':
main(sys.argv)
| Add a script to process benchmark comparisons | CI: Add a script to process benchmark comparisons
| Python | lgpl-2.1 | chouquette/medialibrary,chouquette/medialibrary,chouquette/medialibrary,chouquette/medialibrary | <INSERT> #!/usr/bin/env python3
import json
import sys
def run_compare(report):
<INSERT_END> <INSERT> with open(report) as f:
doc = json.load(f)
for testcase in doc:
measurements = testcase['measurements']
time = float(measurements[0]["time"])
if time < 0:
continue
if time > 0.05:
print("More than 5% performance decrease, considering it a failure")
sys.exit(2)
def main(argv):
if len(argv) != 2:
print(f'Usage: {argv[0]} <path-to-compare.json>')
sys.exit(1)
run_compare(argv[1])
if __name__ == '__main__':
main(sys.argv)
<INSERT_END> <|endoftext|> #!/usr/bin/env python3
import json
import sys
def run_compare(report):
with open(report) as f:
doc = json.load(f)
for testcase in doc:
measurements = testcase['measurements']
time = float(measurements[0]["time"])
if time < 0:
continue
if time > 0.05:
print("More than 5% performance decrease, considering it a failure")
sys.exit(2)
def main(argv):
if len(argv) != 2:
print(f'Usage: {argv[0]} <path-to-compare.json>')
sys.exit(1)
run_compare(argv[1])
if __name__ == '__main__':
main(sys.argv)
| CI: Add a script to process benchmark comparisons
|
|
277ec688d7f92c415446e700db42386620d9b418 | satnogsclient/settings.py | satnogsclient/settings.py | from os import environ
DEMODULATION_COMMAND = environ.get('DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('DECODING_COMMAND', None)
| Add configuration file for client | Add configuration file for client
| Python | agpl-3.0 | adamkalis/satnogs-client,cshields/satnogs-client,adamkalis/satnogs-client,cshields/satnogs-client | <REPLACE_OLD> <REPLACE_NEW> from os import environ
DEMODULATION_COMMAND = environ.get('DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('DECODING_COMMAND', None)
<REPLACE_END> <|endoftext|> from os import environ
DEMODULATION_COMMAND = environ.get('DEMODULATION_COMMAND', None)
ENCODING_COMMAND = environ.get('ENCODING_COMMAND', None)
DECODING_COMMAND = environ.get('DECODING_COMMAND', None)
| Add configuration file for client
|
|
fc6202425e0c855dc29980904949b60c0ac48bbf | preparation/tools/build_assets.py | preparation/tools/build_assets.py | from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
with get_storage(resource_name.replace('Resource', '')) as out_storage:
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
| from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
trunk = resource_name.replace('Resource', '')
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
print("Finished {} generation".format(trunk))
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
| Add start/finish debug info while generating | Add start/finish debug info while generating
| Python | mit | hatbot-team/hatbot_resources | <INSERT> trunk = resource_name.replace('Resource', '')
<INSERT_END> <REPLACE_OLD> get_storage(resource_name.replace('Resource', '')) <REPLACE_NEW> get_storage(trunk) <REPLACE_END> <INSERT> print("Starting {} generation".format(trunk))
<INSERT_END> <REPLACE_OLD> out_storage.add_entry(r)
def <REPLACE_NEW> out_storage.add_entry(r)
print("Finished {} generation".format(trunk))
def <REPLACE_END> <|endoftext|> from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
trunk = resource_name.replace('Resource', '')
with get_storage(trunk) as out_storage:
print("Starting {} generation".format(trunk))
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
print("Finished {} generation".format(trunk))
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
| Add start/finish debug info while generating
from copy import copy
from preparation.resources.Resource import names_registered, resource_by_name
from hb_res.storage import get_storage
def rebuild_from_resource(resource_name: str):
resource = resource_by_name(resource_name)()
with get_storage(resource_name.replace('Resource', '')) as out_storage:
out_storage.clear()
for explanation in resource:
r = copy(explanation)
for functor in resource.modifiers:
if r is None:
break
r = functor(r)
if r is not None:
out_storage.add_entry(r)
def rebuild_all():
for name in names_registered():
rebuild_from_resource(name)
|
a870433fab72fe184f12353397ad916aabe5cb61 | pegasus/gtfar/__init__.py | pegasus/gtfar/__init__.py | # Copyright 2007-2014 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'Rajiv Mayani'
| # Copyright 2007-2014 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'Rajiv Mayani'
__VERSION__ = 0.1
from flask import Flask
from flask.ext.cache import Cache
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# Load configuration defaults
app.config.from_object('pegasus.gtfar.defaults')
db = SQLAlchemy(app)
cache = Cache(app)
| Add boilerplate code to configure the Flask app. | Add boilerplate code to configure the Flask app.
| Python | apache-2.0 | pegasus-isi/pegasus-gtfar,pegasus-isi/pegasus-gtfar,pegasus-isi/pegasus-gtfar,pegasus-isi/pegasus-gtfar | <REPLACE_OLD> Mayani'
<REPLACE_NEW> Mayani'
__VERSION__ = 0.1
from flask import Flask
from flask.ext.cache import Cache
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# Load configuration defaults
app.config.from_object('pegasus.gtfar.defaults')
db = SQLAlchemy(app)
cache = Cache(app)
<REPLACE_END> <|endoftext|> # Copyright 2007-2014 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'Rajiv Mayani'
__VERSION__ = 0.1
from flask import Flask
from flask.ext.cache import Cache
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
# Load configuration defaults
app.config.from_object('pegasus.gtfar.defaults')
db = SQLAlchemy(app)
cache = Cache(app)
| Add boilerplate code to configure the Flask app.
# Copyright 2007-2014 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'Rajiv Mayani'
|
0a33b7d8df544226df711db33a27f45421c19290 | setup.py | setup.py | from setuptools import setup
version = '2.0.0'
setup(name='pyactiveresource',
version=version,
description='ActiveResource for Python',
author='Shopify',
author_email='[email protected]',
url='https://github.com/Shopify/pyactiveresource/',
packages=['pyactiveresource', 'pyactiveresource/testing'],
license='MIT License',
test_suite='test',
tests_require=[
'python-dateutil<2.0', # >= 2.0 is for python>=3.0
'PyYAML',
],
platforms=['any'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
)
| from setuptools import setup
import sys
version = '2.0.0'
if sys.version_info >= (3,):
python_dateutils_version = 'python-dateutil>=2.0'
else:
python_dateutils_version = 'python-dateutil<2.0'
setup(name='pyactiveresource',
version=version,
description='ActiveResource for Python',
author='Shopify',
author_email='[email protected]',
url='https://github.com/Shopify/pyactiveresource/',
packages=['pyactiveresource', 'pyactiveresource/testing'],
license='MIT License',
test_suite='test',
tests_require=[
python_dateutils_version,
'PyYAML',
],
platforms=['any'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
)
| Use the right version of python-dateutils when using python 3. | Use the right version of python-dateutils when using python 3.
| Python | mit | metric-collective/pyactiveresource,piran/pyactiveresource,varesa/pyactiveresource,hockeybuggy/pyactiveresource | <REPLACE_OLD> setup
version <REPLACE_NEW> setup
import sys
version <REPLACE_END> <REPLACE_OLD> '2.0.0'
setup(name='pyactiveresource',
<REPLACE_NEW> '2.0.0'
if sys.version_info >= (3,):
python_dateutils_version = 'python-dateutil>=2.0'
else:
python_dateutils_version = 'python-dateutil<2.0'
setup(name='pyactiveresource',
<REPLACE_END> <REPLACE_OLD> 'python-dateutil<2.0', # >= 2.0 is for python>=3.0
<REPLACE_NEW> python_dateutils_version,
<REPLACE_END> <|endoftext|> from setuptools import setup
import sys
version = '2.0.0'
if sys.version_info >= (3,):
python_dateutils_version = 'python-dateutil>=2.0'
else:
python_dateutils_version = 'python-dateutil<2.0'
setup(name='pyactiveresource',
version=version,
description='ActiveResource for Python',
author='Shopify',
author_email='[email protected]',
url='https://github.com/Shopify/pyactiveresource/',
packages=['pyactiveresource', 'pyactiveresource/testing'],
license='MIT License',
test_suite='test',
tests_require=[
python_dateutils_version,
'PyYAML',
],
platforms=['any'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
)
| Use the right version of python-dateutils when using python 3.
from setuptools import setup
version = '2.0.0'
setup(name='pyactiveresource',
version=version,
description='ActiveResource for Python',
author='Shopify',
author_email='[email protected]',
url='https://github.com/Shopify/pyactiveresource/',
packages=['pyactiveresource', 'pyactiveresource/testing'],
license='MIT License',
test_suite='test',
tests_require=[
'python-dateutil<2.0', # >= 2.0 is for python>=3.0
'PyYAML',
],
platforms=['any'],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules']
)
|
0e2bfd59ca9db6568bac40504977d80b8ad84aba | helga_prod_fixer.py | helga_prod_fixer.py | import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
| import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Did you try rebooting {thing}? Try that first.',
'{thing} is only IE6 compatible. Make sure you\'re using the right browser.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
| Reboot and IE6 compatibility fixer messages | Reboot and IE6 compatibility fixer messages
| Python | mit | shaunduncan/helga-prod-fixer | <INSERT> 'Did you try rebooting {thing}? Try that first.',
'{thing} is only IE6 compatible. Make sure you\'re using the right browser.',
<INSERT_END> <|endoftext|> import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Did you try rebooting {thing}? Try that first.',
'{thing} is only IE6 compatible. Make sure you\'re using the right browser.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
| Reboot and IE6 compatibility fixer messages
import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
|
7a09d36448d646e29c8d0aeeb7c39df2d20885ab | test/unit/ggrc/models/test_states.py | test/unit/ggrc/models/test_states.py | """Test Object State Module"""
import unittest
import ggrc.app # noqa pylint: disable=unused-import
from ggrc.models import all_models
class TestStates(unittest.TestCase):
"""Test Object State main Test Case class"""
def _assert_states(self, models, expected_states, default):
# pylint: disable=no-self-use
for model in all_models.all_models:
if model.__name__ not in models:
continue
assert hasattr(model, "valid_statuses"), \
"{} does not have valid_statuses".format(model.__name__)
assert set(model.valid_statuses()) == set(expected_states), \
"{} does not have expected states {}. Current states {}".format(
model.__name__, ', '.join(expected_states),
', '.join(model.valid_statuses()))
assert model.default_status() == default, \
"{} does not have expected default status {}, but {} instead".format(
model.__name__,
default,
model.default_status())
def test_basic_states(self):
"""Test basic object states"""
basic_states = ('Draft', 'Active', 'Deprecated')
basic_state_objects = (
'AccessGroup', 'Clause', 'Contract',
'Control', 'DataAsset', 'Directive', 'Facility', 'Issue', 'Market',
'Objective', 'OrgGroup', 'Policy', 'Process', 'Product', 'Program',
'Project', 'Regulation', 'Risk', 'Section', 'Standard', 'System',
'SystemOrProcess', 'Threat', 'Vendor')
self._assert_states(basic_state_objects, basic_states, 'Draft')
def test_audit_states(self):
"""Test states for Audit object"""
audit_states = ('Planned', 'In Progress', 'Manager Review',
'Ready for External Review', 'Completed')
self._assert_states(('Audit', ), audit_states, 'Planned')
def test_assignable_states(self):
"""Test states for Assignable objects (Assessment)"""
assignable_states = (
'In Progress', 'Completed', 'Not Started', 'Verified',
'Ready for Review')
self._assert_states(('Assessment', ), assignable_states, 'Not Started')
| Add unit test for object state | Add unit test for object state
| Python | apache-2.0 | selahssea/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core | <REPLACE_OLD> <REPLACE_NEW> """Test Object State Module"""
import unittest
import ggrc.app # noqa pylint: disable=unused-import
from ggrc.models import all_models
class TestStates(unittest.TestCase):
"""Test Object State main Test Case class"""
def _assert_states(self, models, expected_states, default):
# pylint: disable=no-self-use
for model in all_models.all_models:
if model.__name__ not in models:
continue
assert hasattr(model, "valid_statuses"), \
"{} does not have valid_statuses".format(model.__name__)
assert set(model.valid_statuses()) == set(expected_states), \
"{} does not have expected states {}. Current states {}".format(
model.__name__, ', '.join(expected_states),
', '.join(model.valid_statuses()))
assert model.default_status() == default, \
"{} does not have expected default status {}, but {} instead".format(
model.__name__,
default,
model.default_status())
def test_basic_states(self):
"""Test basic object states"""
basic_states = ('Draft', 'Active', 'Deprecated')
basic_state_objects = (
'AccessGroup', 'Clause', 'Contract',
'Control', 'DataAsset', 'Directive', 'Facility', 'Issue', 'Market',
'Objective', 'OrgGroup', 'Policy', 'Process', 'Product', 'Program',
'Project', 'Regulation', 'Risk', 'Section', 'Standard', 'System',
'SystemOrProcess', 'Threat', 'Vendor')
self._assert_states(basic_state_objects, basic_states, 'Draft')
def test_audit_states(self):
"""Test states for Audit object"""
audit_states = ('Planned', 'In Progress', 'Manager Review',
'Ready for External Review', 'Completed')
self._assert_states(('Audit', ), audit_states, 'Planned')
def test_assignable_states(self):
"""Test states for Assignable objects (Assessment)"""
assignable_states = (
'In Progress', 'Completed', 'Not Started', 'Verified',
'Ready for Review')
self._assert_states(('Assessment', ), assignable_states, 'Not Started')
<REPLACE_END> <|endoftext|> """Test Object State Module"""
import unittest
import ggrc.app # noqa pylint: disable=unused-import
from ggrc.models import all_models
class TestStates(unittest.TestCase):
"""Test Object State main Test Case class"""
def _assert_states(self, models, expected_states, default):
# pylint: disable=no-self-use
for model in all_models.all_models:
if model.__name__ not in models:
continue
assert hasattr(model, "valid_statuses"), \
"{} does not have valid_statuses".format(model.__name__)
assert set(model.valid_statuses()) == set(expected_states), \
"{} does not have expected states {}. Current states {}".format(
model.__name__, ', '.join(expected_states),
', '.join(model.valid_statuses()))
assert model.default_status() == default, \
"{} does not have expected default status {}, but {} instead".format(
model.__name__,
default,
model.default_status())
def test_basic_states(self):
"""Test basic object states"""
basic_states = ('Draft', 'Active', 'Deprecated')
basic_state_objects = (
'AccessGroup', 'Clause', 'Contract',
'Control', 'DataAsset', 'Directive', 'Facility', 'Issue', 'Market',
'Objective', 'OrgGroup', 'Policy', 'Process', 'Product', 'Program',
'Project', 'Regulation', 'Risk', 'Section', 'Standard', 'System',
'SystemOrProcess', 'Threat', 'Vendor')
self._assert_states(basic_state_objects, basic_states, 'Draft')
def test_audit_states(self):
"""Test states for Audit object"""
audit_states = ('Planned', 'In Progress', 'Manager Review',
'Ready for External Review', 'Completed')
self._assert_states(('Audit', ), audit_states, 'Planned')
def test_assignable_states(self):
"""Test states for Assignable objects (Assessment)"""
assignable_states = (
'In Progress', 'Completed', 'Not Started', 'Verified',
'Ready for Review')
self._assert_states(('Assessment', ), assignable_states, 'Not Started')
| Add unit test for object state
|
|
e3248ba4bca04b434414570dc438547d8770adc9 | tools/ocd_restore.py | tools/ocd_restore.py | #!/usr/bin/env python
from pupa.utils import JSONEncoderPlus
from contextlib import contextmanager
from pymongo import Connection
import argparse
import json
import os
parser = argparse.ArgumentParser(description='Re-convert a jurisdiction.')
parser.add_argument('--server', type=str, help='Mongo Server',
default="localhost")
parser.add_argument('--database', type=str, help='Mongo Database',
default="opencivicdata")
parser.add_argument('--port', type=int, help='Mongo Server Port',
default=27017)
parser.add_argument('--output', type=str, help='Output Directory',
default="dump")
parser.add_argument('root', type=str, help='root', default='dump')
args = parser.parse_args()
connection = Connection(args.server, args.port)
db = getattr(connection, args.database)
jurisdiction = args.jurisdiction
@contextmanager
def cd(path):
pop = os.getcwd()
os.chdir(path)
try:
yield path
finally:
os.chdir(pop)
with cd(args.root):
print os.getcwd()
| Add basics for a restore script | Add basics for a restore script
(Yes, sadly, still debugging, need the prod db)
| Python | bsd-3-clause | rshorey/pupa,mileswwatkins/pupa,datamade/pupa,rshorey/pupa,opencivicdata/pupa,influence-usa/pupa,mileswwatkins/pupa,influence-usa/pupa,opencivicdata/pupa,datamade/pupa | <INSERT> #!/usr/bin/env python
from pupa.utils import JSONEncoderPlus
from contextlib import contextmanager
from pymongo import Connection
import argparse
import json
import os
parser = argparse.ArgumentParser(description='Re-convert a jurisdiction.')
parser.add_argument('--server', type=str, help='Mongo Server',
<INSERT_END> <INSERT> default="localhost")
parser.add_argument('--database', type=str, help='Mongo Database',
default="opencivicdata")
parser.add_argument('--port', type=int, help='Mongo Server Port',
default=27017)
parser.add_argument('--output', type=str, help='Output Directory',
default="dump")
parser.add_argument('root', type=str, help='root', default='dump')
args = parser.parse_args()
connection = Connection(args.server, args.port)
db = getattr(connection, args.database)
jurisdiction = args.jurisdiction
@contextmanager
def cd(path):
pop = os.getcwd()
os.chdir(path)
try:
yield path
finally:
os.chdir(pop)
with cd(args.root):
print os.getcwd()
<INSERT_END> <|endoftext|> #!/usr/bin/env python
from pupa.utils import JSONEncoderPlus
from contextlib import contextmanager
from pymongo import Connection
import argparse
import json
import os
parser = argparse.ArgumentParser(description='Re-convert a jurisdiction.')
parser.add_argument('--server', type=str, help='Mongo Server',
default="localhost")
parser.add_argument('--database', type=str, help='Mongo Database',
default="opencivicdata")
parser.add_argument('--port', type=int, help='Mongo Server Port',
default=27017)
parser.add_argument('--output', type=str, help='Output Directory',
default="dump")
parser.add_argument('root', type=str, help='root', default='dump')
args = parser.parse_args()
connection = Connection(args.server, args.port)
db = getattr(connection, args.database)
jurisdiction = args.jurisdiction
@contextmanager
def cd(path):
pop = os.getcwd()
os.chdir(path)
try:
yield path
finally:
os.chdir(pop)
with cd(args.root):
print os.getcwd()
| Add basics for a restore script
(Yes, sadly, still debugging, need the prod db)
|
|
78aabbc9c66bc92fdedec740e32ad9fbd9ee8937 | pygraphc/clustering/ConnectedComponents.py | pygraphc/clustering/ConnectedComponents.py | import networkx as nx
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
References
----------
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, g):
"""This is a constructor for ConnectedComponent class
Parameters
----------
g : graph
a graph to be clustered
"""
self.g = g
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
Returns
-------
clusters : list[list]
List of cluster list, where each list contains index (line number) of event log.
"""
clusters = []
for components in nx.connected_components(self.g):
clusters.append(components)
cluster_id = 0
for cluster in clusters:
for node in cluster:
self.g.node[node]['cluster'] = cluster_id
cluster_id += 1
return clusters
| import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
References
----------
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
This method heavily rely on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
| Change cluster data structure from list to dict | Change cluster data structure from list to dict
| Python | mit | studiawan/pygraphc | <REPLACE_OLD> nx
class <REPLACE_NEW> nx
from ClusterUtility import ClusterUtility
class <REPLACE_END> <REPLACE_OLD> in Proceedings of the <REPLACE_NEW> The 2nd <REPLACE_END> <REPLACE_OLD> g):
<REPLACE_NEW> graph):
<REPLACE_END> <REPLACE_OLD> class
<REPLACE_NEW> class.
<REPLACE_END> <REPLACE_OLD> g <REPLACE_NEW> graph <REPLACE_END> <REPLACE_OLD> a <REPLACE_NEW> A <REPLACE_END> <REPLACE_OLD> clustered
<REPLACE_NEW> clustered.
<REPLACE_END> <REPLACE_OLD> self.g <REPLACE_NEW> self.graph <REPLACE_END> <REPLACE_OLD> g
<REPLACE_NEW> graph
<REPLACE_END> <REPLACE_OLD> identifier.
<REPLACE_NEW> identifier.
This method heavily rely on the cosine similarity threshold to build an edge in a graph.
<REPLACE_END> <REPLACE_OLD> list[list]
List <REPLACE_NEW> dict[list]
Dictionary <REPLACE_END> <REPLACE_OLD> []
<REPLACE_NEW> {}
cluster_id = 0
<REPLACE_END> <REPLACE_OLD> nx.connected_components(self.g):
clusters.append(components)
cluster_id <REPLACE_NEW> nx.connected_components(self.graph):
clusters[cluster_id] <REPLACE_END> <REPLACE_OLD> 0
for cluster in clusters:
for node in cluster:
self.g.node[node]['cluster'] = cluster_id
<REPLACE_NEW> components
<REPLACE_END> <INSERT> ClusterUtility.set_cluster_id(self.graph, clusters)
<INSERT_END> <|endoftext|> import networkx as nx
from ClusterUtility import ClusterUtility
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
References
----------
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, The 2nd International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, graph):
"""This is a constructor for ConnectedComponent class.
Parameters
----------
graph : graph
A graph to be clustered.
"""
self.graph = graph
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
This method heavily rely on the cosine similarity threshold to build an edge in a graph.
Returns
-------
clusters : dict[list]
Dictionary of cluster list, where each list contains index (line number) of event log.
"""
clusters = {}
cluster_id = 0
for components in nx.connected_components(self.graph):
clusters[cluster_id] = components
cluster_id += 1
ClusterUtility.set_cluster_id(self.graph, clusters)
return clusters
| Change cluster data structure from list to dict
import networkx as nx
class ConnectedComponents:
"""This is a class for connected component detection method to cluster event logs [1]_.
References
----------
.. [1] H. Studiawan, B. A. Pratomo, and R. Anggoro, Connected component detection for authentication log
clustering, in Proceedings of the International Seminar on Science and Technology, 2016, pp. 495-496.
"""
def __init__(self, g):
"""This is a constructor for ConnectedComponent class
Parameters
----------
g : graph
a graph to be clustered
"""
self.g = g
def get_clusters(self):
"""This method find any connected component in a graph.
A component represents a cluster and each component will be given a cluster identifier.
Returns
-------
clusters : list[list]
List of cluster list, where each list contains index (line number) of event log.
"""
clusters = []
for components in nx.connected_components(self.g):
clusters.append(components)
cluster_id = 0
for cluster in clusters:
for node in cluster:
self.g.node[node]['cluster'] = cluster_id
cluster_id += 1
return clusters
|
ecfadf8478b8775d8579812a7bd835f6ebb1ffd4 | util/rclone-list-files.py | util/rclone-list-files.py | #!/usr/bin/env python3
import glob
# For use with --files-from argument for Rclone
# This suits Edgar's structure with is
# SPECIESNAME/{occurrences|projected-distributions}/[2nd-to-latest-file-is-the-latest].zip
for folder in glob.glob('*'):
occurrences = glob.glob(folder + '/occurrences/*')
projected_distributions = glob.glob(folder + '/projected-distributions/*')
if not 'latest' in occurrences[-1] and not 'latest' in projected_distributions[-1]:
print(f'No latest in {folder}!')
exit(1)
print(folder + '/metadata.json')
print(occurrences[-2])
print(projected_distributions[-2])
| Add file lister for rclone export | Add file lister for rclone export | Python | bsd-3-clause | jcu-eresearch/Edgar,jcu-eresearch/Edgar,jcu-eresearch/Edgar,jcu-eresearch/Edgar,jcu-eresearch/Edgar,jcu-eresearch/Edgar | <INSERT> #!/usr/bin/env python3
import glob
# For use with --files-from argument for Rclone
# This suits Edgar's structure with is
# SPECIESNAME/{occurrences|projected-distributions}/[2nd-to-latest-file-is-the-latest].zip
for folder in glob.glob('*'):
<INSERT_END> <INSERT> occurrences = glob.glob(folder + '/occurrences/*')
projected_distributions = glob.glob(folder + '/projected-distributions/*')
if not 'latest' in occurrences[-1] and not 'latest' in projected_distributions[-1]:
print(f'No latest in {folder}!')
exit(1)
print(folder + '/metadata.json')
print(occurrences[-2])
print(projected_distributions[-2])
<INSERT_END> <|endoftext|> #!/usr/bin/env python3
import glob
# For use with --files-from argument for Rclone
# This suits Edgar's structure with is
# SPECIESNAME/{occurrences|projected-distributions}/[2nd-to-latest-file-is-the-latest].zip
for folder in glob.glob('*'):
occurrences = glob.glob(folder + '/occurrences/*')
projected_distributions = glob.glob(folder + '/projected-distributions/*')
if not 'latest' in occurrences[-1] and not 'latest' in projected_distributions[-1]:
print(f'No latest in {folder}!')
exit(1)
print(folder + '/metadata.json')
print(occurrences[-2])
print(projected_distributions[-2])
| Add file lister for rclone export
|
|
b7ede20d4e82b5aba701dd02c49ca8a5fe00e0ed | dimagi/utils/prime_views.py | dimagi/utils/prime_views.py | def prime_views(pool_size):
"""
Prime the views so that a very large import doesn't cause the index
to get too far behind
"""
# These have to be included here or ./manage.py runserver explodes on
# all pages of the app with single thread related errors
from gevent.pool import Pool
from dimagi.utils.management.commands import prime_views
prime_pool = Pool(pool_size)
prime_all = prime_views.Command()
prime_all.prime_everything(prime_pool, verbose=True)
prime_pool.join()
| Move prime views method in | Move prime views method in
| Python | bsd-3-clause | qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq | <INSERT> def prime_views(pool_size):
<INSERT_END> <INSERT> """
Prime the views so that a very large import doesn't cause the index
to get too far behind
"""
# These have to be included here or ./manage.py runserver explodes on
# all pages of the app with single thread related errors
from gevent.pool import Pool
from dimagi.utils.management.commands import prime_views
prime_pool = Pool(pool_size)
prime_all = prime_views.Command()
prime_all.prime_everything(prime_pool, verbose=True)
prime_pool.join()
<INSERT_END> <|endoftext|> def prime_views(pool_size):
"""
Prime the views so that a very large import doesn't cause the index
to get too far behind
"""
# These have to be included here or ./manage.py runserver explodes on
# all pages of the app with single thread related errors
from gevent.pool import Pool
from dimagi.utils.management.commands import prime_views
prime_pool = Pool(pool_size)
prime_all = prime_views.Command()
prime_all.prime_everything(prime_pool, verbose=True)
prime_pool.join()
| Move prime views method in
|
|
f6ce7485f18d3c5299b64a9b10af08f5da1c2335 | infrastructure/control/osimctrl/src/start-opensim.py | infrastructure/control/osimctrl/src/start-opensim.py | #!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = subprocess.check_output("screen -list", shell=True)
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
| #!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
### FUNCTIONS ###
def execCmd(cmd):
print "Executing command: %s" % cmd
return subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
### SCRIPT ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = execCmd("screen -list")
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
os.chdir(binaryPath) | Create execCmd function and use | Create execCmd function and use
| Python | bsd-3-clause | justinccdev/opensimulator-tools,justinccdev/opensimulator-tools,justinccdev/opensimulator-tools,justinccdev/opensimulator-tools | <REPLACE_OLD> ###
if <REPLACE_NEW> ###
### FUNCTIONS ###
def execCmd(cmd):
print "Executing command: %s" % cmd
return subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
### SCRIPT ###
if <REPLACE_END> <REPLACE_OLD> subprocess.check_output("screen -list", shell=True)
except:
<REPLACE_NEW> execCmd("screen -list")
except:
<REPLACE_END> <INSERT>
os.chdir(binaryPath) <INSERT_END> <|endoftext|> #!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
### FUNCTIONS ###
def execCmd(cmd):
print "Executing command: %s" % cmd
return subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
### SCRIPT ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = execCmd("screen -list")
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
os.chdir(binaryPath) | Create execCmd function and use
#!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = subprocess.check_output("screen -list", shell=True)
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
|
db9afab144c12391c9c54174b8973ec187455b9c | webpack/conf.py | webpack/conf.py | import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = True # TODO: should default to False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
| import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
| WATCH now defaults to False | WATCH now defaults to False
| Python | mit | markfinger/python-webpack,markfinger/python-webpack | <DELETE> True # TODO: should default to <DELETE_END> <|endoftext|> import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
| WATCH now defaults to False
import os
from optional_django import conf
class Conf(conf.Conf):
# Environment configuration
STATIC_ROOT = None
STATIC_URL = None
BUILD_SERVER_URL = 'http://127.0.0.1:9009'
OUTPUT_DIR = 'webpack_assets'
CONFIG_DIRS = None
CONTEXT = None
# Watching
WATCH = True # TODO: should default to False
AGGREGATE_TIMEOUT = 200
POLL = None
HMR = False
# Caching
CACHE = True
CACHE_DIR = None
def get_path_to_output_dir(self):
return os.path.join(self.STATIC_ROOT, self.OUTPUT_DIR)
def get_public_path(self):
static_url = self.STATIC_URL
if static_url and static_url.endswith('/'):
static_url = static_url[0:-1]
return '/'.join([static_url, self.OUTPUT_DIR])
settings = Conf()
|
a5cd2110283ba699f36548c42b83aa86e6b50aab | configuration.py | configuration.py | # -*- coding: utf-8 -*-
"""
configuration.py
"""
from trytond.model import fields, ModelSingleton, ModelSQL, ModelView
__all__ = ['EndiciaConfiguration']
class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView):
"""
Configuration settings for Endicia.
"""
__name__ = 'endicia.configuration'
account_id = fields.Integer('Account Id')
requester_id = fields.Char('Requester Id')
passphrase = fields.Char('Passphrase')
is_test = fields.Boolean('Is Test')
@classmethod
def __setup__(cls):
super(EndiciaConfiguration, cls).__setup__()
cls._error_messages.update({
'endicia_credentials_required':
'Endicia settings on endicia configuration are incomplete.',
})
def get_endicia_credentials(self):
"""Validate if endicia credentials are complete.
"""
if not all([
self.account_id,
self.requester_id,
self.passphrase
]):
self.raise_user_error('endicia_credentials_required')
return self
| # -*- coding: utf-8 -*-
"""
configuration.py
"""
from trytond import backend
from trytond.model import fields, ModelSingleton, ModelSQL, ModelView
from trytond.transaction import Transaction
__all__ = ['EndiciaConfiguration']
class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView):
"""
Configuration settings for Endicia.
"""
__name__ = 'endicia.configuration'
account_id = fields.Char('Account Id')
requester_id = fields.Char('Requester Id')
passphrase = fields.Char('Passphrase')
is_test = fields.Boolean('Is Test')
@classmethod
def __setup__(cls):
super(EndiciaConfiguration, cls).__setup__()
cls._error_messages.update({
'endicia_credentials_required':
'Endicia settings on endicia configuration are incomplete.',
})
@classmethod
def __register__(cls, module_name):
TableHandler = backend.get('TableHandler')
cursor = Transaction().cursor
# Migration from 3.4.0.6 : Migrate account_id field to string
if backend.name() == 'postgresql':
cursor.execute(
'SELECT pg_typeof("account_id") '
'FROM endicia_configuration '
'LIMIT 1',
)
# Check if account_id is integer field
is_integer = cursor.fetchone()[0] == 'integer'
if is_integer:
# Migrate integer field to string
table = TableHandler(cursor, cls, module_name)
table.alter_type('account_id', 'varchar')
super(EndiciaConfiguration, cls).__register__(module_name)
def get_endicia_credentials(self):
"""Validate if endicia credentials are complete.
"""
if not all([
self.account_id,
self.requester_id,
self.passphrase
]):
self.raise_user_error('endicia_credentials_required')
return self
| Migrate account_id from integer field to char field | Migrate account_id from integer field to char field
| Python | bsd-3-clause | priyankarani/trytond-shipping-endicia,fulfilio/trytond-shipping-endicia,prakashpp/trytond-shipping-endicia | <INSERT> trytond import backend
from <INSERT_END> <REPLACE_OLD> ModelView
__all__ <REPLACE_NEW> ModelView
from trytond.transaction import Transaction
__all__ <REPLACE_END> <REPLACE_OLD> fields.Integer('Account <REPLACE_NEW> fields.Char('Account <REPLACE_END> <INSERT> @classmethod
def __register__(cls, module_name):
TableHandler = backend.get('TableHandler')
cursor = Transaction().cursor
# Migration from 3.4.0.6 : Migrate account_id field to string
if backend.name() == 'postgresql':
cursor.execute(
'SELECT pg_typeof("account_id") '
'FROM endicia_configuration '
'LIMIT 1',
)
# Check if account_id is integer field
is_integer = cursor.fetchone()[0] == 'integer'
if is_integer:
# Migrate integer field to string
table = TableHandler(cursor, cls, module_name)
table.alter_type('account_id', 'varchar')
super(EndiciaConfiguration, cls).__register__(module_name)
<INSERT_END> <|endoftext|> # -*- coding: utf-8 -*-
"""
configuration.py
"""
from trytond import backend
from trytond.model import fields, ModelSingleton, ModelSQL, ModelView
from trytond.transaction import Transaction
__all__ = ['EndiciaConfiguration']
class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView):
"""
Configuration settings for Endicia.
"""
__name__ = 'endicia.configuration'
account_id = fields.Char('Account Id')
requester_id = fields.Char('Requester Id')
passphrase = fields.Char('Passphrase')
is_test = fields.Boolean('Is Test')
@classmethod
def __setup__(cls):
super(EndiciaConfiguration, cls).__setup__()
cls._error_messages.update({
'endicia_credentials_required':
'Endicia settings on endicia configuration are incomplete.',
})
@classmethod
def __register__(cls, module_name):
TableHandler = backend.get('TableHandler')
cursor = Transaction().cursor
# Migration from 3.4.0.6 : Migrate account_id field to string
if backend.name() == 'postgresql':
cursor.execute(
'SELECT pg_typeof("account_id") '
'FROM endicia_configuration '
'LIMIT 1',
)
# Check if account_id is integer field
is_integer = cursor.fetchone()[0] == 'integer'
if is_integer:
# Migrate integer field to string
table = TableHandler(cursor, cls, module_name)
table.alter_type('account_id', 'varchar')
super(EndiciaConfiguration, cls).__register__(module_name)
def get_endicia_credentials(self):
"""Validate if endicia credentials are complete.
"""
if not all([
self.account_id,
self.requester_id,
self.passphrase
]):
self.raise_user_error('endicia_credentials_required')
return self
| Migrate account_id from integer field to char field
# -*- coding: utf-8 -*-
"""
configuration.py
"""
from trytond.model import fields, ModelSingleton, ModelSQL, ModelView
__all__ = ['EndiciaConfiguration']
class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView):
"""
Configuration settings for Endicia.
"""
__name__ = 'endicia.configuration'
account_id = fields.Integer('Account Id')
requester_id = fields.Char('Requester Id')
passphrase = fields.Char('Passphrase')
is_test = fields.Boolean('Is Test')
@classmethod
def __setup__(cls):
super(EndiciaConfiguration, cls).__setup__()
cls._error_messages.update({
'endicia_credentials_required':
'Endicia settings on endicia configuration are incomplete.',
})
def get_endicia_credentials(self):
"""Validate if endicia credentials are complete.
"""
if not all([
self.account_id,
self.requester_id,
self.passphrase
]):
self.raise_user_error('endicia_credentials_required')
return self
|
de89049649fe720d45b271f519674845104f1941 | flow_workflow/petri_net/future_nets/base.py | flow_workflow/petri_net/future_nets/base.py | from flow.petri_net.future_net import FutureNet
from flow.petri_net.success_failure_net import SuccessFailureNet
class SimplifiedSuccessFailureNet(FutureNet):
def __init__(self, name=''):
FutureNet.__init__(self, name=name)
# Internal -- subclasses should connect to these
self.internal_start_transition = self.add_basic_transition('internal-start')
self.internal_failure_place = self.add_place('internal-failure')
self.internal_success_place = self.add_place('internal-success')
# Transitions to observe -- owners and subclasses may observe these
self.start_transition = self.add_basic_transition(name='start')
self.bridge_transitions(self.start_transition, self.internal_start_transition)
self.failure_transition = self.add_basic_transition(name='failure')
self.failure_transition.add_arc_in(self.internal_failure_place)
self.success_transition = self.add_basic_transition(name='success')
self.failure_transition.add_arc_in(self.internal_success_place)
class GenomeNetBase(SimplifiedSuccessFailureNet):
def __init__(self, name, operation_id, parent_operation_id=None):
SimplifiedSuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
| from flow.petri_net.future_net import FutureNet
from flow.petri_net.success_failure_net import SuccessFailureNet
class GenomeNetBase(SuccessFailureNet):
def __init__(self, name, operation_id, parent_operation_id=None):
SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
| Make GenomeNetBase a SuccessFailureNet again | Make GenomeNetBase a SuccessFailureNet again
| Python | agpl-3.0 | genome/flow-workflow,genome/flow-workflow,genome/flow-workflow | <REPLACE_OLD> SimplifiedSuccessFailureNet(FutureNet):
def __init__(self, name=''):
FutureNet.__init__(self, name=name)
# Internal -- subclasses should connect to these
self.internal_start_transition = self.add_basic_transition('internal-start')
self.internal_failure_place = self.add_place('internal-failure')
self.internal_success_place = self.add_place('internal-success')
# Transitions to observe -- owners and subclasses may observe these
self.start_transition = self.add_basic_transition(name='start')
self.bridge_transitions(self.start_transition, self.internal_start_transition)
self.failure_transition = self.add_basic_transition(name='failure')
self.failure_transition.add_arc_in(self.internal_failure_place)
self.success_transition = self.add_basic_transition(name='success')
self.failure_transition.add_arc_in(self.internal_success_place)
class GenomeNetBase(SimplifiedSuccessFailureNet):
<REPLACE_NEW> GenomeNetBase(SuccessFailureNet):
<REPLACE_END> <REPLACE_OLD> SimplifiedSuccessFailureNet.__init__(self, <REPLACE_NEW> SuccessFailureNet.__init__(self, <REPLACE_END> <|endoftext|> from flow.petri_net.future_net import FutureNet
from flow.petri_net.success_failure_net import SuccessFailureNet
class GenomeNetBase(SuccessFailureNet):
def __init__(self, name, operation_id, parent_operation_id=None):
SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
| Make GenomeNetBase a SuccessFailureNet again
from flow.petri_net.future_net import FutureNet
from flow.petri_net.success_failure_net import SuccessFailureNet
class SimplifiedSuccessFailureNet(FutureNet):
def __init__(self, name=''):
FutureNet.__init__(self, name=name)
# Internal -- subclasses should connect to these
self.internal_start_transition = self.add_basic_transition('internal-start')
self.internal_failure_place = self.add_place('internal-failure')
self.internal_success_place = self.add_place('internal-success')
# Transitions to observe -- owners and subclasses may observe these
self.start_transition = self.add_basic_transition(name='start')
self.bridge_transitions(self.start_transition, self.internal_start_transition)
self.failure_transition = self.add_basic_transition(name='failure')
self.failure_transition.add_arc_in(self.internal_failure_place)
self.success_transition = self.add_basic_transition(name='success')
self.failure_transition.add_arc_in(self.internal_success_place)
class GenomeNetBase(SimplifiedSuccessFailureNet):
def __init__(self, name, operation_id, parent_operation_id=None):
SimplifiedSuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
|
730aaf64635268df8d3c5cd3e1d5e2448644c907 | problem-static/Intro-Eval_50/admin/eval.py | problem-static/Intro-Eval_50/admin/eval.py | #!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
while True:
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main() | #!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
while True:
try:
command = str(input("What would you like to do? "))
print command
except Exception, e:
print "Invalid command!"
continue
main()
| Make Intro Eval use input instead of raw_input | Make Intro Eval use input instead of raw_input
| Python | mit | james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF | <REPLACE_OLD> str(raw_input("What <REPLACE_NEW> str(input("What <REPLACE_END> <DELETE> result = str(eval(command))
<DELETE_END> <REPLACE_OLD> "This is the result: %s" %(result)
<REPLACE_NEW> command
<REPLACE_END> <REPLACE_OLD> command!!!! EXITING!!!!!"
<REPLACE_NEW> command!"
<REPLACE_END> <REPLACE_OLD> return
main() <REPLACE_NEW> continue
main()
<REPLACE_END> <|endoftext|> #!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
while True:
try:
command = str(input("What would you like to do? "))
print command
except Exception, e:
print "Invalid command!"
continue
main()
| Make Intro Eval use input instead of raw_input
#!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
while True:
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main() |
018583a7b8ce3b74b3942402b37b642d37b54c6d | scripts/prepared_json_to_fasta.py | scripts/prepared_json_to_fasta.py | """
Convert a prepared JSON file from augur into a FASTA file.
"""
import argparse
import Bio
import json
import logging
import sys
sys.path.append('..')
from base.sequences_process import sequence_set
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("json", help="prepared JSON from augur")
parser.add_argument("fasta", help="FASTA output for sequences in JSON file")
args = parser.parse_args()
# Setup the logger.
logger = logging.getLogger(__name__)
# Load the JSON data.
with open(args.json, "r") as fh:
data = json.load(fh)
# Prepare a sequence set.
sequences = sequence_set(
logger,
data["sequences"],
data["reference"],
data["info"]["date_format"]
)
# Add the reference to output sequences if it isn't already included.
output_sequences = sequences.seqs.values()
if not sequences.reference_in_dataset:
output_sequences.append(sequences.reference_seq)
# Write sequences to disk.
Bio.SeqIO.write(output_sequences, args.fasta, "fasta")
| """
Convert a prepared JSON file from augur into a FASTA file.
"""
import argparse
import Bio
import json
import logging
import sys
sys.path.append('..')
from base.sequences_process import sequence_set
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Convert a prepared JSON file from augur into a FASTA file.")
parser.add_argument("json", help="prepared JSON from augur")
args = parser.parse_args()
# Setup the logger.
logger = logging.getLogger(__name__)
# Load the JSON data.
with open(args.json, "r") as fh:
data = json.load(fh)
# Prepare a sequence set.
sequences = sequence_set(
logger,
data["sequences"],
data["reference"],
data["info"]["date_format"]
)
# Add the reference to output sequences if it isn't already included.
output_sequences = sequences.seqs.values()
if not sequences.reference_in_dataset:
output_sequences.append(sequences.reference_seq)
# Write sequences to standard out.
Bio.SeqIO.write(output_sequences, sys.stdout, "fasta")
| Write FASTA output to standard out. | Write FASTA output to standard out.
| Python | agpl-3.0 | blab/nextstrain-augur,nextstrain/augur,nextstrain/augur,nextstrain/augur | <REPLACE_OLD> argparse.ArgumentParser()
<REPLACE_NEW> argparse.ArgumentParser(description="Convert a prepared JSON file from augur into a FASTA file.")
<REPLACE_END> <REPLACE_OLD> augur")
parser.add_argument("fasta", help="FASTA output for sequences in JSON file")
<REPLACE_NEW> augur")
<REPLACE_END> <REPLACE_OLD> disk.
<REPLACE_NEW> standard out.
<REPLACE_END> <REPLACE_OLD> args.fasta, <REPLACE_NEW> sys.stdout, <REPLACE_END> <|endoftext|> """
Convert a prepared JSON file from augur into a FASTA file.
"""
import argparse
import Bio
import json
import logging
import sys
sys.path.append('..')
from base.sequences_process import sequence_set
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Convert a prepared JSON file from augur into a FASTA file.")
parser.add_argument("json", help="prepared JSON from augur")
args = parser.parse_args()
# Setup the logger.
logger = logging.getLogger(__name__)
# Load the JSON data.
with open(args.json, "r") as fh:
data = json.load(fh)
# Prepare a sequence set.
sequences = sequence_set(
logger,
data["sequences"],
data["reference"],
data["info"]["date_format"]
)
# Add the reference to output sequences if it isn't already included.
output_sequences = sequences.seqs.values()
if not sequences.reference_in_dataset:
output_sequences.append(sequences.reference_seq)
# Write sequences to standard out.
Bio.SeqIO.write(output_sequences, sys.stdout, "fasta")
| Write FASTA output to standard out.
"""
Convert a prepared JSON file from augur into a FASTA file.
"""
import argparse
import Bio
import json
import logging
import sys
sys.path.append('..')
from base.sequences_process import sequence_set
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("json", help="prepared JSON from augur")
parser.add_argument("fasta", help="FASTA output for sequences in JSON file")
args = parser.parse_args()
# Setup the logger.
logger = logging.getLogger(__name__)
# Load the JSON data.
with open(args.json, "r") as fh:
data = json.load(fh)
# Prepare a sequence set.
sequences = sequence_set(
logger,
data["sequences"],
data["reference"],
data["info"]["date_format"]
)
# Add the reference to output sequences if it isn't already included.
output_sequences = sequences.seqs.values()
if not sequences.reference_in_dataset:
output_sequences.append(sequences.reference_seq)
# Write sequences to disk.
Bio.SeqIO.write(output_sequences, args.fasta, "fasta")
|
d6b3c47169082eeee6f1f01458b8791de2573849 | kolibri/plugins/management/kolibri_plugin.py | kolibri/plugins/management/kolibri_plugin.py |
from __future__ import absolute_import, print_function, unicode_literals
from kolibri.plugins.base import KolibriFrontEndPluginBase
class ManagementModule(KolibriFrontEndPluginBase):
"""
The Management module.
"""
entry_file = "assets/src/management.js"
base_url = "management"
template = "management/management.html"
def nav_items(self):
return (
{
"url": "foo/bar",
"text": "Management Foo!"
},
)
def user_nav_items(self):
return (
{
"url": "learners",
"text": "Learner Management"
},
)
PLUGINS = (
ManagementModule,
)
| from __future__ import absolute_import, print_function, unicode_literals
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class ManagementPlugin(KolibriPluginBase):
""" Required boilerplate so that the module is recognized as a plugin """
pass
class ManagementAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "management_module"
src_file = "kolibri/plugins/management/assets/src/management.js"
static_dir = "kolibri/plugins/management/static"
class ManagementInclusionHook(webpack_hooks.FrontEndBaseSyncHook):
bundle_class = ManagementAsset
| Use new plugin classes for management | Use new plugin classes for management
| Python | mit | 66eli77/kolibri,learningequality/kolibri,indirectlylit/kolibri,lyw07/kolibri,jtamiace/kolibri,learningequality/kolibri,aronasorman/kolibri,jamalex/kolibri,christianmemije/kolibri,rtibbles/kolibri,benjaoming/kolibri,jtamiace/kolibri,jayoshih/kolibri,MingDai/kolibri,DXCanas/kolibri,jamalex/kolibri,rtibbles/kolibri,mrpau/kolibri,mrpau/kolibri,aronasorman/kolibri,whitzhu/kolibri,66eli77/kolibri,lyw07/kolibri,MCGallaspy/kolibri,jtamiace/kolibri,mrpau/kolibri,jamalex/kolibri,benjaoming/kolibri,DXCanas/kolibri,jtamiace/kolibri,christianmemije/kolibri,christianmemije/kolibri,jonboiser/kolibri,DXCanas/kolibri,learningequality/kolibri,lyw07/kolibri,ralphiee22/kolibri,indirectlylit/kolibri,MingDai/kolibri,mrpau/kolibri,rtibbles/kolibri,jonboiser/kolibri,ralphiee22/kolibri,ralphiee22/kolibri,lyw07/kolibri,MCGallaspy/kolibri,whitzhu/kolibri,MingDai/kolibri,MingDai/kolibri,MCGallaspy/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,jayoshih/kolibri,jonboiser/kolibri,66eli77/kolibri,whitzhu/kolibri,jonboiser/kolibri,christianmemije/kolibri,learningequality/kolibri,jayoshih/kolibri,rtibbles/kolibri,benjaoming/kolibri,66eli77/kolibri,aronasorman/kolibri,jamalex/kolibri,ralphiee22/kolibri,aronasorman/kolibri,DXCanas/kolibri,benjaoming/kolibri,whitzhu/kolibri,jayoshih/kolibri | <REPLACE_OLD>
from <REPLACE_NEW> from <REPLACE_END> <INSERT> kolibri.core.webpack import hooks as webpack_hooks
from <INSERT_END> <REPLACE_OLD> KolibriFrontEndPluginBase
class ManagementModule(KolibriFrontEndPluginBase):
<REPLACE_NEW> KolibriPluginBase
class ManagementPlugin(KolibriPluginBase):
<REPLACE_END> <INSERT> """ Required boilerplate so that the module is recognized as a plugin <INSERT_END> <REPLACE_OLD> The Management module.
<REPLACE_NEW> pass
class ManagementAsset(webpack_hooks.WebpackBundleHook):
<REPLACE_END> <REPLACE_OLD> """
<REPLACE_NEW> unique_slug = "management_module"
<REPLACE_END> <REPLACE_OLD> entry_file <REPLACE_NEW> src_file <REPLACE_END> <REPLACE_OLD> "assets/src/management.js"
<REPLACE_NEW> "kolibri/plugins/management/assets/src/management.js"
<REPLACE_END> <REPLACE_OLD> base_url <REPLACE_NEW> static_dir <REPLACE_END> <REPLACE_OLD> "management"
<REPLACE_NEW> "kolibri/plugins/management/static"
class ManagementInclusionHook(webpack_hooks.FrontEndBaseSyncHook):
<REPLACE_END> <REPLACE_OLD> template <REPLACE_NEW> bundle_class <REPLACE_END> <REPLACE_OLD> "management/management.html"
def nav_items(self):
return (
{
"url": "foo/bar",
"text": "Management Foo!"
},
)
def user_nav_items(self):
return (
{
"url": "learners",
"text": "Learner Management"
},
)
PLUGINS = (
ManagementModule,
)
<REPLACE_NEW> ManagementAsset
<REPLACE_END> <|endoftext|> from __future__ import absolute_import, print_function, unicode_literals
from kolibri.core.webpack import hooks as webpack_hooks
from kolibri.plugins.base import KolibriPluginBase
class ManagementPlugin(KolibriPluginBase):
""" Required boilerplate so that the module is recognized as a plugin """
pass
class ManagementAsset(webpack_hooks.WebpackBundleHook):
unique_slug = "management_module"
src_file = "kolibri/plugins/management/assets/src/management.js"
static_dir = "kolibri/plugins/management/static"
class ManagementInclusionHook(webpack_hooks.FrontEndBaseSyncHook):
bundle_class = ManagementAsset
| Use new plugin classes for management
from __future__ import absolute_import, print_function, unicode_literals
from kolibri.plugins.base import KolibriFrontEndPluginBase
class ManagementModule(KolibriFrontEndPluginBase):
"""
The Management module.
"""
entry_file = "assets/src/management.js"
base_url = "management"
template = "management/management.html"
def nav_items(self):
return (
{
"url": "foo/bar",
"text": "Management Foo!"
},
)
def user_nav_items(self):
return (
{
"url": "learners",
"text": "Learner Management"
},
)
PLUGINS = (
ManagementModule,
)
|
63c81a18bd95876cad1bd4c1269d38e18ee3e817 | wikichatter/TalkPageParser.py | wikichatter/TalkPageParser.py | import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
wikicode = mwp.parse(text)
sections = wikicode.get_sections()
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
| import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
# skip_style_tags is fix for https://github.com/kjschiroo/WikiChatter/issues/10
wikicode = mwp.parse(text, skip_style_tags=True)
sections = wikicode.get_sections()
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
| Make mwparserfromhell skip style tags | Make mwparserfromhell skip style tags
Since we do not really care if '' and ''' tags are processed
as plaintext or not, and not processing them as plaintext
causes #10
| Python | mit | kjschiroo/WikiChatter | <INSERT> # skip_style_tags is fix for https://github.com/kjschiroo/WikiChatter/issues/10
<INSERT_END> <REPLACE_OLD> mwp.parse(text)
<REPLACE_NEW> mwp.parse(text, skip_style_tags=True)
<REPLACE_END> <|endoftext|> import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
# skip_style_tags is fix for https://github.com/kjschiroo/WikiChatter/issues/10
wikicode = mwp.parse(text, skip_style_tags=True)
sections = wikicode.get_sections()
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
| Make mwparserfromhell skip style tags
Since we do not really care if '' and ''' tags are processed
as plaintext or not, and not processing them as plaintext
causes #10
import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
wikicode = mwp.parse(text)
sections = wikicode.get_sections()
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
|
d71b2f3b8943465ebe04aa9926cba0159402da96 | tests/test_sorting.py | tests/test_sorting.py | import os
from textwrap import dedent
from autosort.sorting import sort_imports
def test_regular():
path = os.path.abspath('test.py')
rv = sort_imports(dedent('''
from tokenize import COMMENT, INDENT, ENDMARKER
from tokenize import (DEDENT, # noqa
NEWLINE, STRING,
NAME)
'''), path)
assert rv == dedent('''\
from tokenize import (COMMENT, DEDENT, ENDMARKER, # noqa
INDENT, NAME, NEWLINE, STRING) # noqa
''')
| import os
from textwrap import dedent
from autosort.sorting import sort_imports
def test_regular():
path = os.path.abspath('test.py')
rv = sort_imports(dedent('''\
from tokenize import COMMENT, INDENT, ENDMARKER
from tokenize import (DEDENT, # noqa
NEWLINE, STRING,
NAME)
'''), path)
assert rv == dedent('''\
from tokenize import (COMMENT, DEDENT, ENDMARKER, # noqa
INDENT, NAME, NEWLINE, STRING) # noqa
''')
| Remove leading empty line in multiline test | Remove leading empty line in multiline test
| Python | mit | fbergroth/autosort | <REPLACE_OLD> sort_imports(dedent('''
<REPLACE_NEW> sort_imports(dedent('''\
<REPLACE_END> <REPLACE_OLD> noqa
<REPLACE_NEW> noqa
<REPLACE_END> <|endoftext|> import os
from textwrap import dedent
from autosort.sorting import sort_imports
def test_regular():
path = os.path.abspath('test.py')
rv = sort_imports(dedent('''\
from tokenize import COMMENT, INDENT, ENDMARKER
from tokenize import (DEDENT, # noqa
NEWLINE, STRING,
NAME)
'''), path)
assert rv == dedent('''\
from tokenize import (COMMENT, DEDENT, ENDMARKER, # noqa
INDENT, NAME, NEWLINE, STRING) # noqa
''')
| Remove leading empty line in multiline test
import os
from textwrap import dedent
from autosort.sorting import sort_imports
def test_regular():
path = os.path.abspath('test.py')
rv = sort_imports(dedent('''
from tokenize import COMMENT, INDENT, ENDMARKER
from tokenize import (DEDENT, # noqa
NEWLINE, STRING,
NAME)
'''), path)
assert rv == dedent('''\
from tokenize import (COMMENT, DEDENT, ENDMARKER, # noqa
INDENT, NAME, NEWLINE, STRING) # noqa
''')
|
d788375843d42d1de3c0143064e905a932394e30 | library/tests/test_factories.py | library/tests/test_factories.py | import pytest
from .factories import BookFactory, BookSpecimenFactory
pytestmark = pytest.mark.django_db
def test_it_should_create_a_default_book_from_factory():
book = BookFactory()
assert book.pk is not None
assert unicode(book)
def test_it_should_override_book_fields_passed_to_factory():
book = BookFactory()
assert book.title.startswith('Test book')
another = BookFactory(title="My custom title")
assert another.title == "My custom title"
def test_it_should_create_a_default_book_specimen_from_factory():
specimen = BookSpecimenFactory()
assert specimen.pk is not None
assert unicode(specimen)
def test_it_should_override_specimen_fields_passed_to_factory():
book = BookFactory()
specimen = BookSpecimenFactory(book=book)
assert specimen.book == book
| import pytest
from .factories import BookFactory, BookSpecimenFactory
pytestmark = pytest.mark.django_db
def test_it_should_create_a_default_book_from_factory():
book = BookFactory()
assert book.pk is not None
assert unicode(book)
def test_it_should_override_book_fields_passed_to_factory():
book = BookFactory()
assert book.title.startswith('Test book')
another = BookFactory(title="My custom title")
assert another.title == "My custom title"
def test_it_should_create_a_default_book_specimen_from_factory():
specimen = BookSpecimenFactory()
assert specimen.pk is not None
assert specimen.book.pk is not None
assert unicode(specimen)
def test_it_should_override_specimen_fields_passed_to_factory():
book = BookFactory()
specimen = BookSpecimenFactory(book=book)
assert specimen.book == book
| Test that BookSpecimenFactory also creates the related book | Test that BookSpecimenFactory also creates the related book
| Python | agpl-3.0 | ideascube/ideascube,ideascube/ideascube,Lcaracol/ideasbox.lan,ideascube/ideascube,Lcaracol/ideasbox.lan,Lcaracol/ideasbox.lan,ideascube/ideascube | <INSERT> specimen.book.pk is not None
assert <INSERT_END> <|endoftext|> import pytest
from .factories import BookFactory, BookSpecimenFactory
pytestmark = pytest.mark.django_db
def test_it_should_create_a_default_book_from_factory():
book = BookFactory()
assert book.pk is not None
assert unicode(book)
def test_it_should_override_book_fields_passed_to_factory():
book = BookFactory()
assert book.title.startswith('Test book')
another = BookFactory(title="My custom title")
assert another.title == "My custom title"
def test_it_should_create_a_default_book_specimen_from_factory():
specimen = BookSpecimenFactory()
assert specimen.pk is not None
assert specimen.book.pk is not None
assert unicode(specimen)
def test_it_should_override_specimen_fields_passed_to_factory():
book = BookFactory()
specimen = BookSpecimenFactory(book=book)
assert specimen.book == book
| Test that BookSpecimenFactory also creates the related book
import pytest
from .factories import BookFactory, BookSpecimenFactory
pytestmark = pytest.mark.django_db
def test_it_should_create_a_default_book_from_factory():
book = BookFactory()
assert book.pk is not None
assert unicode(book)
def test_it_should_override_book_fields_passed_to_factory():
book = BookFactory()
assert book.title.startswith('Test book')
another = BookFactory(title="My custom title")
assert another.title == "My custom title"
def test_it_should_create_a_default_book_specimen_from_factory():
specimen = BookSpecimenFactory()
assert specimen.pk is not None
assert unicode(specimen)
def test_it_should_override_specimen_fields_passed_to_factory():
book = BookFactory()
specimen = BookSpecimenFactory(book=book)
assert specimen.book == book
|
d8a5d6d6478ae8267ccd9d1e4db710f8decb7991 | wiki/achievements.py | wiki/achievements.py | import wikipedia
import sys
import random
import re
import nltk.data
def process_file(f):
names = {}
with open(f) as file:
for line in file:
l = line.strip().split('\t')
if len(l) != 2:
continue
(k, v) = l
names[k] = v
return names
REGEX_IN_DATE = r".*in\s*(?:[^ ,]*?)?\s*\d\d\d\d.*"
def process_page(id):
page = wikipedia.page(pageid=id)
in_date_regex = re.compile(REGEX_IN_DATE, re.IGNORECASE)
tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
out = set()
for line in tokenizer.tokenize(page.content, realign_boundaries=True):
if '\n' in line:
line = line.split('\n')
else:
line = [line]
for l in line:
if in_date_regex.match(l):
out.add(l)
return out
if __name__ == '__main__':
for file in sys.argv[1:]:
names = process_file(file)
if len(names) > 10:
sample = random.sample(names, 10)
else:
sample = names
for name in sample:
pageid = names[name]
print "Results of processing {} ({})".format(name, pageid)
for achievement in process_page(pageid):
print "\t", achievement.encode('utf-8')
| import wikipedia
import sys
import random
import re
import nltk.data
def process_file(f):
names = {}
with open(f) as file:
for line in file:
l = line.strip().split('\t')
if len(l) != 2:
continue
(k, v) = l
names[k] = v
return names
REGEX_IN_DATE = r".*in\s*(?:[^ ,]*?)?\s*\d\d\d\d.*"
def process_page(id):
page = wikipedia.page(pageid=id)
in_date_regex = re.compile(REGEX_IN_DATE, re.IGNORECASE)
tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
out = set()
for line in tokenizer.tokenize(page.content, realign_boundaries=True):
if '\n' in line:
line = line.split('\n')
else:
line = [line]
for l in line:
if in_date_regex.match(l):
out.add(l)
return out
if __name__ == '__main__':
for file in sys.argv[1:]:
names = process_file(file)
if len(names) > 10:
sample = random.sample(names, 10)
else:
sample = names
for name in sample:
pageid = names[name]
print "Results of processing {} ({})".format(name, pageid)
for achievement in process_page(pageid):
print ("\t", achievement.encode('utf-8'))
| Use print as a statement | janitoring: Use print as a statement
- Let's be Python 3 compatible.
Signed-off-by: mr.Shu <[email protected]>
| Python | apache-2.0 | Motivatix/wikipedia-achievements-processing | <REPLACE_OLD> "\t", achievement.encode('utf-8')
<REPLACE_NEW> ("\t", achievement.encode('utf-8'))
<REPLACE_END> <|endoftext|> import wikipedia
import sys
import random
import re
import nltk.data
def process_file(f):
names = {}
with open(f) as file:
for line in file:
l = line.strip().split('\t')
if len(l) != 2:
continue
(k, v) = l
names[k] = v
return names
REGEX_IN_DATE = r".*in\s*(?:[^ ,]*?)?\s*\d\d\d\d.*"
def process_page(id):
page = wikipedia.page(pageid=id)
in_date_regex = re.compile(REGEX_IN_DATE, re.IGNORECASE)
tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
out = set()
for line in tokenizer.tokenize(page.content, realign_boundaries=True):
if '\n' in line:
line = line.split('\n')
else:
line = [line]
for l in line:
if in_date_regex.match(l):
out.add(l)
return out
if __name__ == '__main__':
for file in sys.argv[1:]:
names = process_file(file)
if len(names) > 10:
sample = random.sample(names, 10)
else:
sample = names
for name in sample:
pageid = names[name]
print "Results of processing {} ({})".format(name, pageid)
for achievement in process_page(pageid):
print ("\t", achievement.encode('utf-8'))
| janitoring: Use print as a statement
- Let's be Python 3 compatible.
Signed-off-by: mr.Shu <[email protected]>
import wikipedia
import sys
import random
import re
import nltk.data
def process_file(f):
names = {}
with open(f) as file:
for line in file:
l = line.strip().split('\t')
if len(l) != 2:
continue
(k, v) = l
names[k] = v
return names
REGEX_IN_DATE = r".*in\s*(?:[^ ,]*?)?\s*\d\d\d\d.*"
def process_page(id):
page = wikipedia.page(pageid=id)
in_date_regex = re.compile(REGEX_IN_DATE, re.IGNORECASE)
tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
out = set()
for line in tokenizer.tokenize(page.content, realign_boundaries=True):
if '\n' in line:
line = line.split('\n')
else:
line = [line]
for l in line:
if in_date_regex.match(l):
out.add(l)
return out
if __name__ == '__main__':
for file in sys.argv[1:]:
names = process_file(file)
if len(names) > 10:
sample = random.sample(names, 10)
else:
sample = names
for name in sample:
pageid = names[name]
print "Results of processing {} ({})".format(name, pageid)
for achievement in process_page(pageid):
print "\t", achievement.encode('utf-8')
|
30be74075e761f932a10ea0806a08991b8fd9cb4 | code/python/find-nodes-without-external-id.py | code/python/find-nodes-without-external-id.py | #!/usr/bin/env python
import httplib
import urllib
import json
import ssl
import argparse
import re
parser = argparse.ArgumentParser(description='Find any node that does not have an external ID set.')
parser.add_argument('--target-url', required=True, help='URL for the UpGuard instance. This should be the hostname only (appliance.upguard.org instead of https://appliance.upguard.org)')
parser.add_argument('--api-key', required=True, help='API key for the UpGuard instance')
parser.add_argument('--secret-key', required=True, help='Secret key for the UpGuard instance')
parser.add_argument('--insecure', action='store_true', help='Ignore SSL certificate check?')
parser.add_argument('--per-page', type=int, default=10, help='Number of nodes to retrieve in each call. (Default: 100)')
args = parser.parse_args()
# Initializations
browser = None
def getNodes(browser, method, endpoint, page=1, per_page=100):
"""
    Return the JSON-parsed list of nodes for one page
"""
get_headers = {
"Authorization": "Token token=\"{}{}\"".format(args.api_key, args.secret_key),
"Accept": "application/json"}
browser.request("GET", "{}?page={}&per_page={}".format(endpoint, page, per_page), '', get_headers)
response = browser.getresponse()
if response.status >= 400:
raise httplib.HTTPException("{}: {}".format(str(response.status), str(response.reason)))
return json.loads(response.read())
try:
# Setup browser object
url = args.target_url
if 'http' in url:
# URL needs to be a hostname, so remove 'https://'
url = re.sub('https?:\/\/', '', url)
browser = httplib.HTTPConnection(url)
if args.insecure:
context = ssl._create_unverified_context()
browser = httplib.HTTPSConnection(url, context=context)
page = 1
nodes = getNodes(browser, "GET", "/api/v2/nodes.json", page=page, per_page=args.per_page)
print "Searching for nodes without an external ID..."
while nodes:
for node in nodes:
if not node['external_id']:
print "{} (hostname: {})".format(node['name'])
page += 1
nodes = getNodes(browser, "GET", "/api/v2/nodes.json", page=page, per_page=args.per_page)
except httplib.HTTPException as h:
    print h.message
finally:
if browser:
browser.close()
| Add script to list nodes without an external ID | Add script to list nodes without an external ID
| Python | mit | ScriptRock/content,ScriptRock/content,ScriptRock/content,ScriptRock/content,ScriptRock/content,ScriptRock/content | <REPLACE_OLD> <REPLACE_NEW> #!/usr/bin/env python
import httplib
import urllib
import json
import ssl
import argparse
import re
parser = argparse.ArgumentParser(description='Find any node that does not have an external ID set.')
parser.add_argument('--target-url', required=True, help='URL for the UpGuard instance. This should be the hostname only (appliance.upguard.org instead of https://appliance.upguard.org)')
parser.add_argument('--api-key', required=True, help='API key for the UpGuard instance')
parser.add_argument('--secret-key', required=True, help='Secret key for the UpGuard instance')
parser.add_argument('--insecure', action='store_true', help='Ignore SSL certificate check?')
parser.add_argument('--per-page', type=int, default=10, help='Number of nodes to retrieve in each call. (Default: 10)')
args = parser.parse_args()
# Initializations
browser = None
def getNodes(browser, method, endpoint, page=1, per_page=100):
"""
    Return the JSON-parsed list of nodes for one page
"""
get_headers = {
"Authorization": "Token token=\"{}{}\"".format(args.api_key, args.secret_key),
"Accept": "application/json"}
browser.request("GET", "{}?page={}&per_page={}".format(endpoint, page, per_page), '', get_headers)
response = browser.getresponse()
if response.status >= 400:
raise httplib.HTTPException("{}: {}".format(str(response.status), str(response.reason)))
return json.loads(response.read())
try:
# Setup browser object
url = args.target_url
if 'http' in url:
# URL needs to be a hostname, so remove 'https://'
url = re.sub('https?:\/\/', '', url)
browser = httplib.HTTPConnection(url)
if args.insecure:
context = ssl._create_unverified_context()
browser = httplib.HTTPSConnection(url, context=context)
page = 1
nodes = getNodes(browser, "GET", "/api/v2/nodes.json", page=page, per_page=args.per_page)
print "Searching for nodes without an external ID..."
while nodes:
for node in nodes:
if not node['external_id']:
print "{} (hostname: {})".format(node['name'])
page += 1
nodes = getNodes(browser, "GET", "/api/v2/nodes.json", page=page, per_page=args.per_page)
except httplib.HTTPException as h:
    print h.message
finally:
if browser:
browser.close()
<REPLACE_END> <|endoftext|> #!/usr/bin/env python
import httplib
import urllib
import json
import ssl
import argparse
import re
parser = argparse.ArgumentParser(description='Find any node that does not have an external ID set.')
parser.add_argument('--target-url', required=True, help='URL for the UpGuard instance. This should be the hostname only (appliance.upguard.org instead of https://appliance.upguard.org)')
parser.add_argument('--api-key', required=True, help='API key for the UpGuard instance')
parser.add_argument('--secret-key', required=True, help='Secret key for the UpGuard instance')
parser.add_argument('--insecure', action='store_true', help='Ignore SSL certificate check?')
parser.add_argument('--per-page', type=int, default=10, help='Number of nodes to retrieve in each call. (Default: 10)')
args = parser.parse_args()
# Initializations
browser = None
def getNodes(browser, method, endpoint, page=1, per_page=100):
"""
    Return the JSON-parsed list of nodes for one page
"""
get_headers = {
"Authorization": "Token token=\"{}{}\"".format(args.api_key, args.secret_key),
"Accept": "application/json"}
browser.request("GET", "{}?page={}&per_page={}".format(endpoint, page, per_page), '', get_headers)
response = browser.getresponse()
if response.status >= 400:
raise httplib.HTTPException("{}: {}".format(str(response.status), str(response.reason)))
return json.loads(response.read())
try:
# Setup browser object
url = args.target_url
if 'http' in url:
# URL needs to be a hostname, so remove 'https://'
url = re.sub('https?:\/\/', '', url)
browser = httplib.HTTPConnection(url)
if args.insecure:
context = ssl._create_unverified_context()
browser = httplib.HTTPSConnection(url, context=context)
page = 1
nodes = getNodes(browser, "GET", "/api/v2/nodes.json", page=page, per_page=args.per_page)
print "Searching for nodes without an external ID..."
while nodes:
for node in nodes:
if not node['external_id']:
print "{} (hostname: {})".format(node['name'])
page += 1
nodes = getNodes(browser, "GET", "/api/v2/nodes.json", page=page, per_page=args.per_page)
except httplib.HTTPException as h:
    print h.message
finally:
if browser:
browser.close()
| Add script to list nodes without an external ID
|
|
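Aside: the script above pages through /api/v2/nodes.json until an empty list comes back. The same page-until-empty loop factored into a reusable generator (hypothetical helper, not part of the commit):

def iter_all_nodes(fetch_page, per_page=100):
    """Yield items from successive pages until a page comes back empty."""
    page = 1
    while True:
        batch = fetch_page(page=page, per_page=per_page)
        if not batch:
            return
        for item in batch:
            yield item
        page += 1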
2f9c912c9071a498feb8d9cca69e447ffec397be | polygamy/pygit2_git.py | polygamy/pygit2_git.py | from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
| from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def _find_remote(repo, remote_name):
for remote in repo.remotes:
if remote.name == remote_name:
return remote
else:
raise NoSuchRemote()
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
@staticmethod
def set_remote_url(path, remote_name, remote_url):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
remote.url = remote_url
remote.save()
| Implement set_remote_url in pygit2 implementation | Implement set_remote_url in pygit2 implementation
| Python | bsd-3-clause | solarnz/polygamy,solarnz/polygamy | <INSERT> _find_remote(repo, remote_name):
for remote in repo.remotes:
if remote.name == remote_name:
return remote
else:
raise NoSuchRemote()
@staticmethod
def <INSERT_END> <REPLACE_OLD> pygit2.Repository(path)
for <REPLACE_NEW> pygit2.Repository(path)
<REPLACE_END> <REPLACE_OLD> in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
<REPLACE_NEW> = Pygit2Git._find_remote(repo, remote_name)
<REPLACE_END> <REPLACE_OLD> remote_url)
<REPLACE_NEW> remote_url)
@staticmethod
def set_remote_url(path, remote_name, remote_url):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
remote.url = remote_url
remote.save()
<REPLACE_END> <|endoftext|> from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def _find_remote(repo, remote_name):
for remote in repo.remotes:
if remote.name == remote_name:
return remote
else:
raise NoSuchRemote()
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
@staticmethod
def set_remote_url(path, remote_name, remote_url):
repo = pygit2.Repository(path)
remote = Pygit2Git._find_remote(repo, remote_name)
remote.url = remote_url
remote.save()
| Implement set_remote_url in pygit2 implementation
from __future__ import absolute_import
import pygit2
from .base_git import NoSuchRemote
from .plain_git import PlainGit
class Pygit2Git(PlainGit):
@staticmethod
def is_on_branch(path):
repo = pygit2.Repository(path)
return not (repo.head_is_detached or repo.head_is_unborn)
@staticmethod
def get_remote_url(path, remote_name):
repo = pygit2.Repository(path)
for remote in repo.remotes:
if remote.name == remote_name:
break
else:
raise NoSuchRemote()
return remote.url
@staticmethod
def add_remote(path, remote_name, remote_url):
repo = pygit2.Repository(path)
repo.create_remote(remote_name, remote_url)
|
f29477416729df9cc198f679a2478f6a077ce365 | app/util.py | app/util.py | # Various utility functions
import os
from typing import Any, Callable
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func: Callable[..., Any]) -> Callable[..., Any]:
data = {}
def wrapper(*args: Any) -> Any:
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
return wrapper
| # Various utility functions
import inspect
import os
from typing import Any, Callable
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func: Callable[..., Any]) -> Callable[..., Any]:
data = {}
def wrapper(*args: Any) -> Any:
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
wrapper.__signature__ = inspect.signature(func) # type: ignore
return wrapper
| Make cached_function not overwrite signature of wrapped function | Make cached_function not overwrite signature of wrapped function
| Python | mit | albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com | <INSERT> inspect
import <INSERT_END> <INSERT> wrapper.__signature__ = inspect.signature(func) # type: ignore
<INSERT_END> <|endoftext|> # Various utility functions
import inspect
import os
from typing import Any, Callable
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func: Callable[..., Any]) -> Callable[..., Any]:
data = {}
def wrapper(*args: Any) -> Any:
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
wrapper.__signature__ = inspect.signature(func) # type: ignore
return wrapper
| Make cached_function not overwrite signature of wrapped function
# Various utility functions
import os
from typing import Any, Callable
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func: Callable[..., Any]) -> Callable[..., Any]:
data = {}
def wrapper(*args: Any) -> Any:
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
return wrapper
|
9d0ea4eaf8269350fabc3415545bebf4da4137a7 | source/segue/backend/processor/background.py | source/segue/backend/processor/background.py | # :coding: utf-8
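Aside: assigning __qualname__ and __signature__ by hand works, but on Python 3.4+ functools.wraps covers both, because it sets __wrapped__ and inspect.signature() follows that attribute. A simplified sketch (drops the SHOULD_CACHE switch from the real module):

import functools
import inspect

def cached_function(func):
    data = {}

    @functools.wraps(func)          # copies __qualname__ etc., sets __wrapped__
    def wrapper(*args):
        key = ' '.join(str(x) for x in args)
        if key not in data:
            data[key] = func(*args)
        return data[key]

    return wrapper

@cached_function
def add(a, b=1):
    return a + b

print(inspect.signature(add))       # -> (a, b=1)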
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import multiprocessing
from .base import Processor
class BackgroundProcessor(Processor):
'''Local background processor.'''
def process(self, command, args=None, kw=None):
'''Process *command* with *args* and *kw*.'''
process = multiprocessing.Process(target=command, args=args, kwargs=kw)
process.start()
process.join()
| # :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import multiprocessing
from .base import Processor
class BackgroundProcessor(Processor):
'''Local background processor.'''
def process(self, command, args=None, kw=None):
'''Process *command* with *args* and *kw*.'''
if args is None:
args = ()
if kw is None:
kw = {}
process = multiprocessing.Process(target=command, args=args, kwargs=kw)
process.start()
process.join()
| Fix passing invalid None to multiprocessing Process class. | Fix passing invalid None to multiprocessing Process class.
| Python | apache-2.0 | 4degrees/segue | <INSERT> if args is None:
args = ()
if kw is None:
kw = {}
<INSERT_END> <|endoftext|> # :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import multiprocessing
from .base import Processor
class BackgroundProcessor(Processor):
'''Local background processor.'''
def process(self, command, args=None, kw=None):
'''Process *command* with *args* and *kw*.'''
if args is None:
args = ()
if kw is None:
kw = {}
process = multiprocessing.Process(target=command, args=args, kwargs=kw)
process.start()
process.join()
| Fix passing invalid None to multiprocessing Process class.
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import multiprocessing
from .base import Processor
class BackgroundProcessor(Processor):
'''Local background processor.'''
def process(self, command, args=None, kw=None):
'''Process *command* with *args* and *kw*.'''
process = multiprocessing.Process(target=command, args=args, kwargs=kw)
process.start()
process.join()
|
1713cf8553d7f21d1192ed58138ecf7875c4b181 | icebergsdk/front_modules.py | icebergsdk/front_modules.py | # -*- coding: utf-8 -*-
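Aside: the guard above is needed because multiprocessing's Process.__init__ runs tuple(args) and dict(kwargs) immediately, so passing None fails before start() is ever called. Minimal repro (the failing call is left commented out):

import multiprocessing

def noop():
    pass

# multiprocessing.Process(target=noop, args=None)  # TypeError: 'NoneType' object is not iterable
p = multiprocessing.Process(target=noop, args=(), kwargs={})  # fine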
import logging
from icebergsdk.mixins.request_mixin import IcebergRequestBase
logger = logging.getLogger('icebergsdk.frontmodules')
class FrontModules(IcebergRequestBase):
cache_key = "icebergsdk:frontmodule:data"
cache_expire = 60*60 # one hour
def __init__(self, *args, **kwargs):
super(FrontModules, self).__init__(*args, **kwargs)
self.cache = kwargs.get('cache', None)
self.lang = kwargs.get('lang', "en")
def get_module_data(self, module_name):
return self.modules_data['modules'][module_name]
####
# Loader
####
@property
def modules_data(self):
"""
Helper to fetch Iceberg client side javascript templates
"""
if hasattr(self, "_modules_data"):
return getattr(self, "_modules_data")
if self.cache:
data = self.cache.get("%s:%s" % (self.cache_key, self.lang), False)
if data:
setattr(self, '_modules_data', data)
return data
data = self.request(self.conf.ICEBERG_MODULES_URL) # Do to, add lang
setattr(self, '_modules_data', data)
if self.cache:
self.cache.set("%s:%s" % (self.cache_key, self.lang), data, self.cache_expire)
return data
| # -*- coding: utf-8 -*-
import logging
from icebergsdk.mixins.request_mixin import IcebergRequestBase
logger = logging.getLogger('icebergsdk.frontmodules')
class FrontModules(IcebergRequestBase):
cache_key = "icebergsdk:frontmodule:data"
cache_expire = 60*60 # one hour
def __init__(self, *args, **kwargs):
super(FrontModules, self).__init__(*args, **kwargs)
self.cache = kwargs.get('cache', None)
self.lang = kwargs.get('lang', "en")
self.debug = kwargs.get('debug', False)
def get_module_data(self, module_name):
return self.modules_data['modules'][module_name]
####
# Loader
####
@property
def modules_data(self):
"""
Helper to fetch Iceberg client side javascript templates
"""
if hasattr(self, "_modules_data"):
return getattr(self, "_modules_data")
if self.cache:
data = self.cache.get("%s:%s" % (self.cache_key, self.lang), False)
if data:
setattr(self, '_modules_data', data)
return data
data = self.request(self.conf.ICEBERG_MODULES_URL, args = {
"lang": self.lang,
"enviro": self.conf.ICEBERG_ENV,
"debug": self.debug
}) # Do to, add lang
setattr(self, '_modules_data', data)
if self.cache:
self.cache.set("%s:%s" % (self.cache_key, self.lang), data, self.cache_expire)
return data
| Add lang, enviro in request | Add lang, enviro in request
| Python | mit | izberg-marketplace/izberg-api-python,Iceberg-Marketplace/Iceberg-API-PYTHON | <REPLACE_OLD> "en")
<REPLACE_NEW> "en")
self.debug = kwargs.get('debug', False)
<REPLACE_END> <REPLACE_OLD> self.request(self.conf.ICEBERG_MODULES_URL) <REPLACE_NEW> self.request(self.conf.ICEBERG_MODULES_URL, args = {
"lang": self.lang,
"enviro": self.conf.ICEBERG_ENV,
"debug": self.debug
}) <REPLACE_END> <|endoftext|> # -*- coding: utf-8 -*-
import logging
from icebergsdk.mixins.request_mixin import IcebergRequestBase
logger = logging.getLogger('icebergsdk.frontmodules')
class FrontModules(IcebergRequestBase):
cache_key = "icebergsdk:frontmodule:data"
cache_expire = 60*60 # one hour
def __init__(self, *args, **kwargs):
super(FrontModules, self).__init__(*args, **kwargs)
self.cache = kwargs.get('cache', None)
self.lang = kwargs.get('lang', "en")
self.debug = kwargs.get('debug', False)
def get_module_data(self, module_name):
return self.modules_data['modules'][module_name]
####
# Loader
####
@property
def modules_data(self):
"""
Helper to fetch Iceberg client side javascript templates
"""
if hasattr(self, "_modules_data"):
return getattr(self, "_modules_data")
if self.cache:
data = self.cache.get("%s:%s" % (self.cache_key, self.lang), False)
if data:
setattr(self, '_modules_data', data)
return data
data = self.request(self.conf.ICEBERG_MODULES_URL, args = {
"lang": self.lang,
"enviro": self.conf.ICEBERG_ENV,
"debug": self.debug
}) # Do to, add lang
setattr(self, '_modules_data', data)
if self.cache:
self.cache.set("%s:%s" % (self.cache_key, self.lang), data, self.cache_expire)
return data
| Add lang, enviro in request
# -*- coding: utf-8 -*-
import logging
from icebergsdk.mixins.request_mixin import IcebergRequestBase
logger = logging.getLogger('icebergsdk.frontmodules')
class FrontModules(IcebergRequestBase):
cache_key = "icebergsdk:frontmodule:data"
cache_expire = 60*60 # one hour
def __init__(self, *args, **kwargs):
super(FrontModules, self).__init__(*args, **kwargs)
self.cache = kwargs.get('cache', None)
self.lang = kwargs.get('lang', "en")
def get_module_data(self, module_name):
return self.modules_data['modules'][module_name]
####
# Loader
####
@property
def modules_data(self):
"""
Helper to fetch Iceberg client side javascript templates
"""
if hasattr(self, "_modules_data"):
return getattr(self, "_modules_data")
if self.cache:
data = self.cache.get("%s:%s" % (self.cache_key, self.lang), False)
if data:
setattr(self, '_modules_data', data)
return data
data = self.request(self.conf.ICEBERG_MODULES_URL) # Do to, add lang
setattr(self, '_modules_data', data)
if self.cache:
self.cache.set("%s:%s" % (self.cache_key, self.lang), data, self.cache_expire)
return data
|
0e54e8ac75acbd289c2fde2d7fae486cc31ab3ab | tests/test_block_aio.py | tests/test_block_aio.py | # -*- coding: utf-8 -*-
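Aside: modules_data above is a two-level read-through cache: instance attribute first, then the external cache, then the network, writing back on the way out. The same pattern in isolation (hypothetical helper; cache is any object with get/set):

def read_through(cache, key, loader, expire):
    value = cache.get(key) if cache else None
    if not value:
        value = loader()
        if cache:
            cache.set(key, value, expire)
    return value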
import aiounittest
from graphenecommon.utils import parse_time
from .fixtures_aio import fixture_data, Block, BlockHeader
class Testcases(aiounittest.AsyncTestCase):
def setUp(self):
fixture_data()
async def test_block(self):
block = await Block(1)
self.assertEqual(block["previous"], "0000000000000000000000000000000000000000")
self.assertEqual(block.time(), parse_time("2015-10-13T14:12:24"))
async def test_blockheader(self):
header = await BlockHeader(1)
self.assertEqual(header["previous"], "0000000000000000000000000000000000000000")
self.assertEqual(header.time(), parse_time("2015-10-13T14:12:24"))
| Add test for async Block | Add test for async Block
| Python | mit | xeroc/python-graphenelib | <INSERT> # -*- coding: utf-8 -*-
import aiounittest
from graphenecommon.utils import parse_time
from .fixtures_aio import fixture_data, Block, BlockHeader
class Testcases(aiounittest.AsyncTestCase):
<INSERT_END> <INSERT> def setUp(self):
fixture_data()
async def test_block(self):
block = await Block(1)
self.assertEqual(block["previous"], "0000000000000000000000000000000000000000")
self.assertEqual(block.time(), parse_time("2015-10-13T14:12:24"))
async def test_blockheader(self):
header = await BlockHeader(1)
self.assertEqual(header["previous"], "0000000000000000000000000000000000000000")
self.assertEqual(header.time(), parse_time("2015-10-13T14:12:24"))
<INSERT_END> <|endoftext|> # -*- coding: utf-8 -*-
import aiounittest
from graphenecommon.utils import parse_time
from .fixtures_aio import fixture_data, Block, BlockHeader
class Testcases(aiounittest.AsyncTestCase):
def setUp(self):
fixture_data()
async def test_block(self):
block = await Block(1)
self.assertEqual(block["previous"], "0000000000000000000000000000000000000000")
self.assertEqual(block.time(), parse_time("2015-10-13T14:12:24"))
async def test_blockheader(self):
header = await BlockHeader(1)
self.assertEqual(header["previous"], "0000000000000000000000000000000000000000")
self.assertEqual(header.time(), parse_time("2015-10-13T14:12:24"))
| Add test for async Block
|
|
ab36778ec3c8ed69ce798816161ee35a368e2dc2 | tests/test_base.py | tests/test_base.py | # Copyright 2013 OpenStack Foundation
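Aside: aiounittest.AsyncTestCase is what lets the async def test methods above be driven by the ordinary unittest runner. Smallest usage sketch (illustrative):

import aiounittest

async def answer():
    return 42

class Example(aiounittest.AsyncTestCase):
    async def test_answer(self):
        self.assertEqual(await answer(), 42)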
# Copyright (C) 2013 Yahoo! Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import testtools
from glanceclient.common import base
class TestBase(testtools.TestCase):
def test_resource_repr(self):
r = base.Resource(None, dict(foo="bar", baz="spam"))
self.assertEqual(repr(r), "<Resource baz=spam, foo=bar>")
def test_getid(self):
self.assertEqual(base.getid(4), 4)
class TmpObject(object):
id = 4
self.assertEqual(base.getid(TmpObject), 4)
def test_two_resources_with_same_id_are_equal(self):
# Two resources of the same type with the same id: equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 1, 'name': 'hello'})
self.assertEqual(r1, r2)
def test_two_resources_with_eq_info_are_equal(self):
# Two resources with no ID: equal if their info is equal
r1 = base.Resource(None, {'name': 'joe', 'age': 12})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertEqual(r1, r2)
def test_two_resources_with_diff_id_are_not_equal(self):
# Two resources with diff ID: not equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 2, 'name': 'hello'})
self.assertNotEqual(r1, r2)
def test_two_resources_with_not_eq_info_are_not_equal(self):
# Two resources with no ID: not equal if their info is not equal
r1 = base.Resource(None, {'name': 'bill', 'age': 21})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertNotEqual(r1, r2)
| Improve unit tests for python-glanceclient.glanceclient.common.base | Improve unit tests for python-glanceclient.glanceclient.common.base
Add several tests for glanceclient.common.base module
Fixes: bug #1144158
Change-Id: Ifc288075c79849ee1384f09f513874ee08cd0248
| Python | apache-2.0 | ntt-sic/python-glanceclient,citrix-openstack-build/python-glanceclient,metacloud/python-glanceclient,alexpilotti/python-glanceclient,metacloud/python-glanceclient,klmitch/python-glanceclient,klmitch/python-glanceclient,citrix-openstack-build/python-glanceclient,varunarya10/python-glanceclient,JioCloud/python-glanceclient,alexpilotti/python-glanceclient,openstack/python-glanceclient,mmasaki/python-glanceclient,openstack/python-glanceclient,ntt-sic/python-glanceclient,mmasaki/python-glanceclient,varunarya10/python-glanceclient,JioCloud/python-glanceclient | <REPLACE_OLD> <REPLACE_NEW> # Copyright 2013 OpenStack Foundation
# Copyright (C) 2013 Yahoo! Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import testtools
from glanceclient.common import base
class TestBase(testtools.TestCase):
def test_resource_repr(self):
r = base.Resource(None, dict(foo="bar", baz="spam"))
self.assertEqual(repr(r), "<Resource baz=spam, foo=bar>")
def test_getid(self):
self.assertEqual(base.getid(4), 4)
class TmpObject(object):
id = 4
self.assertEqual(base.getid(TmpObject), 4)
def test_two_resources_with_same_id_are_equal(self):
# Two resources of the same type with the same id: equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 1, 'name': 'hello'})
self.assertEqual(r1, r2)
def test_two_resources_with_eq_info_are_equal(self):
# Two resources with no ID: equal if their info is equal
r1 = base.Resource(None, {'name': 'joe', 'age': 12})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertEqual(r1, r2)
def test_two_resources_with_diff_id_are_not_equal(self):
# Two resources with diff ID: not equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 2, 'name': 'hello'})
self.assertNotEqual(r1, r2)
def test_two_resources_with_not_eq_info_are_not_equal(self):
# Two resources with no ID: not equal if their info is not equal
r1 = base.Resource(None, {'name': 'bill', 'age': 21})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertNotEqual(r1, r2)
<REPLACE_END> <|endoftext|> # Copyright 2013 OpenStack Foundation
# Copyright (C) 2013 Yahoo! Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import testtools
from glanceclient.common import base
class TestBase(testtools.TestCase):
def test_resource_repr(self):
r = base.Resource(None, dict(foo="bar", baz="spam"))
self.assertEqual(repr(r), "<Resource baz=spam, foo=bar>")
def test_getid(self):
self.assertEqual(base.getid(4), 4)
class TmpObject(object):
id = 4
self.assertEqual(base.getid(TmpObject), 4)
def test_two_resources_with_same_id_are_equal(self):
# Two resources of the same type with the same id: equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 1, 'name': 'hello'})
self.assertEqual(r1, r2)
def test_two_resources_with_eq_info_are_equal(self):
# Two resources with no ID: equal if their info is equal
r1 = base.Resource(None, {'name': 'joe', 'age': 12})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertEqual(r1, r2)
def test_two_resources_with_diff_id_are_not_equal(self):
# Two resources with diff ID: not equal
r1 = base.Resource(None, {'id': 1, 'name': 'hi'})
r2 = base.Resource(None, {'id': 2, 'name': 'hello'})
self.assertNotEqual(r1, r2)
def test_two_resources_with_not_eq_info_are_not_equal(self):
# Two resources with no ID: not equal if their info is not equal
r1 = base.Resource(None, {'name': 'bill', 'age': 21})
r2 = base.Resource(None, {'name': 'joe', 'age': 12})
self.assertNotEqual(r1, r2)
| Improve unit tests for python-glanceclient.glanceclient.common.base
Add several tests for glanceclient.common.base module
Fixes: bug #1144158
Change-Id: Ifc288075c79849ee1384f09f513874ee08cd0248
|
|
4f0415f5cb7f8322a0738cb1d55c7102464d3aef | openedx/core/djangoapps/discussions/tests/test_views.py | openedx/core/djangoapps/discussions/tests/test_views.py | """
Test app view logic
"""
# pylint: disable=test-inherits-tests
import unittest
from django.conf import settings
from django.urls import reverse
from opaque_keys.edx.keys import CourseKey
from rest_framework import status
from rest_framework.test import APITestCase
from common.djangoapps.student.tests.factories import UserFactory
from lms.djangoapps.courseware.tests.factories import GlobalStaffFactory
from lms.djangoapps.courseware.tests.factories import StaffFactory
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'URLs are only configured in LMS')
class ApiTest(APITestCase):
"""
Test basic API operations
"""
def setUp(self):
super().setUp()
self.course_key = CourseKey.from_string('course-v1:Test+Course+Configured')
self.url = reverse(
'discussions',
kwargs={
'course_key_string': str(self.course_key),
}
)
self.password = 'password'
self.user_student = UserFactory(username='dummy', password=self.password)
self.user_staff_course = StaffFactory(course_key=self.course_key, password=self.password)
self.user_staff_global = GlobalStaffFactory(password=self.password)
class UnauthorizedApiTest(ApiTest):
"""
Logged-out users should _not_ have any access
"""
expected_response_code = status.HTTP_401_UNAUTHORIZED
def test_access_get(self):
response = self.client.get(self.url)
assert response.status_code == self.expected_response_code
def test_access_patch(self):
response = self.client.patch(self.url)
assert response.status_code == self.expected_response_code
def test_access_post(self):
response = self.client.post(self.url)
assert response.status_code == self.expected_response_code
def test_access_put(self):
response = self.client.put(self.url)
assert response.status_code == self.expected_response_code
class AuthenticatedApiTest(UnauthorizedApiTest):
"""
Logged-in users should _not_ have any access
"""
expected_response_code = status.HTTP_403_FORBIDDEN
def setUp(self):
super().setUp()
self._login()
def _login(self):
self.client.login(username=self.user_student.username, password=self.password)
class AuthorizedApiTest(AuthenticatedApiTest):
"""
Global Staff should have access to all supported methods
"""
expected_response_code = status.HTTP_200_OK
def _login(self):
self.client.login(username=self.user_staff_global.username, password=self.password)
def test_access_patch(self):
response = self.client.patch(self.url)
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
def test_access_put(self):
response = self.client.put(self.url)
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
| Add tests for discussions API access | test: Add tests for discussions API access
This checks for expected API access [1];
data integrity will be checked later [2].
This work exposes that the code currently does _not_ grant access to
_course_ staff, only _global_ staff. This is being addressed next [3].
Fix: TNL-8229 [1]
- [1] https://openedx.atlassian.net/browse/TNL-8229
- [2] https://openedx.atlassian.net/browse/TNL-8230
- [3] https://openedx.atlassian.net/browse/TNL-8231
| Python | agpl-3.0 | edx/edx-platform,angelapper/edx-platform,eduNEXT/edx-platform,arbrandes/edx-platform,eduNEXT/edx-platform,angelapper/edx-platform,arbrandes/edx-platform,angelapper/edx-platform,arbrandes/edx-platform,edx/edx-platform,arbrandes/edx-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,edx/edx-platform,edx/edx-platform,angelapper/edx-platform | <REPLACE_OLD> <REPLACE_NEW> """
Test app view logic
"""
# pylint: disable=test-inherits-tests
import unittest
from django.conf import settings
from django.urls import reverse
from opaque_keys.edx.keys import CourseKey
from rest_framework import status
from rest_framework.test import APITestCase
from common.djangoapps.student.tests.factories import UserFactory
from lms.djangoapps.courseware.tests.factories import GlobalStaffFactory
from lms.djangoapps.courseware.tests.factories import StaffFactory
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'URLs are only configured in LMS')
class ApiTest(APITestCase):
"""
Test basic API operations
"""
def setUp(self):
super().setUp()
self.course_key = CourseKey.from_string('course-v1:Test+Course+Configured')
self.url = reverse(
'discussions',
kwargs={
'course_key_string': str(self.course_key),
}
)
self.password = 'password'
self.user_student = UserFactory(username='dummy', password=self.password)
self.user_staff_course = StaffFactory(course_key=self.course_key, password=self.password)
self.user_staff_global = GlobalStaffFactory(password=self.password)
class UnauthorizedApiTest(ApiTest):
"""
Logged-out users should _not_ have any access
"""
expected_response_code = status.HTTP_401_UNAUTHORIZED
def test_access_get(self):
response = self.client.get(self.url)
assert response.status_code == self.expected_response_code
def test_access_patch(self):
response = self.client.patch(self.url)
assert response.status_code == self.expected_response_code
def test_access_post(self):
response = self.client.post(self.url)
assert response.status_code == self.expected_response_code
def test_access_put(self):
response = self.client.put(self.url)
assert response.status_code == self.expected_response_code
class AuthenticatedApiTest(UnauthorizedApiTest):
"""
Logged-in users should _not_ have any access
"""
expected_response_code = status.HTTP_403_FORBIDDEN
def setUp(self):
super().setUp()
self._login()
def _login(self):
self.client.login(username=self.user_student.username, password=self.password)
class AuthorizedApiTest(AuthenticatedApiTest):
"""
Global Staff should have access to all supported methods
"""
expected_response_code = status.HTTP_200_OK
def _login(self):
self.client.login(username=self.user_staff_global.username, password=self.password)
def test_access_patch(self):
response = self.client.patch(self.url)
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
def test_access_put(self):
response = self.client.put(self.url)
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
<REPLACE_END> <|endoftext|> """
Test app view logic
"""
# pylint: disable=test-inherits-tests
import unittest
from django.conf import settings
from django.urls import reverse
from opaque_keys.edx.keys import CourseKey
from rest_framework import status
from rest_framework.test import APITestCase
from common.djangoapps.student.tests.factories import UserFactory
from lms.djangoapps.courseware.tests.factories import GlobalStaffFactory
from lms.djangoapps.courseware.tests.factories import StaffFactory
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'URLs are only configured in LMS')
class ApiTest(APITestCase):
"""
Test basic API operations
"""
def setUp(self):
super().setUp()
self.course_key = CourseKey.from_string('course-v1:Test+Course+Configured')
self.url = reverse(
'discussions',
kwargs={
'course_key_string': str(self.course_key),
}
)
self.password = 'password'
self.user_student = UserFactory(username='dummy', password=self.password)
self.user_staff_course = StaffFactory(course_key=self.course_key, password=self.password)
self.user_staff_global = GlobalStaffFactory(password=self.password)
class UnauthorizedApiTest(ApiTest):
"""
Logged-out users should _not_ have any access
"""
expected_response_code = status.HTTP_401_UNAUTHORIZED
def test_access_get(self):
response = self.client.get(self.url)
assert response.status_code == self.expected_response_code
def test_access_patch(self):
response = self.client.patch(self.url)
assert response.status_code == self.expected_response_code
def test_access_post(self):
response = self.client.post(self.url)
assert response.status_code == self.expected_response_code
def test_access_put(self):
response = self.client.put(self.url)
assert response.status_code == self.expected_response_code
class AuthenticatedApiTest(UnauthorizedApiTest):
"""
Logged-in users should _not_ have any access
"""
expected_response_code = status.HTTP_403_FORBIDDEN
def setUp(self):
super().setUp()
self._login()
def _login(self):
self.client.login(username=self.user_student.username, password=self.password)
class AuthorizedApiTest(AuthenticatedApiTest):
"""
Global Staff should have access to all supported methods
"""
expected_response_code = status.HTTP_200_OK
def _login(self):
self.client.login(username=self.user_staff_global.username, password=self.password)
def test_access_patch(self):
response = self.client.patch(self.url)
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
def test_access_put(self):
response = self.client.put(self.url)
assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
| test: Add tests for discussions API access
This checks for expected API access [1];
data integrity will be checked later [2].
This work exposes that the code currently does _not_ grant access to
_course_ staff, only _global_ staff. This is being addressed next [3].
Fix: TNL-8229 [1]
- [1] https://openedx.atlassian.net/browse/TNL-8229
- [2] https://openedx.atlassian.net/browse/TNL-8230
- [3] https://openedx.atlassian.net/browse/TNL-8231
|
|
4efdee1f93e85b96607a21c0d8f79343ef989697 | setup.py | setup.py | from setuptools import setup, find_packages
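Aside: the suite above encodes a permission matrix through inheritance: each subclass swaps only the login step and the expected status code, and every inherited test re-runs against the new setup (hence the pylint disable for test-inherits-tests). The pattern in miniature, self-contained and with no HTTP:

import unittest

class AnonymousAccess(unittest.TestCase):
    expected = 401
    logged_in = False

    def request(self):
        # stand-in for a real API call
        return 200 if self.logged_in else 401

    def test_get(self):
        self.assertEqual(self.request(), self.expected)

class AuthenticatedAccess(AnonymousAccess):  # re-runs test_get with new values
    expected = 200
    logged_in = True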
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = '[email protected]',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
| from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = '[email protected]',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo', 'mpi4py'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
| Make mpi4py required for this package. | Make mpi4py required for this package.
| Python | bsd-3-clause | csadorf/signac,csadorf/signac | <REPLACE_OLD> install_requires=['pymongo'],
<REPLACE_NEW> install_requires=['pymongo', 'mpi4py'],
<REPLACE_END> <|endoftext|> from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = '[email protected]',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo', 'mpi4py'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
| Make mpi4py required for this package.
from setuptools import setup, find_packages
setup(
name = 'compdb',
version = '0.1',
package_dir = {'': 'src'},
packages = find_packages('src'),
author = 'Carl Simon Adorf',
author_email = '[email protected]',
description = "Computational Database.",
keywords = 'simulation tools mc md monte-carlo mongodb jobmanagement materials database',
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Scientific/Engineering :: Physics",
],
install_requires=['pymongo'],
entry_points = {
'console_scripts': [
'compdb = compdb.contrib.script:main',
'compdb_init = compdb.contrib.init_project:main',
'compdb_configure = compdb.contrib.configure:main',
],
},
)
|
f94bc30004aa9977bac652d337f69069efc132bd | marmoset/pxe/__init__.py | marmoset/pxe/__init__.py | from .label import Label
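Aside: each console_scripts entry above makes pip generate a small executable wrapper; 'compdb = compdb.contrib.script:main' behaves roughly like this stub (semantics only, the generated wrapper differs in detail):

import sys
from compdb.contrib.script import main

if __name__ == '__main__':
    sys.exit(main())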
from .client_config import ClientConfig
def create(args):
pxe_client = ClientConfig(args.ip_address, args.password, args.script)
pxe_client.create(Label.find(args.label))
msg = 'Created %s with password %s'
print(msg % (pxe_client.file_path(), pxe_client.password))
def list(args):
for pxe_client in ClientConfig.all():
print('%s: %s' % (pxe_client.ip_address, pxe_client.label))
def remove(args):
pxe_client = ClientConfig(args.ip_address)
if pxe_client.remove():
print('Removed', pxe_client.file_path())
else:
print('No entry found for', pxe_client.ip_address)
| from .label import Label
from .client_config import ClientConfig
def create(args):
pxe_client = ClientConfig(args.ip_address, args.password, args.script)
used_options = pxe_client.create(Label.find(args.label))
msg = 'Created %s with following Options:'
print(msg % pxe_client.file_path())
for option in used_options:
print("\t%s" % option)
def list(args):
for pxe_client in ClientConfig.all():
print('%s: %s' % (pxe_client.ip_address, pxe_client.label))
def remove(args):
pxe_client = ClientConfig(args.ip_address)
if pxe_client.remove():
print('Removed', pxe_client.file_path())
else:
print('No entry found for', pxe_client.ip_address)
 | Implement better result output for pxe config file create | Implement better result output for pxe config file create
| Python | agpl-3.0 | aibor/marmoset | <REPLACE_OLD> pxe_client.create(Label.find(args.label))
<REPLACE_NEW> used_options = pxe_client.create(Label.find(args.label))
<REPLACE_END> <REPLACE_OLD> password %s'
<REPLACE_NEW> following Options:'
<REPLACE_END> <REPLACE_OLD> (pxe_client.file_path(), pxe_client.password))
def <REPLACE_NEW> pxe_client.file_path())
for option in used_options:
print("\t%s" % option)
def <REPLACE_END> <|endoftext|> from .label import Label
from .client_config import ClientConfig
def create(args):
pxe_client = ClientConfig(args.ip_address, args.password, args.script)
used_options = pxe_client.create(Label.find(args.label))
msg = 'Created %s with following Options:'
print(msg % pxe_client.file_path())
for option in used_options:
print("\t%s" % option)
def list(args):
for pxe_client in ClientConfig.all():
print('%s: %s' % (pxe_client.ip_address, pxe_client.label))
def remove(args):
pxe_client = ClientConfig(args.ip_address)
if pxe_client.remove():
print('Removed', pxe_client.file_path())
else:
print('No entry found for', pxe_client.ip_address)
| Implement better result output for pxe config file crete
from .label import Label
from .client_config import ClientConfig
def create(args):
pxe_client = ClientConfig(args.ip_address, args.password, args.script)
pxe_client.create(Label.find(args.label))
msg = 'Created %s with password %s'
print(msg % (pxe_client.file_path(), pxe_client.password))
def list(args):
for pxe_client in ClientConfig.all():
print('%s: %s' % (pxe_client.ip_address, pxe_client.label))
def remove(args):
pxe_client = ClientConfig(args.ip_address)
if pxe_client.remove():
print('Removed', pxe_client.file_path())
else:
print('No entry found for', pxe_client.ip_address)
|
390851ce7c606e803094487e6278ea5620d26f3c | src/python/vff.py | src/python/vff.py | """Show a command to edit fred files"""
import os
import sys
from dotsite.paths import makepath, pwd
def get_freds(paths):
if not paths:
paths = ['.']
result = set()
for path in paths:
path = makepath(path)
if path.isdir():
result |= {p for p in path.files('fred*') if p[-1] != '~'}
elif path.isfile() and path.name.startswith('fred'):
result.add(path)
return [pwd().relpathto(p) for p in result]
def main(args):
freds = get_freds(args)
if not freds:
return not os.EX_OK
print 'v %s' % ' '.join(freds)
return os.EX_OK
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| """Show a command to edit fred files"""
import os
import sys
from dotsite.paths import makepath, pwd
def get_freds(paths):
if not paths:
paths = ['~/tmp']
result = set()
for path in paths:
path = makepath(path)
if path.isdir():
result |= {p for p in path.files('fred*') if p[-1] != '~'}
elif path.isfile() and path.name.startswith('fred'):
result.add(path)
return [pwd().relpathto(p) for p in result]
def main(args):
freds = get_freds(args)
if not freds:
return not os.EX_OK
print 'v %s' % ' '.join(freds)
return os.EX_OK
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| Put temp files in ~/tmp by default | Put temp files in ~/tmp by default
| Python | mit | jalanb/jab,jalanb/dotjab,jalanb/jab,jalanb/dotjab | <REPLACE_OLD> ['.']
<REPLACE_NEW> ['~/tmp']
<REPLACE_END> <|endoftext|> """Show a command to edit fred files"""
import os
import sys
from dotsite.paths import makepath, pwd
def get_freds(paths):
if not paths:
paths = ['~/tmp']
result = set()
for path in paths:
path = makepath(path)
if path.isdir():
result |= {p for p in path.files('fred*') if p[-1] != '~'}
elif path.isfile() and path.name.startswith('fred'):
result.add(path)
return [pwd().relpathto(p) for p in result]
def main(args):
freds = get_freds(args)
if not freds:
return not os.EX_OK
print 'v %s' % ' '.join(freds)
return os.EX_OK
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| Put temp files in ~/tmp by default
"""Show a command to edit fred files"""
import os
import sys
from dotsite.paths import makepath, pwd
def get_freds(paths):
if not paths:
paths = ['.']
result = set()
for path in paths:
path = makepath(path)
if path.isdir():
result |= {p for p in path.files('fred*') if p[-1] != '~'}
elif path.isfile() and path.name.startswith('fred'):
result.add(path)
return [pwd().relpathto(p) for p in result]
def main(args):
freds = get_freds(args)
if not freds:
return not os.EX_OK
print 'v %s' % ' '.join(freds)
return os.EX_OK
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
212b5a126e464ff46e60e00846bbb87a2de3fbb2 | seleniumbase/config/proxy_list.py | seleniumbase/config/proxy_list.py | """
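Aside: one caveat about the new default above: '~' is only expanded if the path library does so explicitly, and whether dotsite's makepath() does is an assumption worth checking; otherwise '~/tmp' is treated as a literal relative directory. The stdlib-safe spelling:

import os.path

default = os.path.expanduser('~/tmp')   # e.g. '/home/user/tmp'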
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| Update the sample proxy list | Update the sample proxy list
| Python | mit | mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase | <DELETE> "example3": "82.200.233.4:3128", # (Example) - set your own proxy here
<DELETE_END> <|endoftext|> """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| Update the sample proxy list
"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
|
4f2743ed845185de718763df6d26db390ee2eb48 | test_putget.py | test_putget.py | from multiprocessing import Process, Queue
q = Queue()
iterations = 10000000
def produce(q):
for i in range(iterations):
q.put(i)
if __name__ == "__main__":
t = Process(target=produce, args=(q,))
t.start()
previous = -1
for i in range(iterations):
m = q.get()
if m != previous + 1:
print "Fail at:", previous, m
break
previous = m
print "done"
| Add equivalent put/get test in python. | Add equivalent put/get test in python.
| Python | mit | abwilson/L3,abwilson/L3,tempbottle/L3,tempbottle/L3 | <INSERT> from multiprocessing import Process, Queue
q = Queue()
iterations = 10000000
def produce(q):
<INSERT_END> <INSERT> for i in range(iterations):
q.put(i)
if __name__ == "__main__":
t = Process(target=produce, args=(q,))
t.start()
previous = -1
for i in range(iterations):
m = q.get()
if m != previous + 1:
print "Fail at:", previous, m
break
previous = m
print "done"
<INSERT_END> <|endoftext|> from multiprocessing import Process, Queue
q = Queue()
iterations = 10000000
def produce(q):
for i in range(iterations):
q.put(i)
if __name__ == "__main__":
t = Process(target=produce, args=(q,))
t.start()
previous = -1
for i in range(iterations):
m = q.get()
if m != previous + 1:
print "Fail at:", previous, m
break
previous = m
print "done"
| Add equivalent put/get test in python.
|
|
f7b1d233ed39eed24e3c1489738df01f700112e3 | tensorflow/contrib/tensorrt/__init__.py | tensorflow/contrib/tensorrt/__init__.py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
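Aside: the test above stops after a fixed iteration count; a common variant ends the consumer with a sentinel instead, so the producer decides when the stream is finished. Minimal sketch:

from multiprocessing import Process, Queue

SENTINEL = None

def produce(q, n):
    for i in range(n):
        q.put(i)
    q.put(SENTINEL)              # signal completion

if __name__ == "__main__":
    q = Queue()
    p = Process(target=produce, args=(q, 1000))
    p.start()
    while True:
        m = q.get()
        if m is SENTINEL:
            break
    p.join()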
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Exposes the python wrapper for TensorRT graph transforms."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,wildcard-import
try:
from tensorflow.contrib.tensorrt.python import * # pylint: disable=import-not-at-top
except Exception as e:
no_trt_message = (
'**** Failed to initialize TensorRT. This is either because the TensorRT'
' installation path is not in LD_LIBRARY_PATH, or because you do not have it'
' installed. If not installed, please go to'
' https://developer.nvidia.com/tensorrt to download and install'
' TensorRT ****')
print(no_trt_message)
raise e
# pylint: enable=unused-import,wildcard-import
| # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Exposes the python wrapper for TensorRT graph transforms."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,wildcard-import,g-import-not-at-top
try:
from tensorflow.contrib.tensorrt.python import *
except Exception as e:
no_trt_message = (
'**** Failed to initialize TensorRT. This is either because the TensorRT'
' installation path is not in LD_LIBRARY_PATH, or because you do not have'
' it installed. If not installed, please go to'
' https://developer.nvidia.com/tensorrt to download and install'
' TensorRT ****')
print(no_trt_message)
raise e
# pylint: enable=unused-import,wildcard-import,g-import-not-at-top
| Move the pylint message and fix comment length | Move the pylint message and fix comment length
| Python | apache-2.0 | paolodedios/tensorflow,lukeiwanski/tensorflow,alshedivat/tensorflow,kobejean/tensorflow,frreiss/tensorflow-fred,Xeralux/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,alsrgv/tensorflow,nburn42/tensorflow,meteorcloudy/tensorflow,Xeralux/tensorflow,paolodedios/tensorflow,ppwwyyxx/tensorflow,gautam1858/tensorflow,cxxgtxy/tensorflow,gautam1858/tensorflow,girving/tensorflow,yanchen036/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,petewarden/tensorflow,DavidNorman/tensorflow,seanli9jan/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,apark263/tensorflow,gojira/tensorflow,xodus7/tensorflow,jalexvig/tensorflow,allenlavoie/tensorflow,apark263/tensorflow,Intel-Corporation/tensorflow,hehongliang/tensorflow,nburn42/tensorflow,jart/tensorflow,ZhangXinNan/tensorflow,theflofly/tensorflow,caisq/tensorflow,xzturn/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,cxxgtxy/tensorflow,alsrgv/tensorflow,AnishShah/tensorflow,brchiu/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,freedomtan/tensorflow,yongtang/tensorflow,arborh/tensorflow,alshedivat/tensorflow,freedomtan/tensorflow,alsrgv/tensorflow,dongjoon-hyun/tensorflow,gojira/tensorflow,arborh/tensorflow,adit-chandra/tensorflow,ppwwyyxx/tensorflow,nburn42/tensorflow,girving/tensorflow,Intel-tensorflow/tensorflow,gojira/tensorflow,girving/tensorflow,AnishShah/tensorflow,manipopopo/tensorflow,snnn/tensorflow,xzturn/tensorflow,alshedivat/tensorflow,gautam1858/tensorflow,jendap/tensorflow,jendap/tensorflow,snnn/tensorflow,gunan/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gunan/tensorflow,jalexvig/tensorflow,dancingdan/tensorflow,lukeiwanski/tensorflow,xzturn/tensorflow,gunan/tensorflow,lukeiwanski/tensorflow,jalexvig/tensorflow,dendisuhubdy/tensorflow,alshedivat/tensorflow,kobejean/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gunan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,eaplatanios/tensorflow,dancingdan/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_tf_optimizer,kevin-coder/tensorflow-fork,ppwwyyxx/tensorflow,aam-at/tensorflow,aselle/tensorflow,yongtang/tensorflow,aam-at/tensorflow,frreiss/tensorflow-fred,paolodedios/tensorflow,ppwwyyxx/tensorflow,frreiss/tensorflow-fred,arborh/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,davidzchen/tensorflow,jbedorf/tensorflow,nburn42/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,hehongliang/tensorflow,dendisuhubdy/tensorflow,xzturn/tensorflow,jhseu/tensorflow,jendap/tensorflow,xzturn/tensorflow,girving/tensorflow,aam-at/tensorflow,karllessard/tensorflow,ageron/tensorflow,apark263/tensorflow,frreiss/tensorflow-fred,jalexvig/tensorflow,AnishShah/tensorflow,alsrgv/tensorflow,chemelnucfin/tensorflow,chemelnucfin/tensorflow,theflofly/tensorflow,kevin-coder/tensorflow-fork,gojira/tensorflow,Xeralux/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Bismarrck/tensorflow,jbedorf/tensorflow,allenlavoie/tensorflow,girving/tensorflow,gautam1858/tensorflow,snnn/tensorflow,kevin-coder/tensorflow-fork,manipopopo/tensorflow,caisq/tensorflow,jart/tensorflow,aam-at/tensorflow,eaplatanios/tensorflow,petewarden/tensorflow,arborh/tensorflow,jart/tensorflow,snnn/tensorflow,caisq/tensorflow,annarev/tensorflow,ageron/tensorflow,xodus7/tensorflow,asimshankar/tensorflow,ghchinoy/tensorflow,yongtang
/tensorflow,yanchen036/tensorflow,xzturn/tensorflow,chemelnucfin/tensorflow,yongtang/tensorflow,aselle/tensorflow,alsrgv/tensorflow,yanchen036/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,asimshankar/tensorflow,ZhangXinNan/tensorflow,annarev/tensorflow,gojira/tensorflow,paolodedios/tensorflow,gunan/tensorflow,aam-at/tensorflow,theflofly/tensorflow,drpngx/tensorflow,arborh/tensorflow,snnn/tensorflow,jart/tensorflow,xodus7/tensorflow,seanli9jan/tensorflow,jbedorf/tensorflow,dendisuhubdy/tensorflow,jendap/tensorflow,adit-chandra/tensorflow,apark263/tensorflow,seanli9jan/tensorflow,alshedivat/tensorflow,kevin-coder/tensorflow-fork,nburn42/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,lukeiwanski/tensorflow,hfp/tensorflow-xsmm,gunan/tensorflow,eaplatanios/tensorflow,dendisuhubdy/tensorflow,ageron/tensorflow,allenlavoie/tensorflow,dongjoon-hyun/tensorflow,xzturn/tensorflow,aldian/tensorflow,karllessard/tensorflow,benoitsteiner/tensorflow-xsmm,hfp/tensorflow-xsmm,yongtang/tensorflow,aam-at/tensorflow,karllessard/tensorflow,benoitsteiner/tensorflow-xsmm,chemelnucfin/tensorflow,xodus7/tensorflow,caisq/tensorflow,arborh/tensorflow,arborh/tensorflow,dendisuhubdy/tensorflow,ageron/tensorflow,jhseu/tensorflow,allenlavoie/tensorflow,xodus7/tensorflow,kobejean/tensorflow,aselle/tensorflow,ppwwyyxx/tensorflow,gautam1858/tensorflow,freedomtan/tensorflow,AnishShah/tensorflow,dendisuhubdy/tensorflow,paolodedios/tensorflow,AnishShah/tensorflow,adit-chandra/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,meteorcloudy/tensorflow,apark263/tensorflow,kevin-coder/tensorflow-fork,chemelnucfin/tensorflow,manipopopo/tensorflow,kevin-coder/tensorflow-fork,girving/tensorflow,drpngx/tensorflow,sarvex/tensorflow,caisq/tensorflow,jhseu/tensorflow,kobejean/tensorflow,gunan/tensorflow,asimshankar/tensorflow,Xeralux/tensorflow,asimshankar/tensorflow,davidzchen/tensorflow,ZhangXinNan/tensorflow,adit-chandra/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,jhseu/tensorflow,ageron/tensorflow,ghchinoy/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,cxxgtxy/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,jendap/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,girving/tensorflow,manipopopo/tensorflow,kobejean/tensorflow,ZhangXinNan/tensorflow,brchiu/tensorflow,hfp/tensorflow-xsmm,ppwwyyxx/tensorflow,annarev/tensorflow,xodus7/tensorflow,ageron/tensorflow,aselle/tensorflow,caisq/tensorflow,dendisuhubdy/tensorflow,DavidNorman/tensorflow,eaplatanios/tensorflow,ghchinoy/tensorflow,adit-chandra/tensorflow,renyi533/tensorflow,jalexvig/tensorflow,dongjoon-hyun/tensorflow,dancingdan/tensorflow,kobejean/tensorflow,aldian/tensorflow,renyi533/tensorflow,nburn42/tensorflow,davidzchen/tensorflow,annarev/tensorflow,ghchinoy/tensorflow,allenlavoie/tensorflow,alsrgv/tensorflow,petewarden/tensorflow,eaplatanios/tensorflow,gunan/tensorflow,Xeralux/tensorflow,tensorflow/tensorflow,apark263/tensorflow,jbedorf/tensorflow,brchiu/tensorflow,yanchen036/tensorflow,snnn/tensorflow,nburn42/tensorflow,benoitsteiner/tensorflow-xsmm,alsrgv/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,freedomtan/tensorflow,yanchen036/tensorflow,aselle/tensorflow,yongtang/tensorflow,AnishShah/tensorflow,meteorcloudy/tensorflow,drpngx/tensorflow,jhseu/tensorflow,sarvex/tensorflow,Intel-tensorflow/tensorflow,manipopopo/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,jendap/tensorfl
ow,davidzchen/tensorflow,meteorcloudy/tensorflow,tensorflow/tensorflow-pywrap_saved_model,xodus7/tensorflow,hehongliang/tensorflow,meteorcloudy/tensorflow,girving/tensorflow,nburn42/tensorflow,hfp/tensorflow-xsmm,alsrgv/tensorflow,theflofly/tensorflow,Bismarrck/tensorflow,chemelnucfin/tensorflow,DavidNorman/tensorflow,jart/tensorflow,DavidNorman/tensorflow,kevin-coder/tensorflow-fork,xzturn/tensorflow,manipopopo/tensorflow,kobejean/tensorflow,benoitsteiner/tensorflow-xsmm,allenlavoie/tensorflow,theflofly/tensorflow,adit-chandra/tensorflow,drpngx/tensorflow,drpngx/tensorflow,chemelnucfin/tensorflow,aam-at/tensorflow,ageron/tensorflow,dongjoon-hyun/tensorflow,gautam1858/tensorflow,AnishShah/tensorflow,karllessard/tensorflow,xodus7/tensorflow,jbedorf/tensorflow,benoitsteiner/tensorflow-xsmm,seanli9jan/tensorflow,dongjoon-hyun/tensorflow,aldian/tensorflow,alsrgv/tensorflow,xodus7/tensorflow,asimshankar/tensorflow,davidzchen/tensorflow,dancingdan/tensorflow,hfp/tensorflow-xsmm,aam-at/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,Xeralux/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,DavidNorman/tensorflow,annarev/tensorflow,hehongliang/tensorflow,aselle/tensorflow,brchiu/tensorflow,lukeiwanski/tensorflow,eaplatanios/tensorflow,meteorcloudy/tensorflow,Xeralux/tensorflow,DavidNorman/tensorflow,petewarden/tensorflow,Bismarrck/tensorflow,brchiu/tensorflow,AnishShah/tensorflow,hfp/tensorflow-xsmm,gojira/tensorflow,aselle/tensorflow,jbedorf/tensorflow,benoitsteiner/tensorflow-xsmm,davidzchen/tensorflow,aselle/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Bismarrck/tensorflow,AnishShah/tensorflow,renyi533/tensorflow,snnn/tensorflow,aselle/tensorflow,dancingdan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jalexvig/tensorflow,tensorflow/tensorflow,snnn/tensorflow,jart/tensorflow,tensorflow/tensorflow,benoitsteiner/tensorflow-xsmm,gojira/tensorflow,ppwwyyxx/tensorflow,dancingdan/tensorflow,snnn/tensorflow,adit-chandra/tensorflow,hehongliang/tensorflow,Intel-Corporation/tensorflow,asimshankar/tensorflow,manipopopo/tensorflow,xzturn/tensorflow,aldian/tensorflow,arborh/tensorflow,caisq/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,ZhangXinNan/tensorflow,lukeiwanski/tensorflow,aldian/tensorflow,DavidNorman/tensorflow,ghchinoy/tensorflow,chemelnucfin/tensorflow,davidzchen/tensorflow,annarev/tensorflow,ageron/tensorflow,Intel-Corporation/tensorflow,jbedorf/tensorflow,nburn42/tensorflow,arborh/tensorflow,davidzchen/tensorflow,cxxgtxy/tensorflow,Intel-tensorflow/tensorflow,alshedivat/tensorflow,jhseu/tensorflow,annarev/tensorflow,allenlavoie/tensorflow,aam-at/tensorflow,jbedorf/tensorflow,brchiu/tensorflow,asimshankar/tensorflow,kobejean/tensorflow,kevin-coder/tensorflow-fork,meteorcloudy/tensorflow,xzturn/tensorflow,tensorflow/tensorflow,jhseu/tensorflow,jalexvig/tensorflow,hfp/tensorflow-xsmm,eaplatanios/tensorflow,ghchinoy/tensorflow,dancingdan/tensorflow,manipopopo/tensorflow,renyi533/tensorflow,allenlavoie/tensorflow,DavidNorman/tensorflow,frreiss/tensorflow-fred,freedomtan/tensorflow,girving/tensorflow,seanli9jan/tensorflow,kevin-coder/tensorflow-fork,chemelnucfin/tensorflow,gunan/tensorflow,ghchinoy/tensorflow,cxxgtxy/tensorflow,asimshankar/tensorflow,allenlavoie/tensorflow,tensorflow/tensorflow,caisq/tensorflow,adit-chandra/tensorflow,yongtang/tensorflow,ZhangXinNan/tensorflow,theflofly/tensorflow,dancingdan/tensorflow,gautam1858/tensorflow,manipopopo/tensorflow,meteorcloudy/tensorflow,d
avidzchen/tensorflow,gunan/tensorflow,freedomtan/tensorflow,hfp/tensorflow-xsmm,ZhangXinNan/tensorflow,tensorflow/tensorflow,dongjoon-hyun/tensorflow,jendap/tensorflow,cxxgtxy/tensorflow,eaplatanios/tensorflow,DavidNorman/tensorflow,girving/tensorflow,Xeralux/tensorflow,ageron/tensorflow,theflofly/tensorflow,karllessard/tensorflow,benoitsteiner/tensorflow-xsmm,alshedivat/tensorflow,seanli9jan/tensorflow,ghchinoy/tensorflow,tensorflow/tensorflow,caisq/tensorflow,Intel-tensorflow/tensorflow,alshedivat/tensorflow,alsrgv/tensorflow,yanchen036/tensorflow,theflofly/tensorflow,aselle/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,DavidNorman/tensorflow,Intel-tensorflow/tensorflow,dongjoon-hyun/tensorflow,ghchinoy/tensorflow,seanli9jan/tensorflow,ppwwyyxx/tensorflow,gunan/tensorflow,allenlavoie/tensorflow,jart/tensorflow,freedomtan/tensorflow,lukeiwanski/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,xodus7/tensorflow,renyi533/tensorflow,ghchinoy/tensorflow,aam-at/tensorflow,ageron/tensorflow,dongjoon-hyun/tensorflow,drpngx/tensorflow,cxxgtxy/tensorflow,hehongliang/tensorflow,Xeralux/tensorflow,hfp/tensorflow-xsmm,gojira/tensorflow,petewarden/tensorflow,ghchinoy/tensorflow,brchiu/tensorflow,brchiu/tensorflow,frreiss/tensorflow-fred,petewarden/tensorflow,Bismarrck/tensorflow,caisq/tensorflow,sarvex/tensorflow,jhseu/tensorflow,dendisuhubdy/tensorflow,karllessard/tensorflow,ZhangXinNan/tensorflow,Bismarrck/tensorflow,alsrgv/tensorflow,snnn/tensorflow,jhseu/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,jart/tensorflow,benoitsteiner/tensorflow-xsmm,Intel-tensorflow/tensorflow,annarev/tensorflow,paolodedios/tensorflow,jalexvig/tensorflow,sarvex/tensorflow,xzturn/tensorflow,jalexvig/tensorflow,kevin-coder/tensorflow-fork,frreiss/tensorflow-fred,nburn42/tensorflow,gojira/tensorflow,gojira/tensorflow,alshedivat/tensorflow,davidzchen/tensorflow,girving/tensorflow,AnishShah/tensorflow,drpngx/tensorflow,freedomtan/tensorflow,ZhangXinNan/tensorflow,kobejean/tensorflow,chemelnucfin/tensorflow,apark263/tensorflow,annarev/tensorflow,xodus7/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ageron/tensorflow,lukeiwanski/tensorflow,asimshankar/tensorflow,chemelnucfin/tensorflow,renyi533/tensorflow,dancingdan/tensorflow,allenlavoie/tensorflow,jart/tensorflow,arborh/tensorflow,aam-at/tensorflow,yanchen036/tensorflow,drpngx/tensorflow,sarvex/tensorflow,apark263/tensorflow,petewarden/tensorflow,seanli9jan/tensorflow,adit-chandra/tensorflow,davidzchen/tensorflow,eaplatanios/tensorflow,benoitsteiner/tensorflow-xsmm,Xeralux/tensorflow,petewarden/tensorflow,kobejean/tensorflow,tensorflow/tensorflow-pywrap_saved_model,renyi533/tensorflow,yanchen036/tensorflow,apark263/tensorflow,Intel-tensorflow/tensorflow,AnishShah/tensorflow,paolodedios/tensorflow,xzturn/tensorflow,alshedivat/tensorflow,yongtang/tensorflow,jbedorf/tensorflow,aam-at/tensorflow,renyi533/tensorflow,jalexvig/tensorflow,adit-chandra/tensorflow,dongjoon-hyun/tensorflow,hehongliang/tensorflow,nburn42/tensorflow,jhseu/tensorflow,Bismarrck/tensorflow,apark263/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Bismarrck/tensorflow,ZhangXinNan/tensorflow,dancingdan/tensorflow,manipopopo/tensorflow,theflofly/tensorflow,gojira/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Bismarrck/tensorflow,apark263/tensorflow,adit-chandra/tensorflo
w,petewarden/tensorflow,seanli9jan/tensorflow,alsrgv/tensorflow,brchiu/tensorflow,karllessard/tensorflow,renyi533/tensorflow,gautam1858/tensorflow,jbedorf/tensorflow,jart/tensorflow,ppwwyyxx/tensorflow,hfp/tensorflow-xsmm,theflofly/tensorflow,petewarden/tensorflow,drpngx/tensorflow,annarev/tensorflow,petewarden/tensorflow,gunan/tensorflow,ppwwyyxx/tensorflow,hfp/tensorflow-xsmm,arborh/tensorflow,ppwwyyxx/tensorflow,dongjoon-hyun/tensorflow,jendap/tensorflow,freedomtan/tensorflow,drpngx/tensorflow,davidzchen/tensorflow,meteorcloudy/tensorflow,kevin-coder/tensorflow-fork,aldian/tensorflow,arborh/tensorflow,Bismarrck/tensorflow,kobejean/tensorflow,renyi533/tensorflow,benoitsteiner/tensorflow-xsmm,eaplatanios/tensorflow,karllessard/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jhseu/tensorflow,jendap/tensorflow,karllessard/tensorflow,Bismarrck/tensorflow,sarvex/tensorflow,aldian/tensorflow,asimshankar/tensorflow,snnn/tensorflow,jalexvig/tensorflow,sarvex/tensorflow,ZhangXinNan/tensorflow,eaplatanios/tensorflow,dancingdan/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow,ghchinoy/tensorflow,jbedorf/tensorflow,cxxgtxy/tensorflow,jendap/tensorflow,asimshankar/tensorflow,theflofly/tensorflow,seanli9jan/tensorflow,meteorcloudy/tensorflow,manipopopo/tensorflow,aldian/tensorflow,lukeiwanski/tensorflow,jendap/tensorflow,dendisuhubdy/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,theflofly/tensorflow,jbedorf/tensorflow,dongjoon-hyun/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,freedomtan/tensorflow,lukeiwanski/tensorflow,tensorflow/tensorflow,brchiu/tensorflow,petewarden/tensorflow,ppwwyyxx/tensorflow,paolodedios/tensorflow,alshedivat/tensorflow,ageron/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aselle/tensorflow,brchiu/tensorflow,adit-chandra/tensorflow,jhseu/tensorflow,annarev/tensorflow,Xeralux/tensorflow,frreiss/tensorflow-fred,chemelnucfin/tensorflow,Intel-Corporation/tensorflow,dendisuhubdy/tensorflow | <REPLACE_OLD> disable=unused-import,wildcard-import
try:
<REPLACE_NEW> disable=unused-import,wildcard-import,g-import-not-at-top
try:
<REPLACE_END> <REPLACE_OLD> * # pylint: disable=import-not-at-top
except <REPLACE_NEW> *
except <REPLACE_END> <REPLACE_OLD> have it'
<REPLACE_NEW> have'
<REPLACE_END> <INSERT> it <INSERT_END> <REPLACE_OLD> enable=unused-import,wildcard-import
<REPLACE_NEW> enable=unused-import,wildcard-import,g-import-not-at-top
<REPLACE_END> <|endoftext|> # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Exposes the python wrapper for TensorRT graph transforms."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,wildcard-import,g-import-not-at-top
try:
from tensorflow.contrib.tensorrt.python import *
except Exception as e:
no_trt_message = (
'**** Failed to initialize TensorRT. This is either because the TensorRT'
' installation path is not in LD_LIBRARY_PATH, or because you do not have'
' it installed. If not installed, please go to'
' https://developer.nvidia.com/tensorrt to download and install'
' TensorRT ****')
print(no_trt_message)
raise e
# pylint: enable=unused-import,wildcard-import,g-import-not-at-top
| Move the pylint message and fix comment length
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Exposes the python wrapper for TensorRT graph transforms."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,wildcard-import
try:
from tensorflow.contrib.tensorrt.python import * # pylint: disable=import-not-at-top
except Exception as e:
no_trt_message = (
'**** Failed to initialize TensorRT. This is either because the TensorRT'
' installation path is not in LD_LIBRARY_PATH, or because you do not have it'
' installed. If not installed, please go to'
' https://developer.nvidia.com/tensorrt to download and install'
' TensorRT ****')
print(no_trt_message)
raise e
# pylint: enable=unused-import,wildcard-import
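
Note on the pattern above: a standalone # pylint: disable=... comment suppresses the listed messages from that line until a matching # pylint: enable=..., which is why the commit brackets the whole optional-import block instead of annotating a single line. A minimal sketch of the same technique, assuming a hypothetical optional module name:

# pylint: disable=unused-import,wildcard-import
try:
    # Re-export everything from an optional native extension; pylint
    # would otherwise flag each re-exported name as unused.
    from some_optional_backend import *
except ImportError as err:
    print('optional backend could not be loaded: {}'.format(err))
    raise
# pylint: enable=unused-import,wildcard-import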
|
d633d3c13a958b279b93d09142a772e59c798f6f | peas-demo/plugins/pythonhello/pythonhello.py | peas-demo/plugins/pythonhello/pythonhello.py | # -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
| # -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
| Fix a typo in the python plugin. | [PeasDemo] Fix a typo in the python plugin.
The console message said "do_activate" while the plugin was actually
being deactivated.
| Python | lgpl-2.1 | GNOME/libpeas,gregier/libpeas,Distrotech/libpeas,chergert/libpeas,GNOME/libpeas,chergert/libpeas,gregier/libpeas,chergert/libpeas,gregier/libpeas,gregier/libpeas,Distrotech/libpeas,Distrotech/libpeas | <REPLACE_OLD> "PythonHelloPlugin.do_activate", <REPLACE_NEW> "PythonHelloPlugin.do_deactivate", <REPLACE_END> <|endoftext|> # -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_deactivate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
| [PeasDemo] Fix a typo in the python plugin.
The console message said "do_activate" while the plugin was actually
being deactivated.
# -*- coding: utf-8 -*-
# ex:set ts=4 et sw=4 ai:
import gobject
import libpeas
import gtk
LABEL_STRING="Python Says Hello!"
class PythonHelloPlugin(libpeas.Plugin):
def do_activate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window._pythonhello_label = gtk.Label(LABEL_STRING)
window._pythonhello_label.show()
window.get_child().pack_start(window._pythonhello_label)
def do_deactivate(self, window):
print "PythonHelloPlugin.do_activate", repr(window)
window.get_child().remove(window._pythonhello_label)
window._pythonhello_label.destroy()
gobject.type_register(PythonHelloPlugin)
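
The bug fixed in this record is copy-paste drift between two near-identical print statements. A hedged way to make such messages self-maintaining is to derive them from the calling frame instead of hard-coding the method name (a sketch, not part of the commit):

import inspect

def trace_call(obj):
    # Print "<ClassName>.<method>" for whichever method called this
    # helper, so activate/deactivate messages can never drift apart.
    method = inspect.stack()[1][3]
    print("%s.%s" % (type(obj).__name__, method))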
|
2a32fc912a5839f627a216918e4671e6547ee53b | tests/utils/driver.py | tests/utils/driver.py | import os
from importlib import import_module
from .testdriver import TestDriver
class Driver(TestDriver):
drivers = {}
def __new__(cls, type, *args, **kwargs):
if type not in cls.drivers:
try:
mod = import_module('onitu.drivers.{}.tests.driver'.
format(type))
except ImportError:
raise KeyError("No such driver {}".format(repr(type)))
cls.drivers[type] = mod.Driver
return cls.drivers[type](*args, **kwargs)
class LocalStorageDriver(TestDriver):
def __new__(cls, *args, **kwargs):
return Driver('local_storage', *args, **kwargs)
class TargetDriver(Driver):
def __new__(cls, *args, **kwargs):
type = os.environ.get('ONITU_TEST_DRIVER', 'local_storage')
return Driver(type, *args, **kwargs)
| import os
import pkg_resources
from .testdriver import TestDriver
class Driver(TestDriver):
drivers = {}
def __new__(cls, name, *args, **kwargs):
entry_points = pkg_resources.iter_entry_points('onitu.tests')
tests_modules = {e.name: e for e in entry_points}
if name not in tests_modules:
raise ImportError(
"Cannot import tests for driver {}".format(name)
)
try:
tests = tests_modules[name].load()
except ImportError as e:
raise ImportError(
"Error importing tests for driver {}: {}".format(name, e)
)
try:
driver = tests.Driver
except ImportError:
raise ImportError(
"Tests for driver {} don't expose a"
"Driver class".format(name)
)
cls.drivers[name] = driver
return driver(*args, **kwargs)
class LocalStorageDriver(TestDriver):
def __new__(cls, *args, **kwargs):
return Driver('local_storage', *args, **kwargs)
class TargetDriver(Driver):
def __new__(cls, *args, **kwargs):
type = os.environ.get('ONITU_TEST_DRIVER', 'local_storage')
return Driver(type, *args, **kwargs)
| Load tests helpers using entry_points | Load tests helpers using entry_points
| Python | mit | onitu/onitu,onitu/onitu,onitu/onitu | <REPLACE_OLD> os
from importlib import import_module
from <REPLACE_NEW> os
import pkg_resources
from <REPLACE_END> <REPLACE_OLD> type, <REPLACE_NEW> name, <REPLACE_END> <INSERT> entry_points = pkg_resources.iter_entry_points('onitu.tests')
tests_modules = {e.name: e for e in entry_points}
<INSERT_END> <REPLACE_OLD> type <REPLACE_NEW> name <REPLACE_END> <REPLACE_OLD> cls.drivers:
<REPLACE_NEW> tests_modules:
raise ImportError(
"Cannot import tests for driver {}".format(name)
)
<REPLACE_END> <REPLACE_OLD> mod = import_module('onitu.drivers.{}.tests.driver'.
format(type))
<REPLACE_NEW> tests = tests_modules[name].load()
except ImportError as e:
raise ImportError(
"Error importing tests for driver {}: {}".format(name, e)
)
try:
driver = tests.Driver
<REPLACE_END> <DELETE> <DELETE_END> <REPLACE_OLD> KeyError("No such <REPLACE_NEW> ImportError(
"Tests for <REPLACE_END> <REPLACE_OLD> {}".format(repr(type)))
cls.drivers[type] = mod.Driver
<REPLACE_NEW> {} don't expose a "
"Driver class".format(name)
)
cls.drivers[name] = driver
<REPLACE_END> <REPLACE_OLD> cls.drivers[type](*args, <REPLACE_NEW> driver(*args, <REPLACE_END> <|endoftext|> import os
import pkg_resources
from .testdriver import TestDriver
class Driver(TestDriver):
drivers = {}
def __new__(cls, name, *args, **kwargs):
entry_points = pkg_resources.iter_entry_points('onitu.tests')
tests_modules = {e.name: e for e in entry_points}
if name not in tests_modules:
raise ImportError(
"Cannot import tests for driver {}".format(name)
)
try:
tests = tests_modules[name].load()
except ImportError as e:
raise ImportError(
"Error importing tests for driver {}: {}".format(name, e)
)
try:
driver = tests.Driver
except ImportError:
raise ImportError(
"Tests for driver {} don't expose a"
"Driver class".format(name)
)
cls.drivers[name] = driver
return driver(*args, **kwargs)
class LocalStorageDriver(TestDriver):
def __new__(cls, *args, **kwargs):
return Driver('local_storage', *args, **kwargs)
class TargetDriver(Driver):
def __new__(cls, *args, **kwargs):
type = os.environ.get('ONITU_TEST_DRIVER', 'local_storage')
return Driver(type, *args, **kwargs)
| Load tests helpers using entry_points
import os
from importlib import import_module
from .testdriver import TestDriver
class Driver(TestDriver):
drivers = {}
def __new__(cls, type, *args, **kwargs):
if type not in cls.drivers:
try:
mod = import_module('onitu.drivers.{}.tests.driver'.
format(type))
except ImportError:
raise KeyError("No such driver {}".format(repr(type)))
cls.drivers[type] = mod.Driver
return cls.drivers[type](*args, **kwargs)
class LocalStorageDriver(TestDriver):
def __new__(cls, *args, **kwargs):
return Driver('local_storage', *args, **kwargs)
class TargetDriver(Driver):
def __new__(cls, *args, **kwargs):
type = os.environ.get('ONITU_TEST_DRIVER', 'local_storage')
return Driver(type, *args, **kwargs)
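
For pkg_resources.iter_entry_points('onitu.tests') to yield anything, each driver package must advertise its tests module under that group in its own setup.py. A sketch of such a registration; the package and module names are illustrative, not taken from the repository:

from setuptools import setup

setup(
    name='onitu-local-storage',
    packages=['onitu_local_storage'],
    entry_points={
        # Group 'onitu.tests': entry name -> module that must expose
        # the Driver class loaded by tests/utils/driver.py above.
        'onitu.tests': [
            'local_storage = onitu_local_storage.tests.driver',
        ],
    },
)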
|
19df6de71721db1a4d7b43e360731704ba462d9d | tests/services/user/test_find_user.py | tests/services/user/test_find_user.py | """
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.user import service as user_service
from tests.conftest import database_recreated
from tests.helpers import create_user
@pytest.fixture(scope='module')
def app(party_app, db):
with party_app.app_context():
with database_recreated(db):
yield party_app
@pytest.fixture(scope='module')
def user():
return create_user(
'CarmenSandiego', email_address='[email protected]'
)
def test_find_user_by_email_address_non_lowercase(app, user):
actual = user_service.find_user_by_email_address(
'[email protected]'
)
assert actual is not None
assert actual.email_address == '[email protected]'
def test_find_user_by_email_address_unknown(app, user):
actual = user_service.find_user_by_email_address('[email protected]')
assert actual is None
def test_find_user_by_screen_name_case_sensitive_match(app, user):
actual = user_service.find_user_by_screen_name('CarmenSandiego')
assert actual is not None
assert actual.screen_name == 'CarmenSandiego'
def test_find_user_by_screen_name_case_sensitive_miss(app, user):
actual = user_service.find_user_by_screen_name('cARMENsANDIEGO')
assert actual is None
def test_find_user_by_screen_name_case_insensitive_match(app, user):
actual = user_service.find_user_by_screen_name(
'cARMENsANDIEGO', case_insensitive=True
)
assert actual is not None
assert actual.screen_name == 'CarmenSandiego'
def test_find_user_by_screen_name_case_insensitive_miss(app, user):
actual = user_service.find_user_by_screen_name(
'cARMENsANDIEGOx', case_insensitive=True
)
assert actual is None
def test_find_user_by_screen_name_unknown(app, user):
actual = user_service.find_user_by_screen_name('Dunno')
assert actual is None
| Test finding user by email address, screen name | Test finding user by email address, screen name
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | <REPLACE_OLD> <REPLACE_NEW> """
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.user import service as user_service
from tests.conftest import database_recreated
from tests.helpers import create_user
@pytest.fixture(scope='module')
def app(party_app, db):
with party_app.app_context():
with database_recreated(db):
yield party_app
@pytest.fixture(scope='module')
def user():
return create_user(
'CarmenSandiego', email_address='[email protected]'
)
def test_find_user_by_email_address_non_lowercase(app, user):
actual = user_service.find_user_by_email_address(
'[email protected]'
)
assert actual is not None
assert actual.email_address == '[email protected]'
def test_find_user_by_email_address_unknown(app, user):
actual = user_service.find_user_by_email_address('[email protected]')
assert actual is None
def test_find_user_by_screen_name_case_sensitive_match(app, user):
actual = user_service.find_user_by_screen_name('CarmenSandiego')
assert actual is not None
assert actual.screen_name == 'CarmenSandiego'
def test_find_user_by_screen_name_case_sensitive_miss(app, user):
actual = user_service.find_user_by_screen_name('cARMENsANDIEGO')
assert actual is None
def test_find_user_by_screen_name_case_insensitive_match(app, user):
actual = user_service.find_user_by_screen_name(
'cARMENsANDIEGO', case_insensitive=True
)
assert actual is not None
assert actual.screen_name == 'CarmenSandiego'
def test_find_user_by_screen_name_case_insensitive_miss(app, user):
actual = user_service.find_user_by_screen_name(
'cARMENsANDIEGOx', case_insensitive=True
)
assert actual is None
def test_find_user_by_screen_name_unknown(app, user):
actual = user_service.find_user_by_screen_name('Dunno')
assert actual is None
<REPLACE_END> <|endoftext|> """
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import pytest
from byceps.services.user import service as user_service
from tests.conftest import database_recreated
from tests.helpers import create_user
@pytest.fixture(scope='module')
def app(party_app, db):
with party_app.app_context():
with database_recreated(db):
yield party_app
@pytest.fixture(scope='module')
def user():
return create_user(
'CarmenSandiego', email_address='[email protected]'
)
def test_find_user_by_email_address_non_lowercase(app, user):
actual = user_service.find_user_by_email_address(
'[email protected]'
)
assert actual is not None
assert actual.email_address == '[email protected]'
def test_find_user_by_email_address_unknown(app, user):
actual = user_service.find_user_by_email_address('[email protected]')
assert actual is None
def test_find_user_by_screen_name_case_sensitive_match(app, user):
actual = user_service.find_user_by_screen_name('CarmenSandiego')
assert actual is not None
assert actual.screen_name == 'CarmenSandiego'
def test_find_user_by_screen_name_case_sensitive_miss(app, user):
actual = user_service.find_user_by_screen_name('cARMENsANDIEGO')
assert actual is None
def test_find_user_by_screen_name_case_insensitive_match(app, user):
actual = user_service.find_user_by_screen_name(
'cARMENsANDIEGO', case_insensitive=True
)
assert actual is not None
assert actual.screen_name == 'CarmenSandiego'
def test_find_user_by_screen_name_case_insensitive_miss(app, user):
actual = user_service.find_user_by_screen_name(
'cARMENsANDIEGOx', case_insensitive=True
)
assert actual is None
def test_find_user_by_screen_name_unknown(app, user):
actual = user_service.find_user_by_screen_name('Dunno')
assert actual is None
| Test finding user by email address, screen name
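
The four screen-name tests above differ only in inputs and expected outcome, so they could also be expressed as one parametrized test. A hedged sketch against the same user_service API used in the record:

import pytest
from byceps.services.user import service as user_service

@pytest.mark.parametrize('screen_name, case_insensitive, found', [
    ('CarmenSandiego',  False, True),
    ('cARMENsANDIEGO',  False, False),
    ('cARMENsANDIEGO',  True,  True),
    ('cARMENsANDIEGOx', True,  False),
])
def test_find_user_by_screen_name(app, user, screen_name,
                                  case_insensitive, found):
    actual = user_service.find_user_by_screen_name(
        screen_name, case_insensitive=case_insensitive
    )
    assert (actual is not None) == found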
|
|
c2b11e603de32d65f5f5ddf500c4e04d3bcce4fd | setup.py | setup.py | from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, author_email='[email protected]'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| Add missing metadata to suppress warning | Add missing metadata to suppress warning
Doesn't fix the "503 Backend is unhealthy" error I'm getting from
`python setup.py register`, however.
| Python | mit | gratipay/dependency_injection.py,gratipay/dependency_injection.py | <INSERT> author_email='[email protected]'
, <INSERT_END> <|endoftext|> from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, author_email='[email protected]'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| Add missing metadata to suppress warning
Doesn't fix the "503 Backend is unhealthy" error I'm getting from
`python setup.py register`, however.
from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
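
The warning mentioned in the message comes from distutils' metadata check, which wants complete author/maintainer contact fields. One hedged way to surface those warnings locally before uploading, using only the standard library:

from distutils.core import run_setup

# Run the 'check' command against the setup script; missing
# author/author_email (or maintainer) pairs are reported as warnings.
run_setup('setup.py', script_args=['check'], stop_after='run')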
|
c6d81ce7eede6db801d4e9a92b27ec5d409d0eab | setup.py | setup.py | from setuptools import setup
setup(
name='autograd',
version='1.4',
description='Efficiently computes derivatives of numpy code.',
author='Dougal Maclaurin and David Duvenaud and Matthew Johnson',
author_email="[email protected], [email protected], [email protected]",
packages=['autograd', 'autograd.numpy', 'autograd.scipy', 'autograd.scipy.stats', 'autograd.misc'],
install_requires=['numpy>=1.12', 'future>=0.15.2'],
keywords=['Automatic differentiation', 'backpropagation', 'gradients',
'machine learning', 'optimization', 'neural networks',
'Python', 'Numpy', 'Scipy'],
url='https://github.com/HIPS/autograd',
license='MIT',
classifiers=['Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5'],
)
| from setuptools import setup
setup(
name='autograd',
version='1.5',
description='Efficiently computes derivatives of numpy code.',
author='Dougal Maclaurin and David Duvenaud and Matthew Johnson',
author_email="[email protected], [email protected], [email protected]",
packages=['autograd', 'autograd.numpy', 'autograd.scipy', 'autograd.scipy.stats', 'autograd.misc'],
install_requires=['numpy>=1.12', 'future>=0.15.2'],
keywords=['Automatic differentiation', 'backpropagation', 'gradients',
'machine learning', 'optimization', 'neural networks',
'Python', 'Numpy', 'Scipy'],
url='https://github.com/HIPS/autograd',
license='MIT',
classifiers=['Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5'],
)
| Increase version number for pypi | Increase version number for pypi
| Python | mit | HIPS/autograd,HIPS/autograd | <REPLACE_OLD> version='1.4',
<REPLACE_NEW> version='1.5',
<REPLACE_END> <|endoftext|> from setuptools import setup
setup(
name='autograd',
version='1.5',
description='Efficiently computes derivatives of numpy code.',
author='Dougal Maclaurin and David Duvenaud and Matthew Johnson',
author_email="[email protected], [email protected], [email protected]",
packages=['autograd', 'autograd.numpy', 'autograd.scipy', 'autograd.scipy.stats', 'autograd.misc'],
install_requires=['numpy>=1.12', 'future>=0.15.2'],
keywords=['Automatic differentiation', 'backpropagation', 'gradients',
'machine learning', 'optimization', 'neural networks',
'Python', 'Numpy', 'Scipy'],
url='https://github.com/HIPS/autograd',
license='MIT',
classifiers=['Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5'],
)
| Increase version number for pypi
from setuptools import setup
setup(
name='autograd',
version='1.4',
description='Efficiently computes derivatives of numpy code.',
author='Dougal Maclaurin and David Duvenaud and Matthew Johnson',
author_email="[email protected], [email protected], [email protected]",
packages=['autograd', 'autograd.numpy', 'autograd.scipy', 'autograd.scipy.stats', 'autograd.misc'],
install_requires=['numpy>=1.12', 'future>=0.15.2'],
keywords=['Automatic differentiation', 'backpropagation', 'gradients',
'machine learning', 'optimization', 'neural networks',
'Python', 'Numpy', 'Scipy'],
url='https://github.com/HIPS/autograd',
license='MIT',
classifiers=['Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5'],
)
|
8ba03f6be64ee12634183e0b5c5f3aa3b6014b94 | linguine/ops/StanfordCoreNLP.py | linguine/ops/StanfordCoreNLP.py | #!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
def __init__(self):
# I don't see anywhere to put properties like this path...
# For now it's hardcoded and would need to be changed when deployed...
print "Some stuff"
print os.path.abspath(__file__)
coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar')
print coreNLPPath
self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath])
def run(self, data):
results = []
for corpus in data:
results.append(self.proc.parse_doc(corpus.contents))
return results
| #!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
def __init__(self):
# I don't see anywhere to put properties like this path...
# For now it's hardcoded and would need to be changed when deployed...
coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar')
coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar')
self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath, coreNLPModelsPath])
def run(self, data):
results = []
for corpus in data:
results.append(self.proc.parse_doc(corpus.contents))
return results
| Add coreNLP models jar relative path as well | Add coreNLP models jar relative path as well
| Python | mit | rigatoni/linguine-python,Pastafarians/linguine-python | <REPLACE_OLD> print "Some stuff"
print os.path.abspath(__file__)
<REPLACE_NEW>
<REPLACE_END> <REPLACE_OLD> print coreNLPPath
<REPLACE_NEW> coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar')
<REPLACE_END> <REPLACE_OLD> corenlp_jars=[coreNLPPath])
<REPLACE_NEW> corenlp_jars=[coreNLPPath, coreNLPModelsPath])
<REPLACE_END> <|endoftext|> #!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
def __init__(self):
# I don't see anywhere to put properties like this path...
# For now it's hardcoded and would need to be changed when deployed...
coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar')
coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar')
self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath, coreNLPModelsPath])
def run(self, data):
results = []
for corpus in data:
results.append(self.proc.parse_doc(corpus.contents))
return results
| Add coreNLP models jar relative path as well
#!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
def __init__(self):
# I don't see anywhere to put properties like this path...
# For now it's hardcoded and would need to be changed when deployed...
print "Some stuff"
print os.path.abspath(__file__)
coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar')
print coreNLPPath
self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath])
def run(self, data):
results = []
for corpus in data:
results.append(self.proc.parse_doc(corpus.contents))
return results
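
Both jars are resolved relative to the module file rather than the working directory. A sketch of the same technique with normalization added; the file names below are illustrative:

import os

def resource_path(relative):
    # Resolve a path relative to this module's directory, regardless
    # of the working directory the process was started from.
    here = os.path.dirname(os.path.abspath(__file__))
    return os.path.normpath(os.path.join(here, relative))

jars = [resource_path('../../lib/stanfordCoreNLP.jar'),
        resource_path('../../lib/stanfordCoreNLPModels.jar')]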
|
972cb7c234729d2ce8bbab0937f8efbfe18a2eeb | lab_members/models.py | lab_members/models.py | from django.db import models
class Position(models.Model):
class Meta:
verbose_name = "Position"
verbose_name_plural = "Positions"
title = models.CharField(u'title',
blank=False,
default='',
help_text=u'Please enter a title for this position',
max_length=64,
unique=True,
)
def __str__(self):
pass
class Scientist(models.Model):
class Meta:
verbose_name = "Scientist"
verbose_name_plural = "Scientists"
full_name = models.CharField(u'full name',
blank=False,
default='',
help_text=u'Please enter a full name for this scientist',
max_length=64,
unique=True,
)
slug = models.SlugField(u'slug',
blank=False,
default='',
help_text=u'Please enter a unique slug for this scientist',
max_length=64,
)
title = models.ForeignKey('lab_members.Position',
blank=True,
default=None,
help_text=u'Please specify a title for this scientist',
null=True,
)
def __str__(self):
pass
| from django.db import models
class Position(models.Model):
class Meta:
verbose_name = "Position"
verbose_name_plural = "Positions"
title = models.CharField(u'title',
blank=False,
default='',
help_text=u'Please enter a title for this position',
max_length=64,
unique=True,
)
def __str__(self):
return self.title
class Scientist(models.Model):
class Meta:
verbose_name = "Scientist"
verbose_name_plural = "Scientists"
full_name = models.CharField(u'full name',
blank=False,
default='',
help_text=u'Please enter a full name for this scientist',
max_length=64,
unique=True,
)
slug = models.SlugField(u'slug',
blank=False,
default='',
help_text=u'Please enter a unique slug for this scientist',
max_length=64,
)
title = models.ForeignKey('lab_members.Position',
blank=True,
default=None,
help_text=u'Please specify a title for this scientist',
null=True,
)
def __str__(self):
return self.full_name
| Fix error: __str__ returned non-string (type NoneType) | Fix error: __str__ returned non-string (type NoneType)
| Python | bsd-3-clause | mfcovington/django-lab-members,mfcovington/django-lab-members,mfcovington/django-lab-members | <REPLACE_OLD> pass
class <REPLACE_NEW> return self.title
class <REPLACE_END> <REPLACE_OLD> pass
<REPLACE_NEW> return self.full_name
<REPLACE_END> <|endoftext|> from django.db import models
class Position(models.Model):
class Meta:
verbose_name = "Position"
verbose_name_plural = "Positions"
title = models.CharField(u'title',
blank=False,
default='',
help_text=u'Please enter a title for this position',
max_length=64,
unique=True,
)
def __str__(self):
return self.title
class Scientist(models.Model):
class Meta:
verbose_name = "Scientist"
verbose_name_plural = "Scientists"
full_name = models.CharField(u'full name',
blank=False,
default='',
help_text=u'Please enter a full name for this scientist',
max_length=64,
unique=True,
)
slug = models.SlugField(u'slug',
blank=False,
default='',
help_text=u'Please enter a unique slug for this scientist',
max_length=64,
)
title = models.ForeignKey('lab_members.Position',
blank=True,
default=None,
help_text=u'Please specify a title for this scientist',
null=True,
)
def __str__(self):
return self.full_name
| Fix error: __str__ returned non-string (type NoneType)
from django.db import models
class Position(models.Model):
class Meta:
verbose_name = "Position"
verbose_name_plural = "Positions"
title = models.CharField(u'title',
blank=False,
default='',
help_text=u'Please enter a title for this position',
max_length=64,
unique=True,
)
def __str__(self):
pass
class Scientist(models.Model):
class Meta:
verbose_name = "Scientist"
verbose_name_plural = "Scientists"
full_name = models.CharField(u'full name',
blank=False,
default='',
help_text=u'Please enter a full name for this scientist',
max_length=64,
unique=True,
)
slug = models.SlugField(u'slug',
blank=False,
default='',
help_text=u'Please enter a unique slug for this scientist',
max_length=64,
)
title = models.ForeignKey('lab_members.Position',
blank=True,
default=None,
help_text=u'Please specify a title for this scientist',
null=True,
)
def __str__(self):
pass
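
Django calls __str__ whenever an instance is rendered (admin change lists, the shell, form choices), and it must return a string — the bare pass above made it return None, hence the TypeError in the subject line. A hedged defensive variant of the fix, reusing the record's field names:

from django.db import models

class Scientist(models.Model):
    full_name = models.CharField(max_length=64, blank=True, default='')

    def __str__(self):
        # Always return a string: fall back to the primary key while
        # full_name is still blank, so rendering never sees None.
        return self.full_name or 'Scientist #{}'.format(self.pk)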
|
fd302e3f9cbc5bcf06d47600adc3e0f0df33c114 | f8a_jobs/auth.py | f8a_jobs/auth.py | from flask import session
from flask_oauthlib.client import OAuth
import f8a_jobs.defaults as configuration
oauth = OAuth()
github = oauth.remote_app(
'github',
consumer_key=configuration.GITHUB_CONSUMER_KEY,
consumer_secret=configuration.GITHUB_CONSUMER_SECRET,
request_token_params={'scope': 'user:email'},
base_url='https://api.github.com/',
request_token_url=None,
access_token_method='POST',
access_token_url='https://github.com/login/oauth/access_token',
authorize_url='https://github.com/login/oauth/authorize'
)
@github.tokengetter
def get_github_oauth_token():
return session.get('auth_token')
| from flask import session
from flask_oauthlib.client import OAuth
import f8a_jobs.defaults as configuration
oauth = OAuth()
github = oauth.remote_app(
'github',
consumer_key=configuration.GITHUB_CONSUMER_KEY,
consumer_secret=configuration.GITHUB_CONSUMER_SECRET,
request_token_params={'scope': 'user:email,read:org'},
base_url='https://api.github.com/',
request_token_url=None,
access_token_method='POST',
access_token_url='https://github.com/login/oauth/access_token',
authorize_url='https://github.com/login/oauth/authorize'
)
@github.tokengetter
def get_github_oauth_token():
return session.get('auth_token')
| Add read organization scope for OAuth | Add read organization scope for OAuth
This makes it possible to access the jobs service even for members of a
non-public organization.
| Python | apache-2.0 | fabric8-analytics/fabric8-analytics-jobs,fabric8-analytics/fabric8-analytics-jobs | <REPLACE_OLD> 'user:email'},
<REPLACE_NEW> 'user:email,read:org'},
<REPLACE_END> <|endoftext|> from flask import session
from flask_oauthlib.client import OAuth
import f8a_jobs.defaults as configuration
oauth = OAuth()
github = oauth.remote_app(
'github',
consumer_key=configuration.GITHUB_CONSUMER_KEY,
consumer_secret=configuration.GITHUB_CONSUMER_SECRET,
request_token_params={'scope': 'user:email,read:org'},
base_url='https://api.github.com/',
request_token_url=None,
access_token_method='POST',
access_token_url='https://github.com/login/oauth/access_token',
authorize_url='https://github.com/login/oauth/authorize'
)
@github.tokengetter
def get_github_oauth_token():
return session.get('auth_token')
| Add read organization scope for OAuth
This makes it possible to access the jobs service even for members of a
non-public organization.
from flask import session
from flask_oauthlib.client import OAuth
import f8a_jobs.defaults as configuration
oauth = OAuth()
github = oauth.remote_app(
'github',
consumer_key=configuration.GITHUB_CONSUMER_KEY,
consumer_secret=configuration.GITHUB_CONSUMER_SECRET,
request_token_params={'scope': 'user:email'},
base_url='https://api.github.com/',
request_token_url=None,
access_token_method='POST',
access_token_url='https://github.com/login/oauth/access_token',
authorize_url='https://github.com/login/oauth/authorize'
)
@github.tokengetter
def get_github_oauth_token():
return session.get('auth_token')
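
Requesting read:org only widens what the token may see; the service still has to ask GitHub whether the signed-in user belongs to the organization. A sketch using the github remote app defined above — the organization name is illustrative:

def is_org_member(org_login='fabric8-analytics'):
    # GET /user/orgs lists organizations visible to the token; with
    # read:org this includes the user's non-public memberships.
    response = github.get('user/orgs')
    return any(org.get('login') == org_login for org in response.data)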
|
29041cdaf3beca926f1dff1d3f147b7dc07ad8dd | pylp/cli/run.py | pylp/cli/run.py | """
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp, pylp.cli.logger as logger
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
| """
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp
import pylp.cli.logger as logger
from pylp.utils.paths import make_readable_path
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
readable_path = make_readable_path(path)
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(readable_path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(readable_path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
| Make pylpfile path more readable | Make pylpfile path more readable
| Python | mit | pylp/pylp | <REPLACE_OLD> pylp, <REPLACE_NEW> pylp
import <REPLACE_END> <REPLACE_OLD> logger
# <REPLACE_NEW> logger
from pylp.utils.paths import make_readable_path
# <REPLACE_END> <REPLACE_OLD> exists
if <REPLACE_NEW> exists
readable_path = make_readable_path(path)
if <REPLACE_END> <REPLACE_OLD> logger.magenta(path))
sys.exit(-1)
else:
logger.log("Using <REPLACE_NEW> logger.magenta(readable_path))
sys.exit(-1)
else:
logger.log("Using <REPLACE_END> <REPLACE_OLD> logger.magenta(path))
# <REPLACE_NEW> logger.magenta(readable_path))
# <REPLACE_END> <|endoftext|> """
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp
import pylp.cli.logger as logger
from pylp.utils.paths import make_readable_path
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
readable_path = make_readable_path(path)
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(readable_path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(readable_path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
| Make pylpfile path more readable
"""
Run a pylpfile.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import runpy, os, sys
import traceback
import asyncio
import pylp, pylp.cli.logger as logger
# Run a pylpfile
def run(path, tasks):
# Test if the pylpfile exists
if not os.path.isfile(path):
logger.log(logger.red("Can't read pylpfile "), logger.magenta(path))
sys.exit(-1)
else:
logger.log("Using pylpfile ", logger.magenta(path))
# Run the pylpfile
try:
runpy.run_path(path, None, "pylpfile")
except Exception as e:
traceback.print_exc(file=sys.stdout)
logger.log(logger.red("\nAn error has occurred during the execution of the pylpfile"))
sys.exit(-1)
# Start the tasks
for name in tasks:
pylp.start(name)
# Wait until all task are executed
loop = asyncio.get_event_loop()
loop.run_until_complete(wait_and_quit(loop))
# Wait until all task are executed
async def wait_and_quit(loop):
from pylp.lib.tasks import running
if running:
await asyncio.wait(map(lambda runner: runner.future, running))
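
make_readable_path comes from pylp.utils.paths and its body is not shown in this record; a plausible implementation — purely a guess for illustration — would shorten absolute paths to cwd- or home-relative form before logging:

import os

def make_readable_path(path):
    # Hypothetical helper: prefer a path relative to the current
    # directory; otherwise abbreviate the home directory to '~'.
    absolute = os.path.abspath(path)
    relative = os.path.relpath(absolute)
    if not relative.startswith(os.pardir):
        return relative
    home = os.path.expanduser('~')
    if absolute.startswith(home):
        return '~' + absolute[len(home):]
    return absolute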
|
6696451b7c7a9b2de5b624b47159efae8fcf06b7 | opwen_email_server/api/lokole.py | opwen_email_server/api/lokole.py | def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id']
resource_id = upload_info['resource_id']
resource_type = upload_info['resource_type']
raise NotImplementedError
def download(client_id):
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
| def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
| Disable linter in in-progress code | Disable linter in in-progress code
| Python | apache-2.0 | ascoderu/opwen-cloudserver,ascoderu/opwen-cloudserver | <REPLACE_OLD> upload_info['client_id']
<REPLACE_NEW> upload_info['client_id'] # noqa: F841
<REPLACE_END> <REPLACE_OLD> upload_info['resource_id']
<REPLACE_NEW> upload_info['resource_id'] # noqa: F841
<REPLACE_END> <REPLACE_OLD> upload_info['resource_type']
<REPLACE_NEW> upload_info['resource_type'] # noqa: F841
<REPLACE_END> <REPLACE_OLD> download(client_id):
<REPLACE_NEW> download(client_id): # noqa: F841
<REPLACE_END> <|endoftext|> def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id'] # noqa: F841
resource_id = upload_info['resource_id'] # noqa: F841
resource_type = upload_info['resource_type'] # noqa: F841
raise NotImplementedError
def download(client_id): # noqa: F841
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
| Disable linter in in-progress code
def upload(upload_info):
"""
:type upload_info: dict
"""
client_id = upload_info['client_id']
resource_id = upload_info['resource_id']
resource_type = upload_info['resource_type']
raise NotImplementedError
def download(client_id):
"""
:type client_id: str
:rtype dict
"""
return {
'resource_id': None,
'resource_type': None,
}
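
# noqa: F841 silences flake8's "local variable is assigned but never used" warning for that line only, while a bare # noqa would mute every check on the line. A short sketch of the difference; the function is illustrative:

def parse(payload):
    checksum = payload['checksum']  # noqa: F841 -- only F841 ignored
    body = payload['body']  # noqa -- every flake8 check ignored here
    return body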
|
e7b6aef4db85c777463d2335107145b60b678ae2 | examples/tour_examples/maps_introjs_tour.py | examples/tour_examples/maps_introjs_tour.py | from seleniumbase import BaseCase
class MyTourClass(BaseCase):
def test_google_maps_tour(self):
self.open("https://www.google.com/maps/@42.3598616,-71.0912631,15z")
self.wait_for_element("#searchboxinput")
self.wait_for_element("#minimap")
self.wait_for_element("#zoom")
self.create_tour(theme="introjs")
self.add_tour_step("Welcome to Google Maps!",
title="✅ SeleniumBase Tours 🌎")
self.add_tour_step("Type in a location here.", "#searchboxinput",
title="Search Box")
self.add_tour_step("Then click here to show it on the map.",
"#searchbox-searchbutton", alignment="bottom")
self.add_tour_step("Or click here to get driving directions.",
"#searchbox-directions", alignment="bottom")
self.add_tour_step("Use this button to switch to Satellite view.",
"#minimap div.widget-minimap", alignment="right")
self.add_tour_step("Click here to zoom in.", "#widget-zoom-in",
alignment="left")
self.add_tour_step("Or click here to zoom out.", "#widget-zoom-out",
alignment="left")
self.add_tour_step("Use the Menu button to see more options.",
".searchbox-hamburger-container", alignment="right")
self.add_tour_step("Or click here to see more Google apps.",
'[title="Google apps"]', alignment="left")
self.add_tour_step("Thanks for using SeleniumBase Tours!",
title="🚃 End of Guided Tour 🚃")
self.export_tour(filename="google_maps_introjs_tour.js")
self.play_tour()
| Create a new tour example | Create a new tour example
| Python | mit | seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase | <REPLACE_OLD> <REPLACE_NEW> from seleniumbase import BaseCase
class MyTourClass(BaseCase):
def test_google_maps_tour(self):
self.open("https://www.google.com/maps/@42.3598616,-71.0912631,15z")
self.wait_for_element("#searchboxinput")
self.wait_for_element("#minimap")
self.wait_for_element("#zoom")
self.create_tour(theme="introjs")
self.add_tour_step("Welcome to Google Maps!",
title="✅ SeleniumBase Tours 🌎")
self.add_tour_step("Type in a location here.", "#searchboxinput",
title="Search Box")
self.add_tour_step("Then click here to show it on the map.",
"#searchbox-searchbutton", alignment="bottom")
self.add_tour_step("Or click here to get driving directions.",
"#searchbox-directions", alignment="bottom")
self.add_tour_step("Use this button to switch to Satellite view.",
"#minimap div.widget-minimap", alignment="right")
self.add_tour_step("Click here to zoom in.", "#widget-zoom-in",
alignment="left")
self.add_tour_step("Or click here to zoom out.", "#widget-zoom-out",
alignment="left")
self.add_tour_step("Use the Menu button to see more options.",
".searchbox-hamburger-container", alignment="right")
self.add_tour_step("Or click here to see more Google apps.",
'[title="Google apps"]', alignment="left")
self.add_tour_step("Thanks for using SeleniumBase Tours!",
title="🚃 End of Guided Tour 🚃")
self.export_tour(filename="google_maps_introjs_tour.js")
self.play_tour()
<REPLACE_END> <|endoftext|> from seleniumbase import BaseCase
class MyTourClass(BaseCase):
def test_google_maps_tour(self):
self.open("https://www.google.com/maps/@42.3598616,-71.0912631,15z")
self.wait_for_element("#searchboxinput")
self.wait_for_element("#minimap")
self.wait_for_element("#zoom")
self.create_tour(theme="introjs")
self.add_tour_step("Welcome to Google Maps!",
title="✅ SeleniumBase Tours 🌎")
self.add_tour_step("Type in a location here.", "#searchboxinput",
title="Search Box")
self.add_tour_step("Then click here to show it on the map.",
"#searchbox-searchbutton", alignment="bottom")
self.add_tour_step("Or click here to get driving directions.",
"#searchbox-directions", alignment="bottom")
self.add_tour_step("Use this button to switch to Satellite view.",
"#minimap div.widget-minimap", alignment="right")
self.add_tour_step("Click here to zoom in.", "#widget-zoom-in",
alignment="left")
self.add_tour_step("Or click here to zoom out.", "#widget-zoom-out",
alignment="left")
self.add_tour_step("Use the Menu button to see more options.",
".searchbox-hamburger-container", alignment="right")
self.add_tour_step("Or click here to see more Google apps.",
'[title="Google apps"]', alignment="left")
self.add_tour_step("Thanks for using SeleniumBase Tours!",
title="🚃 End of Guided Tour 🚃")
self.export_tour(filename="google_maps_introjs_tour.js")
self.play_tour()
| Create a new tour example
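The create_tour/add_tour_step/play_tour calls in this record generalize to any page. A minimal sketch against a hypothetical target page (the URL and CSS selector are placeholders, not part of the original example):

from seleniumbase import BaseCase

class MiniTourClass(BaseCase):
    def test_mini_tour(self):
        self.open("https://example.com/")  # hypothetical page
        self.create_tour(theme="introjs")
        self.add_tour_step("Welcome to the demo!", title="Start")
        self.add_tour_step("This is the page heading.", "h1", alignment="bottom")
        self.play_tour()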
|
|
d5f02b13db9b6d23e15bc07a985b8c67644ffb44 | pyclibrary/__init__.py | pyclibrary/__init__.py | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015 by PyCLibrary Authors, see AUTHORS for more details.
#
# Distributed under the terms of the MIT/X11 license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
from __future__ import (division, unicode_literals, print_function,
absolute_import)
from .c_parser import win_defs, CParser
from .c_library import CLibrary, address_of, build_array
from .errors import DefinitionError
from .init import init, auto_init
| # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015 by PyCLibrary Authors, see AUTHORS for more details.
#
# Distributed under the terms of the MIT/X11 license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
from __future__ import (division, unicode_literals, print_function,
absolute_import)
import logging
logging.getLogger('pyclibrary').addHandler(logging.NullHandler())
from .c_parser import win_defs, CParser
from .c_library import CLibrary, address_of, build_array
from .errors import DefinitionError
from .init import init, auto_init
| Add NullHandler to avoid logging complaining for nothing. | Add NullHandler to avoid logging complaining for nothing.
| Python | mit | MatthieuDartiailh/pyclibrary,mrh1997/pyclibrary,mrh1997/pyclibrary,MatthieuDartiailh/pyclibrary,mrh1997/pyclibrary,duguxy/pyclibrary,duguxy/pyclibrary,duguxy/pyclibrary | <REPLACE_OLD> absolute_import)
from <REPLACE_NEW> absolute_import)
import logging
logging.getLogger('pyclibrary').addHandler(logging.NullHandler())
from <REPLACE_END> <|endoftext|> # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015 by PyCLibrary Authors, see AUTHORS for more details.
#
# Distributed under the terms of the MIT/X11 license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
from __future__ import (division, unicode_literals, print_function,
absolute_import)
import logging
logging.getLogger('pyclibrary').addHandler(logging.NullHandler())
from .c_parser import win_defs, CParser
from .c_library import CLibrary, address_of, build_array
from .errors import DefinitionError
from .init import init, auto_init
| Add NullHandler to avoid logging complaining for nothing.
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015 by PyCLibrary Authors, see AUTHORS for more details.
#
# Distributed under the terms of the MIT/X11 license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
from __future__ import (division, unicode_literals, print_function,
absolute_import)
from .c_parser import win_defs, CParser
from .c_library import CLibrary, address_of, build_array
from .errors import DefinitionError
from .init import init, auto_init
|
45fc612fdc5a354dbf0bacccd345b1aebcc73e59 | tests/test_openweather.py | tests/test_openweather.py | # -*- coding: utf-8 -*-
import bot_mock
from pyfibot.modules import module_openweather
from utils import check_re
bot = bot_mock.BotMock()
def test_weather():
regex = u'Lappeenranta, FI: Temperature: \d+.\d\xb0C, feels like: \d+.\d\xb0C, wind: \d+.\d m/s, humidity: \d+%, pressure: \d+ hPa, cloudiness: \d+%'
check_re(regex, module_openweather.command_weather(bot, None, "#channel", 'lappeenranta')[1])
def test_forecast():
regex = u'Lappeenranta, Finland: tomorrow: \d+.\d-\d+.\d \xb0C \(.*?\), in 2 days: \d+.\d-\d+.\d \xb0C \(.*?\), in 3 days: \d+.\d-\d+.\d \xb0C \(.*?\)'
check_re(regex, module_openweather.command_forecast(bot, None, "#channel", 'lappeenranta')[1])
| # -*- coding: utf-8 -*-
import bot_mock
from pyfibot.modules import module_openweather
from utils import check_re
bot = bot_mock.BotMock()
def test_weather():
regex = u'Lappeenranta, FI: Temperature: \d+.\d\xb0C, feels like: \d+.\d\xb0C, wind: \d+.\d m/s, humidity: \d+%, pressure: \d+ hPa, cloudiness: \d+%'
check_re(regex, module_openweather.command_weather(bot, None, "#channel", 'lappeenranta')[1])
def test_forecast():
regex = u'Lappeenranta, FI: tomorrow: \d+.\d-\d+.\d \xb0C \(.*?\), in 2 days: \d+.\d-\d+.\d \xb0C \(.*?\), in 3 days: \d+.\d-\d+.\d \xb0C \(.*?\)'
check_re(regex, module_openweather.command_forecast(bot, None, "#channel", 'lappeenranta')[1])
| Revert "Fix openweather unit tests" | Revert "Fix openweather unit tests"
This reverts commit 36e100e649f0a337228a6d7375358d23afd544ff.
Open Weather Map has reverted back to their old api or something like that...
| Python | bsd-3-clause | rnyberg/pyfibot,EArmour/pyfibot,aapa/pyfibot,aapa/pyfibot,lepinkainen/pyfibot,rnyberg/pyfibot,lepinkainen/pyfibot,huqa/pyfibot,huqa/pyfibot,EArmour/pyfibot | <REPLACE_OLD> Finland: <REPLACE_NEW> FI: <REPLACE_END> <|endoftext|> # -*- coding: utf-8 -*-
import bot_mock
from pyfibot.modules import module_openweather
from utils import check_re
bot = bot_mock.BotMock()
def test_weather():
regex = u'Lappeenranta, FI: Temperature: \d+.\d\xb0C, feels like: \d+.\d\xb0C, wind: \d+.\d m/s, humidity: \d+%, pressure: \d+ hPa, cloudiness: \d+%'
check_re(regex, module_openweather.command_weather(bot, None, "#channel", 'lappeenranta')[1])
def test_forecast():
regex = u'Lappeenranta, FI: tomorrow: \d+.\d-\d+.\d \xb0C \(.*?\), in 2 days: \d+.\d-\d+.\d \xb0C \(.*?\), in 3 days: \d+.\d-\d+.\d \xb0C \(.*?\)'
check_re(regex, module_openweather.command_forecast(bot, None, "#channel", 'lappeenranta')[1])
| Revert "Fix openweather unit tests"
This reverts commit 36e100e649f0a337228a6d7375358d23afd544ff.
Open Weather Map has reverted back to their old api or something like that...
# -*- coding: utf-8 -*-
import bot_mock
from pyfibot.modules import module_openweather
from utils import check_re
bot = bot_mock.BotMock()
def test_weather():
regex = u'Lappeenranta, FI: Temperature: \d+.\d\xb0C, feels like: \d+.\d\xb0C, wind: \d+.\d m/s, humidity: \d+%, pressure: \d+ hPa, cloudiness: \d+%'
check_re(regex, module_openweather.command_weather(bot, None, "#channel", 'lappeenranta')[1])
def test_forecast():
regex = u'Lappeenranta, Finland: tomorrow: \d+.\d-\d+.\d \xb0C \(.*?\), in 2 days: \d+.\d-\d+.\d \xb0C \(.*?\), in 3 days: \d+.\d-\d+.\d \xb0C \(.*?\)'
check_re(regex, module_openweather.command_forecast(bot, None, "#channel", 'lappeenranta')[1])
|
c600d1e1ad3cef69f6028afd64e14a04c747e1c6 | tests/test_install.py | tests/test_install.py | import sys
import os
from subprocess import check_call
from pew._utils import invoke_pew as invoke
from utils import skip_windows, connection_required
import pytest
def skip_marker(f):
return skip_windows(reason='Pythonz unavailable in Windows')(
pytest.mark.skipif(
sys.platform == 'cygwin',
reason='Pythonz unavailable in Cygwin')(
pytest.mark.skipif(os.environ.get('NIX'),
reason='Pythonz unavailable in Nix')(
connection_required(f))))
@skip_marker
def test_install():
py_version = ['2.6.1', '--type', 'pypy']
assert invoke('install', *py_version).returncode == 0
py = invoke('locate_python', *py_version).out
check_call([py, '-V'])
@skip_marker
def test_uninstall():
py_version = ['2.6.1', '--type', 'pypy']
invoke('install', *py_version)
assert invoke('uninstall', *py_version).returncode == 0
assert invoke('locate_python', *py_version).returncode != 0
| import sys
import os
from subprocess import check_call
from pew._utils import invoke_pew as invoke
from utils import skip_windows, connection_required
import pytest
def skip_marker(f):
return skip_windows(reason='Pythonz unavailable in Windows')(
pytest.mark.skipif(
sys.platform == 'cygwin',
reason='Pythonz unavailable in Cygwin')(
pytest.mark.skipif(os.environ.get('NIX'),
reason='Pythonz unavailable in Nix')(
connection_required(f))))
@skip_marker
def test_install():
py_version = ['3.5.1']
assert invoke('install', *py_version).returncode == 0
py = invoke('locate_python', *py_version).out
check_call([py, '-V'])
@skip_marker
def test_uninstall():
py_version = ['3.5.1']
invoke('install', *py_version)
assert invoke('uninstall', *py_version).returncode == 0
assert invoke('locate_python', *py_version).returncode != 0
| Replace version of Python to install in test_{un,}install test | Replace version of Python to install in test_{un,}install test
PyPy 2.6.1's download link is not working anymore.
| Python | mit | berdario/pew,berdario/pew | <REPLACE_OLD> ['2.6.1', '--type', 'pypy']
<REPLACE_NEW> ['3.5.1']
<REPLACE_END> <REPLACE_OLD> ['2.6.1', '--type', 'pypy']
<REPLACE_NEW> ['3.5.1']
<REPLACE_END> <|endoftext|> import sys
import os
from subprocess import check_call
from pew._utils import invoke_pew as invoke
from utils import skip_windows, connection_required
import pytest
def skip_marker(f):
return skip_windows(reason='Pythonz unavailable in Windows')(
pytest.mark.skipif(
sys.platform == 'cygwin',
reason='Pythonz unavailable in Cygwin')(
pytest.mark.skipif(os.environ.get('NIX'),
reason='Pythonz unavailable in Nix')(
connection_required(f))))
@skip_marker
def test_install():
py_version = ['3.5.1']
assert invoke('install', *py_version).returncode == 0
py = invoke('locate_python', *py_version).out
check_call([py, '-V'])
@skip_marker
def test_uninstall():
py_version = ['3.5.1']
invoke('install', *py_version)
assert invoke('uninstall', *py_version).returncode == 0
assert invoke('locate_python', *py_version).returncode != 0
| Replace version of Python to install in test_{un,}install test
PyPy 2.6.1's download link is not working anymore.
import sys
import os
from subprocess import check_call
from pew._utils import invoke_pew as invoke
from utils import skip_windows, connection_required
import pytest
def skip_marker(f):
return skip_windows(reason='Pythonz unavailable in Windows')(
pytest.mark.skipif(
sys.platform == 'cygwin',
reason='Pythonz unavailable in Cygwin')(
pytest.mark.skipif(os.environ.get('NIX'),
reason='Pythonz unavailable in Nix')(
connection_required(f))))
@skip_marker
def test_install():
py_version = ['2.6.1', '--type', 'pypy']
assert invoke('install', *py_version).returncode == 0
py = invoke('locate_python', *py_version).out
check_call([py, '-V'])
@skip_marker
def test_uninstall():
py_version = ['2.6.1', '--type', 'pypy']
invoke('install', *py_version)
assert invoke('uninstall', *py_version).returncode == 0
assert invoke('locate_python', *py_version).returncode != 0
|
1065f63e29c9b31f55ed1986c409fc85f1aa26e3 | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-json
# License: MIT
#
"""This module exports the JSON plugin linter class."""
import json
from SublimeLinter.lint import Linter
class JSON(Linter):
"""Provides an interface to json.loads()."""
language = 'json'
cmd = None
regex = r'^(?P<message>.+):\s*line (?P<line>\d+) column (?P<col>\d+)'
def run(self, cmd, code):
"""Attempt to parse code as JSON, return '' if it succeeds, the error message if it fails."""
try:
json.loads(code)
return ''
except ValueError as err:
return str(err)
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-json
# License: MIT
#
"""This module exports the JSON plugin linter class."""
import json
from SublimeLinter.lint import Linter
class JSON(Linter):
"""Provides an interface to json.loads()."""
syntax = 'json'
cmd = None
regex = r'^(?P<message>.+):\s*line (?P<line>\d+) column (?P<col>\d+)'
def run(self, cmd, code):
"""Attempt to parse code as JSON, return '' if it succeeds, the error message if it fails."""
try:
json.loads(code)
return ''
except ValueError as err:
return str(err)
| Change 'language' to 'syntax', that is more precise terminology. | Change 'language' to 'syntax', that is more precise terminology.
| Python | mit | SublimeLinter/SublimeLinter-json | <REPLACE_OLD> language <REPLACE_NEW> syntax <REPLACE_END> <|endoftext|> #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-json
# License: MIT
#
"""This module exports the JSON plugin linter class."""
import json
from SublimeLinter.lint import Linter
class JSON(Linter):
"""Provides an interface to json.loads()."""
syntax = 'json'
cmd = None
regex = r'^(?P<message>.+):\s*line (?P<line>\d+) column (?P<col>\d+)'
def run(self, cmd, code):
"""Attempt to parse code as JSON, return '' if it succeeds, the error message if it fails."""
try:
json.loads(code)
return ''
except ValueError as err:
return str(err)
| Change 'language' to 'syntax', that is more precise terminology.
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-json
# License: MIT
#
"""This module exports the JSON plugin linter class."""
import json
from SublimeLinter.lint import Linter
class JSON(Linter):
"""Provides an interface to json.loads()."""
language = 'json'
cmd = None
regex = r'^(?P<message>.+):\s*line (?P<line>\d+) column (?P<col>\d+)'
def run(self, cmd, code):
"""Attempt to parse code as JSON, return '' if it succeeds, the error message if it fails."""
try:
json.loads(code)
return ''
except ValueError as err:
return str(err)
|
945baec1540ff72b85b3d0563511d93cb33d660e | nbgrader/tests/formgrader/fakeuser.py | nbgrader/tests/formgrader/fakeuser.py | import os
from jupyterhub.auth import LocalAuthenticator
from jupyterhub.spawner import LocalProcessSpawner
from tornado import gen
class FakeUserAuth(LocalAuthenticator):
"""Authenticate fake users"""
@gen.coroutine
def authenticate(self, handler, data):
"""If the user is on the whitelist, authenticate regardless of password.
If not, then don't authenticate.
"""
username = data['username']
if not self.check_whitelist(username):
return
return username
@staticmethod
def system_user_exists(user):
return True
class FakeUserSpawner(LocalProcessSpawner):
def user_env(self, env):
env['USER'] = self.user.name
env['HOME'] = os.getcwd()
env['SHELL'] = '/bin/bash'
return env
def make_preexec_fn(self, name):
home = os.getcwd()
def preexec():
# don't forward signals
os.setpgrp()
# start in the cwd
os.chdir(home)
return preexec
| import os
from jupyterhub.auth import LocalAuthenticator
from jupyterhub.spawner import LocalProcessSpawner
from tornado import gen
class FakeUserAuth(LocalAuthenticator):
"""Authenticate fake users"""
@gen.coroutine
def authenticate(self, handler, data):
"""If the user is on the whitelist, authenticate regardless of password.
If not, then don't authenticate.
"""
username = data['username']
if not self.check_whitelist(username):
return
return username
@staticmethod
def system_user_exists(user):
return True
class FakeUserSpawner(LocalProcessSpawner):
def user_env(self, env):
env['USER'] = self.user.name
env['HOME'] = os.getcwd()
env['SHELL'] = '/bin/bash'
return env
def make_preexec_fn(self, name):
home = os.getcwd()
def preexec():
# start in the cwd
os.chdir(home)
return preexec
| Remove os.setpgrp() from fake spawner | Remove os.setpgrp() from fake spawner
| Python | bsd-3-clause | jhamrick/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,jupyter/nbgrader | <DELETE> don't forward signals
os.setpgrp()
# <DELETE_END> <|endoftext|> import os
from jupyterhub.auth import LocalAuthenticator
from jupyterhub.spawner import LocalProcessSpawner
from tornado import gen
class FakeUserAuth(LocalAuthenticator):
"""Authenticate fake users"""
@gen.coroutine
def authenticate(self, handler, data):
"""If the user is on the whitelist, authenticate regardless of password.
If not, then don't authenticate.
"""
username = data['username']
if not self.check_whitelist(username):
return
return username
@staticmethod
def system_user_exists(user):
return True
class FakeUserSpawner(LocalProcessSpawner):
def user_env(self, env):
env['USER'] = self.user.name
env['HOME'] = os.getcwd()
env['SHELL'] = '/bin/bash'
return env
def make_preexec_fn(self, name):
home = os.getcwd()
def preexec():
# start in the cwd
os.chdir(home)
return preexec
| Remove os.setpgrp() from fake spawner
import os
from jupyterhub.auth import LocalAuthenticator
from jupyterhub.spawner import LocalProcessSpawner
from tornado import gen
class FakeUserAuth(LocalAuthenticator):
"""Authenticate fake users"""
@gen.coroutine
def authenticate(self, handler, data):
"""If the user is on the whitelist, authenticate regardless of password.
If not, then don't authenticate.
"""
username = data['username']
if not self.check_whitelist(username):
return
return username
@staticmethod
def system_user_exists(user):
return True
class FakeUserSpawner(LocalProcessSpawner):
def user_env(self, env):
env['USER'] = self.user.name
env['HOME'] = os.getcwd()
env['SHELL'] = '/bin/bash'
return env
def make_preexec_fn(self, name):
home = os.getcwd()
def preexec():
# don't forward signals
os.setpgrp()
# start in the cwd
os.chdir(home)
return preexec
|
f9b079b7956419ec324234dbad11d073bed70dd8 | users/views.py | users/views.py | from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
return redirect('user-detail', request.user.pk)
else:
return super(UserViewSet, self).retrieve(request, pk)
| from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
return redirect('user-detail', request.user.pk)
else:
return super().retrieve(request, pk)
| Use Python 3 style for super | Use Python 3 style for super
| Python | bsd-3-clause | FreeMusicNinja/api.freemusic.ninja | <REPLACE_OLD> super(UserViewSet, self).retrieve(request, <REPLACE_NEW> super().retrieve(request, <REPLACE_END> <|endoftext|> from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
return redirect('user-detail', request.user.pk)
else:
return super().retrieve(request, pk)
| Use Python 3 style for super
from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
return redirect('user-detail', request.user.pk)
else:
return super(UserViewSet, self).retrieve(request, pk)
|
97229a7e51279906254a7befa0456a4c89a9b0ea | planner/models.py | planner/models.py | from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
origin = models.CharField(max_length=63)
destination = models.CharField(max_length=63)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateTimeField()
end_date = models.DateTimeField()
route = models.OneToOneField(Route)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
trip = models.ForeignKey(RoadTrip)
| from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
origin = models.CharField(max_length=63)
destination = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.origin,
self.destination
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route)
def __unicode__(self):
return str(self.waypoint)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateTimeField()
end_date = models.DateTimeField()
route = models.OneToOneField(Route)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
trip = models.ForeignKey(RoadTrip)
def __unicode__(self):
return str(self.description)
| Add unicode methods to model classes | Add unicode methods to model classes
| Python | apache-2.0 | jwarren116/RoadTrip,jwarren116/RoadTrip,jwarren116/RoadTrip | <REPLACE_OLD> models.CharField(max_length=63)
class <REPLACE_NEW> models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.origin,
self.destination
)
class <REPLACE_END> <REPLACE_OLD> models.ForeignKey(Route)
# <REPLACE_NEW> models.ForeignKey(Route)
def __unicode__(self):
return str(self.waypoint)
# <REPLACE_END> <REPLACE_OLD> models.OneToOneField(Route)
# <REPLACE_NEW> models.OneToOneField(Route)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
# <REPLACE_END> <REPLACE_OLD> models.ForeignKey(RoadTrip)
<REPLACE_NEW> models.ForeignKey(RoadTrip)
def __unicode__(self):
return str(self.description)
<REPLACE_END> <|endoftext|> from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
origin = models.CharField(max_length=63)
destination = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.origin,
self.destination
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route)
def __unicode__(self):
return str(self.waypoint)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateTimeField()
end_date = models.DateTimeField()
route = models.OneToOneField(Route)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
trip = models.ForeignKey(RoadTrip)
def __unicode__(self):
return str(self.description)
| Add unicode methods to model classes
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
origin = models.CharField(max_length=63)
destination = models.CharField(max_length=63)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateTimeField()
end_date = models.DateTimeField()
route = models.OneToOneField(Route)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
trip = models.ForeignKey(RoadTrip)
|
56cdcde184b613dabdcc3f999b90915f75e03726 | tests/backends/__init__.py | tests/backends/__init__.py | from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
playback.play()
self.assertEqual(playback.state, playback.PLAYING)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
| from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play_with_no_current_track(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
result = playback.play()
self.assertEqual(result, False)
self.assertEqual(playback.state, playback.STOPPED)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
| Update test to check basic case for playback without current track | Update test to check basic case for playback without current track
| Python | apache-2.0 | hkariti/mopidy,mopidy/mopidy,abarisain/mopidy,quartz55/mopidy,ZenithDK/mopidy,quartz55/mopidy,priestd09/mopidy,diandiankan/mopidy,dbrgn/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,adamcik/mopidy,rawdlite/mopidy,liamw9534/mopidy,pacificIT/mopidy,jcass77/mopidy,bencevans/mopidy,mokieyue/mopidy,swak/mopidy,bacontext/mopidy,mopidy/mopidy,quartz55/mopidy,hkariti/mopidy,ali/mopidy,rawdlite/mopidy,priestd09/mopidy,adamcik/mopidy,jodal/mopidy,kingosticks/mopidy,glogiotatidis/mopidy,ali/mopidy,jodal/mopidy,kingosticks/mopidy,mokieyue/mopidy,dbrgn/mopidy,tkem/mopidy,bacontext/mopidy,glogiotatidis/mopidy,glogiotatidis/mopidy,adamcik/mopidy,hkariti/mopidy,jcass77/mopidy,ali/mopidy,pacificIT/mopidy,vrs01/mopidy,swak/mopidy,jmarsik/mopidy,woutervanwijk/mopidy,jodal/mopidy,tkem/mopidy,kingosticks/mopidy,bacontext/mopidy,jmarsik/mopidy,dbrgn/mopidy,bencevans/mopidy,ZenithDK/mopidy,jcass77/mopidy,ali/mopidy,SuperStarPL/mopidy,hkariti/mopidy,vrs01/mopidy,bacontext/mopidy,vrs01/mopidy,tkem/mopidy,dbrgn/mopidy,rawdlite/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,mokieyue/mopidy,diandiankan/mopidy,jmarsik/mopidy,quartz55/mopidy,vrs01/mopidy,liamw9534/mopidy,ZenithDK/mopidy,pacificIT/mopidy,jmarsik/mopidy,woutervanwijk/mopidy,bencevans/mopidy,bencevans/mopidy,tkem/mopidy,priestd09/mopidy,swak/mopidy,mopidy/mopidy,abarisain/mopidy,ZenithDK/mopidy,swak/mopidy,glogiotatidis/mopidy,mokieyue/mopidy,rawdlite/mopidy,diandiankan/mopidy,SuperStarPL/mopidy | <REPLACE_OLD> test_play(self):
<REPLACE_NEW> test_play_with_no_current_track(self):
<REPLACE_END> <INSERT> result = <INSERT_END> <INSERT> self.assertEqual(result, False)
<INSERT_END> <REPLACE_OLD> playback.PLAYING)
<REPLACE_NEW> playback.STOPPED)
<REPLACE_END> <|endoftext|> from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play_with_no_current_track(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
result = playback.play()
self.assertEqual(result, False)
self.assertEqual(playback.state, playback.STOPPED)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
| Update test to check basic case for playback without current track
from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
playback.play()
self.assertEqual(playback.state, playback.PLAYING)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
|
12e9814d0225960450bb7cf0fc80502cef13195b | rewind/test/test_code.py | rewind/test/test_code.py | """Test code format and coding standards."""
import importlib
import inspect
import pkgutil
import unittest
def setUpModule():
global modules
modules = [name for _, name, ispkg in pkgutil.walk_packages(['rewind'],
'rewind.')
if not ispkg and not name.startswith('rewind.test.') and
not name.startswith('rewind.messages.')]
assert modules, "Expected to have found a couple of modules. Did not."
modules = map(importlib.import_module, modules)
def tearDownModule():
"""Clearing up global namespace in test_code."""
global modules
del modules
def _get_public_classes_from_object(obj, prepend_name=''):
classes = [(prepend_name+name, value)
for name, value in inspect.getmembers(obj)
if inspect.isclass(value) and not name.startswith('_')]
result = list(classes)
for name, value in classes:
partialres = _get_public_classes_from_object(value,
'{0}.'.format(name))
result.extend(partialres)
return result
def _get_public_classes():
classes = []
for module in modules:
assert inspect.ismodule(module)
someclasses = _get_public_classes_from_object(module,
'{0}.'.format(module.__name__))
classes.extend(someclasses)
return classes
class TestPydoc(unittest.TestCase):
"""Tests for pydoc."""
def testAllPublicClasses(self):
"""Test that all public classes have a pydoc."""
classes = _get_public_classes()
self.assertNotEqual(len(classes), 0)
for classname, clazz in classes:
doc = inspect.getdoc(clazz)
msg = "{0} lacks a Pydoc string.".format(classname)
self.assertTrue(doc and len(doc) > 4, msg)
| Test that asserts all public classes have pydoc | Test that asserts all public classes have pydoc
| Python | agpl-3.0 | JensRantil/rewind,JensRantil/rewind-client | <REPLACE_OLD> <REPLACE_NEW> """Test code format and coding standards."""
import importlib
import inspect
import pkgutil
import unittest
def setUpModule():
global modules
modules = [name for _, name, ispkg in pkgutil.walk_packages(['rewind'],
'rewind.')
if not ispkg and not name.startswith('rewind.test.') and
not name.startswith('rewind.messages.')]
assert modules, "Expected to have found a couple of modules. Did not."
modules = map(importlib.import_module, modules)
def tearDownModule():
"""Clearing up global namespace in test_code."""
global modules
del modules
def _get_public_classes_from_object(obj, prepend_name=''):
classes = [(prepend_name+name, value)
for name, value in inspect.getmembers(obj)
if inspect.isclass(value) and not name.startswith('_')]
result = list(classes)
for name, value in classes:
partialres = _get_public_classes_from_object(value,
'{0}.'.format(name))
result.extend(partialres)
return result
def _get_public_classes():
classes = []
for module in modules:
assert inspect.ismodule(module)
someclasses = _get_public_classes_from_object(module,
'{0}.'.format(module.__name__))
classes.extend(someclasses)
return classes
class TestPydoc(unittest.TestCase):
"""Tests for pydoc."""
def testAllPublicClasses(self):
"""Test that all public classes have a pydoc."""
classes = _get_public_classes()
self.assertNotEqual(len(classes), 0)
for classname, clazz in classes:
doc = inspect.getdoc(clazz)
msg = "{0} lacks a Pydoc string.".format(classname)
self.assertTrue(doc and len(doc) > 4, msg)
<REPLACE_END> <|endoftext|> """Test code format and coding standards."""
import importlib
import inspect
import pkgutil
import unittest
def setUpModule():
global modules
modules = [name for _, name, ispkg in pkgutil.walk_packages(['rewind'],
'rewind.')
if not ispkg and not name.startswith('rewind.test.') and
not name.startswith('rewind.messages.')]
assert modules, "Expected to have found a couple of modules. Did not."
modules = map(importlib.import_module, modules)
def tearDownModule():
"""Clearing up global namespace in test_code."""
global modules
del modules
def _get_public_classes_from_object(obj, prepend_name=''):
classes = [(prepend_name+name, value)
for name, value in inspect.getmembers(obj)
if inspect.isclass(value) and not name.startswith('_')]
result = list(classes)
for name, value in classes:
partialres = _get_public_classes_from_object(value,
'{0}.'.format(name))
result.extend(partialres)
return result
def _get_public_classes():
classes = []
for module in modules:
assert inspect.ismodule(module)
someclasses = _get_public_classes_from_object(module,
'{0}.'.format(module.__name__))
classes.extend(someclasses)
return classes
class TestPydoc(unittest.TestCase):
"""Tests for pydoc."""
def testAllPublicClasses(self):
"""Test that all public classes have a pydoc."""
classes = _get_public_classes()
self.assertNotEqual(len(classes), 0)
for classname, clazz in classes:
doc = inspect.getdoc(clazz)
msg = "{0} lacks a Pydoc string.".format(classname)
self.assertTrue(doc and len(doc) > 4, msg)
| Test that asserts all public classes have pydoc
|
|
f84ba2d213636482951553cc453b33a4bac8541f | pytest_doctest_custom.py | pytest_doctest_custom.py | """Py.test doctest custom plugin"""
# By Danilo J. S. Bellini
import sys, functools
def printer(value):
"""Prints the object representation using the given custom formatter."""
if value is not None:
print(printer.repr(value)) # This attribute has to be set elsewhere
def temp_replace(obj, attr_name, value):
"""
Returns a decorator that replaces obj.attr = value before calling the
wrapped function and restores obj.attr afterwards.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
backup = getattr(obj, attr_name)
setattr(obj, attr_name, value)
result = func(*args, **kwargs)
setattr(obj, attr_name, backup)
return result
return wrapper
return decorator
def pytest_configure(config):
"""
Hook for changing ``doctest.DocTestRunner.run`` method so that the
``sys.__displayhook__`` calls the given printer function while a doctest
is running.
"""
import doctest
enable_printer = temp_replace(sys, "__displayhook__", printer)
doctest.DocTestRunner.run = enable_printer(doctest.DocTestRunner.run)
# As the public method doctest.DocTestRunner.run replaces sys.displayhook
# by sys.__displayhook__, we could also had changed "displayhook" on the
# _DocTestRunner__run protected method
| Create the plugin based on PyScanPrev conftest.py | Create the plugin based on PyScanPrev conftest.py
| Python | mit | danilobellini/pytest-doctest-custom | <REPLACE_OLD> <REPLACE_NEW> """Py.test doctest custom plugin"""
# By Danilo J. S. Bellini
import sys, functools
def printer(value):
"""Prints the object representation using the given custom formatter."""
if value is not None:
print(printer.repr(value)) # This attribute has to be set elsewhere
def temp_replace(obj, attr_name, value):
"""
Returns a decorator that replaces obj.attr = value before calling the
wrapped function and restores obj.attr afterwards.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
backup = getattr(obj, attr_name)
setattr(obj, attr_name, value)
result = func(*args, **kwargs)
setattr(obj, attr_name, backup)
return result
return wrapper
return decorator
def pytest_configure(config):
"""
Hook for changing ``doctest.DocTestRunner.run`` method so that the
``sys.__displayhook__`` calls the given printer function while a doctest
is running.
"""
import doctest
enable_printer = temp_replace(sys, "__displayhook__", printer)
doctest.DocTestRunner.run = enable_printer(doctest.DocTestRunner.run)
# As the public method doctest.DocTestRunner.run replaces sys.displayhook
# by sys.__displayhook__, we could also had changed "displayhook" on the
# _DocTestRunner__run protected method
<REPLACE_END> <|endoftext|> """Py.test doctest custom plugin"""
# By Danilo J. S. Bellini
import sys, functools
def printer(value):
"""Prints the object representation using the given custom formatter."""
if value is not None:
print(printer.repr(value)) # This attribute has to be set elsewhere
def temp_replace(obj, attr_name, value):
"""
Returns a decorator that replaces obj.attr = value before calling the
wrapped function and restores obj.attr afterwards.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
backup = getattr(obj, attr_name)
setattr(obj, attr_name, value)
result = func(*args, **kwargs)
setattr(obj, attr_name, backup)
return result
return wrapper
return decorator
def pytest_configure(config):
"""
Hook for changing ``doctest.DocTestRunner.run`` method so that the
``sys.__displayhook__`` calls the given printer function while a doctest
is running.
"""
import doctest
enable_printer = temp_replace(sys, "__displayhook__", printer)
doctest.DocTestRunner.run = enable_printer(doctest.DocTestRunner.run)
# As the public method doctest.DocTestRunner.run replaces sys.displayhook
# by sys.__displayhook__, we could also had changed "displayhook" on the
# _DocTestRunner__run protected method
| Create the plugin based on PyScanPrev conftest.py
|
|
d82b1f9d7334c1cd976624788da785a87cd5db8a | functional/tests/volume/v1/test_qos.py | functional/tests/volume/v1/test_qos.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from functional.common import test
class VolumeQosTests(test.TestCase):
"""Functional tests for volume qos. """
NAME = uuid.uuid4().hex
HEADERS = ['Name']
FIELDS = ['id', 'name']
ID = None
@classmethod
def setUpClass(cls):
opts = cls.get_show_opts(cls.FIELDS)
raw_output = cls.openstack('volume qos create ' + cls.NAME + opts)
cls.ID, name, rol = raw_output.split('\n')
cls.assertOutput(cls.NAME, name)
@classmethod
def tearDownClass(cls):
raw_output = cls.openstack('volume qos delete ' + cls.ID)
cls.assertOutput('', raw_output)
def test_volume_qos_list(self):
opts = self.get_list_opts(self.HEADERS)
raw_output = self.openstack('volume qos list' + opts)
self.assertIn(self.NAME, raw_output)
def test_volume_qos_show(self):
opts = self.get_show_opts(self.FIELDS)
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.ID + "\n" + self.NAME + "\n", raw_output)
def test_volume_qos_metadata(self):
raw_output = self.openstack(
'volume qos set --property a=b --property c=d ' + self.ID)
self.assertEqual("", raw_output)
opts = self.get_show_opts(['name', 'specs'])
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.NAME + "\na='b', c='d'\n", raw_output)
| Add functional tests for volume qos | Add functional tests for volume qos
Change-Id: I80010b56b399bc027ac864304be60a3ee53bda00
| Python | apache-2.0 | openstack/python-openstackclient,redhat-openstack/python-openstackclient,BjoernT/python-openstackclient,BjoernT/python-openstackclient,openstack/python-openstackclient,dtroyer/python-openstackclient,redhat-openstack/python-openstackclient,dtroyer/python-openstackclient | <REPLACE_OLD> <REPLACE_NEW> # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from functional.common import test
class VolumeQosTests(test.TestCase):
"""Functional tests for volume qos. """
NAME = uuid.uuid4().hex
HEADERS = ['Name']
FIELDS = ['id', 'name']
ID = None
@classmethod
def setUpClass(cls):
opts = cls.get_show_opts(cls.FIELDS)
raw_output = cls.openstack('volume qos create ' + cls.NAME + opts)
cls.ID, name, rol = raw_output.split('\n')
cls.assertOutput(cls.NAME, name)
@classmethod
def tearDownClass(cls):
raw_output = cls.openstack('volume qos delete ' + cls.ID)
cls.assertOutput('', raw_output)
def test_volume_qos_list(self):
opts = self.get_list_opts(self.HEADERS)
raw_output = self.openstack('volume qos list' + opts)
self.assertIn(self.NAME, raw_output)
def test_volume_qos_show(self):
opts = self.get_show_opts(self.FIELDS)
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.ID + "\n" + self.NAME + "\n", raw_output)
def test_volume_qos_metadata(self):
raw_output = self.openstack(
'volume qos set --property a=b --property c=d ' + self.ID)
self.assertEqual("", raw_output)
opts = self.get_show_opts(['name', 'specs'])
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.NAME + "\na='b', c='d'\n", raw_output)
<REPLACE_END> <|endoftext|> # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from functional.common import test
class VolumeQosTests(test.TestCase):
"""Functional tests for volume qos. """
NAME = uuid.uuid4().hex
HEADERS = ['Name']
FIELDS = ['id', 'name']
ID = None
@classmethod
def setUpClass(cls):
opts = cls.get_show_opts(cls.FIELDS)
raw_output = cls.openstack('volume qos create ' + cls.NAME + opts)
cls.ID, name, rol = raw_output.split('\n')
cls.assertOutput(cls.NAME, name)
@classmethod
def tearDownClass(cls):
raw_output = cls.openstack('volume qos delete ' + cls.ID)
cls.assertOutput('', raw_output)
def test_volume_qos_list(self):
opts = self.get_list_opts(self.HEADERS)
raw_output = self.openstack('volume qos list' + opts)
self.assertIn(self.NAME, raw_output)
def test_volume_qos_show(self):
opts = self.get_show_opts(self.FIELDS)
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.ID + "\n" + self.NAME + "\n", raw_output)
def test_volume_qos_metadata(self):
raw_output = self.openstack(
'volume qos set --property a=b --property c=d ' + self.ID)
self.assertEqual("", raw_output)
opts = self.get_show_opts(['name', 'specs'])
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.NAME + "\na='b', c='d'\n", raw_output)
| Add functional tests for volume qos
Change-Id: I80010b56b399bc027ac864304be60a3ee53bda00
|
|
9cfe03ab06f126406a51c0945e990fc849d8dfb9 | scripts/crontab/gen-crons.py | scripts/crontab/gen-crons.py | #!/usr/bin/env python
import os
from optparse import OptionParser
from jinja2 import Template
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
parser = OptionParser()
parser.add_option("-k", "--kitsune",
help="Location of kitsune (required)")
parser.add_option("-u", "--user",
help=("Prefix cron with this user. "
"Only define for cron.d style crontabs"))
parser.add_option("-p", "--python", default="/usr/bin/python2.6",
help="Python interpreter to use")
(opts, args) = parser.parse_args()
if not opts.kitsune:
parser.error("-k must be defined")
ctx = {'django': 'cd %s; %s manage.py' % (opts.kitsune, opts.python),}
ctx['cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
print Template(TEMPLATE).render(**ctx)
if __name__ == "__main__":
main()
| #!/usr/bin/env python
import os
from optparse import OptionParser
from jinja2 import Template
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
parser = OptionParser()
parser.add_option("-k", "--kitsune",
help="Location of kitsune (required)")
parser.add_option("-u", "--user",
help=("Prefix cron with this user. "
"Only define for cron.d style crontabs"))
parser.add_option("-p", "--python", default="/usr/bin/python2.6",
help="Python interpreter to use")
(opts, args) = parser.parse_args()
if not opts.kitsune:
parser.error("-k must be defined")
# To pick up the right PyOpenSSL:
python_path = 'PYTHONPATH=/usr/local/lib64/python2.6/site-packages'
ctx = {'django': 'cd %s; %s %s manage.py' % (
opts.kitsune, python_path, opts.python),}
ctx['cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
print Template(TEMPLATE).render(**ctx)
if __name__ == "__main__":
main()
| Add local site-packages to PYTHONPATH. | Add local site-packages to PYTHONPATH.
To pick up the local version of PyOpenSSL.
| Python | bsd-3-clause | philipp-sumo/kitsune,iDTLabssl/kitsune,orvi2014/kitsune,silentbob73/kitsune,NewPresident1/kitsune,MziRintu/kitsune,YOTOV-LIMITED/kitsune,iDTLabssl/kitsune,YOTOV-LIMITED/kitsune,safwanrahman/kitsune,silentbob73/kitsune,safwanrahman/linuxdesh,mythmon/kitsune,YOTOV-LIMITED/kitsune,iDTLabssl/kitsune,turtleloveshoes/kitsune,chirilo/kitsune,NewPresident1/kitsune,rlr/kitsune,turtleloveshoes/kitsune,MikkCZ/kitsune,brittanystoroz/kitsune,NewPresident1/kitsune,NewPresident1/kitsune,orvi2014/kitsune,silentbob73/kitsune,Osmose/kitsune,silentbob73/kitsune,MikkCZ/kitsune,chirilo/kitsune,mythmon/kitsune,dbbhattacharya/kitsune,philipp-sumo/kitsune,safwanrahman/linuxdesh,anushbmx/kitsune,dbbhattacharya/kitsune,asdofindia/kitsune,safwanrahman/kitsune,MziRintu/kitsune,asdofindia/kitsune,dbbhattacharya/kitsune,mythmon/kitsune,Osmose/kitsune,feer56/Kitsune2,feer56/Kitsune1,H1ghT0p/kitsune,mozilla/kitsune,feer56/Kitsune1,mozilla/kitsune,Osmose/kitsune,turtleloveshoes/kitsune,iDTLabssl/kitsune,safwanrahman/kitsune,brittanystoroz/kitsune,mozilla/kitsune,brittanystoroz/kitsune,MikkCZ/kitsune,mozilla/kitsune,Osmose/kitsune,dbbhattacharya/kitsune,rlr/kitsune,mythmon/kitsune,feer56/Kitsune2,turtleloveshoes/kitsune,YOTOV-LIMITED/kitsune,philipp-sumo/kitsune,chirilo/kitsune,feer56/Kitsune2,asdofindia/kitsune,orvi2014/kitsune,H1ghT0p/kitsune,anushbmx/kitsune,orvi2014/kitsune,MziRintu/kitsune,brittanystoroz/kitsune,H1ghT0p/kitsune,feer56/Kitsune1,safwanrahman/linuxdesh,safwanrahman/kitsune,chirilo/kitsune,MikkCZ/kitsune,anushbmx/kitsune,feer56/Kitsune2,MziRintu/kitsune,rlr/kitsune,rlr/kitsune,H1ghT0p/kitsune,anushbmx/kitsune,asdofindia/kitsune | <INSERT> # To pick up the right PyOpenSSL:
python_path = 'PYTHONPATH=/usr/local/lib64/python2.6/site-packages'
<INSERT_END> <INSERT> %s <INSERT_END> <REPLACE_OLD> (opts.kitsune, <REPLACE_NEW> (
opts.kitsune, python_path, <REPLACE_END> <|endoftext|> #!/usr/bin/env python
import os
from optparse import OptionParser
from jinja2 import Template
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
parser = OptionParser()
parser.add_option("-k", "--kitsune",
help="Location of kitsune (required)")
parser.add_option("-u", "--user",
help=("Prefix cron with this user. "
"Only define for cron.d style crontabs"))
parser.add_option("-p", "--python", default="/usr/bin/python2.6",
help="Python interpreter to use")
(opts, args) = parser.parse_args()
if not opts.kitsune:
parser.error("-k must be defined")
# To pick up the right PyOpenSSL:
python_path = 'PYTHONPATH=/usr/local/lib64/python2.6/site-packages'
ctx = {'django': 'cd %s; %s %s manage.py' % (
opts.kitsune, python_path, opts.python),}
ctx['cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
print Template(TEMPLATE).render(**ctx)
if __name__ == "__main__":
main()
| Add local site-packages to PYTHONPATH.
To pick up the local version of PyOpenSSL.
#!/usr/bin/env python
import os
from optparse import OptionParser
from jinja2 import Template
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
def main():
parser = OptionParser()
parser.add_option("-k", "--kitsune",
help="Location of kitsune (required)")
parser.add_option("-u", "--user",
help=("Prefix cron with this user. "
"Only define for cron.d style crontabs"))
parser.add_option("-p", "--python", default="/usr/bin/python2.6",
help="Python interpreter to use")
(opts, args) = parser.parse_args()
if not opts.kitsune:
parser.error("-k must be defined")
ctx = {'django': 'cd %s; %s manage.py' % (opts.kitsune, opts.python),}
ctx['cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
print Template(TEMPLATE).render(**ctx)
if __name__ == "__main__":
main()
|
bdeb28f2f7840c04dbf65b6c0771c121f229e59a | tests.py | tests.py | #!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
self.added_path = os.path.join(os.path.dirname(__file__), 'more-test-plugins')
self.added_path = os.path.join(os.path.dirname(__file__), 'some-test-plugins')
sys.path.append(self.added_path)
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
sys.path.append(os.path.join(os.path.dirname(__file__), 'more-test-plugins'))
sys.path.append(os.path.join(os.path.dirname(__file__), 'some-test-plugins'))
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
| Fix test case for multiple locations of a namespace | Fix test case for multiple locations of a namespace
| Python | mit | ironfroggy/straight.plugin,pombredanne/straight.plugin | <REPLACE_OLD> self.added_path = os.path.join(os.path.dirname(__file__), 'more-test-plugins')
<REPLACE_NEW> sys.path.append(os.path.join(os.path.dirname(__file__), 'more-test-plugins'))
<REPLACE_END> <REPLACE_OLD> self.added_path = os.path.join(os.path.dirname(__file__), 'some-test-plugins')
sys.path.append(self.added_path)
<REPLACE_NEW> sys.path.append(os.path.join(os.path.dirname(__file__), 'some-test-plugins'))
<REPLACE_END> <|endoftext|> #!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
sys.path.append(os.path.join(os.path.dirname(__file__), 'more-test-plugins'))
sys.path.append(os.path.join(os.path.dirname(__file__), 'some-test-plugins'))
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
| Fix test case for multiple locations of a namespace
#!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
self.added_path = os.path.join(os.path.dirname(__file__), 'more-test-plugins')
self.added_path = os.path.join(os.path.dirname(__file__), 'some-test-plugins')
sys.path.append(self.added_path)
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
|
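The bug fixed in the record above is that setUp overwrote self.added_path, so only the second plugin directory was really appended while tearDown still popped two sys.path entries. A sketch of a symmetric alternative using unittest's addCleanup, so each appended path gets its own removal; the directory names are placeholders:

import os
import sys
import unittest

class PluginPathTestCase(unittest.TestCase):
    def setUp(self):
        here = os.path.dirname(__file__)
        for name in ('more-test-plugins', 'some-test-plugins'):
            path = os.path.join(here, name)
            sys.path.append(path)
            # every append gets a matching removal, run in reverse order
            self.addCleanup(sys.path.remove, path)

    def test_paths_were_added(self):
        self.assertTrue(any(p.endswith('some-test-plugins') for p in sys.path))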
5da30efc6cbbc58db60ba29643c56448b5a79e77 | test/test_pipeline/components/test_base.py | test/test_pipeline/components/test_base.py | import unittest
from autosklearn.pipeline.components.base import find_components, \
AutoSklearnClassificationAlgorithm
class TestBase(unittest.TestCase):
def test_find_components(self):
c = find_components('dummy_components', 'dummy_components',
AutoSklearnClassificationAlgorithm)
self.assertEqual(len(c), 2)
self.assertEqual(c['dummy_component_1'].__name__, 'DummyComponent1')
self.assertEqual(c['dummy_component_2'].__name__, 'DummyComponent2')
| import os
import sys
import unittest
from autosklearn.pipeline.components.base import find_components, \
AutoSklearnClassificationAlgorithm
this_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(this_dir)
class TestBase(unittest.TestCase):
def test_find_components(self):
c = find_components('dummy_components',
os.path.join(this_dir, 'dummy_components'),
AutoSklearnClassificationAlgorithm)
self.assertEqual(len(c), 2)
self.assertEqual(c['dummy_component_1'].__name__, 'DummyComponent1')
self.assertEqual(c['dummy_component_2'].__name__, 'DummyComponent2')
| FIX fix unit test by fixing import paths | FIX fix unit test by fixing import paths
| Python | bsd-3-clause | automl/auto-sklearn,automl/auto-sklearn | <INSERT> os
import sys
import <INSERT_END> <REPLACE_OLD> AutoSklearnClassificationAlgorithm
class <REPLACE_NEW> AutoSklearnClassificationAlgorithm
this_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(this_dir)
class <REPLACE_END> <REPLACE_OLD> find_components('dummy_components', 'dummy_components',
<REPLACE_NEW> find_components('dummy_components',
os.path.join(this_dir, 'dummy_components'),
<REPLACE_END> <|endoftext|> import os
import sys
import unittest
from autosklearn.pipeline.components.base import find_components, \
AutoSklearnClassificationAlgorithm
this_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(this_dir)
class TestBase(unittest.TestCase):
def test_find_components(self):
c = find_components('dummy_components',
os.path.join(this_dir, 'dummy_components'),
AutoSklearnClassificationAlgorithm)
self.assertEqual(len(c), 2)
self.assertEqual(c['dummy_component_1'].__name__, 'DummyComponent1')
self.assertEqual(c['dummy_component_2'].__name__, 'DummyComponent2')
| FIX fix unit test by fixing import paths
import unittest
from autosklearn.pipeline.components.base import find_components, \
AutoSklearnClassificationAlgorithm
class TestBase(unittest.TestCase):
def test_find_components(self):
c = find_components('dummy_components', 'dummy_components',
AutoSklearnClassificationAlgorithm)
self.assertEqual(len(c), 2)
self.assertEqual(c['dummy_component_1'].__name__, 'DummyComponent1')
self.assertEqual(c['dummy_component_2'].__name__, 'DummyComponent2')
|
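The fix above makes the test independent of the working directory by resolving the fixture package relative to the test file itself rather than relying on a bare relative path. That idiom in isolation; 'dummy_components' stands in for any local fixture directory:

import os
import sys

this_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(this_dir)                              # make local fixture packages importable
fixtures = os.path.join(this_dir, 'dummy_components')  # absolute path, cwd-independent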
29e9d3a5fbac2730acd4c2115399556b09fb83e5 | tools/psycopg2_experiment.py | tools/psycopg2_experiment.py | #!/usr/bin/env python
'''
A CLI tool for formulating an Abba url using data from PostgreSQL
'''
from __future__ import print_function
import argparse
import psycopg2
import sys
TOOL_DESCRIPTION = '''
Formulates an Abba url using data from PostgreSQL
The query passed to this tool should return three columns, which will
become the label, success count, and trial count in Abba. If the query
flag is not specified, the query will be taken from standard input.
Note that the db parameters are optional, and if not provided psycopg2
will attempt to connect to the default locally-hosted database.
'''
def parse_arguments():
'''
Parse the arguments from the command line for this program
'''
parser = argparse.ArgumentParser(
description=TOOL_DESCRIPTION,
formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument(
'-d', '--db_params', metavar='PARAMS',
help='A libpq connection string with params for the target database'
)
parser.add_argument(
'-q', '--query',
help='The query which will provide the data for Abba',
)
return parser.parse_args()
def build_url_from_database_query(dsn, query):
'''
Build an Abba URL using data from a PostgreSQL connection and query
'''
url_template = 'http://thumbtack.com/labs/abba#{}'
cursor = psycopg2.connect(dsn).cursor()
cursor.execute(query)
if not cursor.rowcount:
return url_template.format('')
rows = cursor.fetchall()
if len(rows[0]) != 3:
raise ValueError('Query does not return 3 columns of data')
groups_querystr = '&'.join('{}={}%2C{}'.format(*row) for row in rows)
return url_template.format(groups_querystr)
def main():
args = parse_arguments()
query = args.query if args.query is not None else sys.stdin.read()
params = args.db_params if args.db_params else ''
print(build_url_from_database_query(params, query))
if __name__ == '__main__':
main()
| Add tool for pulling data from PostgreSQL to Abba | Add tool for pulling data from PostgreSQL to Abba
| Python | bsd-3-clause | thumbtack/abba,thii/abbajs,thumbtack/abba,thumbtack/abba | <REPLACE_OLD> <REPLACE_NEW> #!/usr/bin/env python
'''
A CLI tool for formulating an Abba url using data from PostgreSQL
'''
from __future__ import print_function
import argparse
import psycopg2
import sys
TOOL_DESCRIPTION = '''
Formulates an Abba url using data from PostgreSQL
The query passed to this tool should return three columns, which will
become the label, success count, and trial count in Abba. If the query
flag is not specified, the query will be taken from standard input.
Note that the db parameters are optional, and if not provided psycopg2
will attempt to connect to the default locally-hosted database.
'''
def parse_arguments():
'''
Parse the arguments from the command line for this program
'''
parser = argparse.ArgumentParser(
description=TOOL_DESCRIPTION,
formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument(
'-d', '--db_params', metavar='PARAMS',
help='A libpq connection string with params for the target database'
)
parser.add_argument(
'-q', '--query',
help='The query which will provide the data for Abba',
)
return parser.parse_args()
def build_url_from_database_query(dsn, query):
'''
Build an Abba URL using data from a PostgreSQL connection and query
'''
url_template = 'http://thumbtack.com/labs/abba#{}'
cursor = psycopg2.connect(dsn).cursor()
cursor.execute(query)
if not cursor.rowcount:
return url_template.format('')
rows = cursor.fetchall()
if len(rows[0]) != 3:
raise ValueError('Query does not return 3 columns of data')
groups_querystr = '&'.join('{}={}%2C{}'.format(*row) for row in rows)
return url_template.format(groups_querystr)
def main():
args = parse_arguments()
query = args.query if args.query is not None else sys.stdin.read()
params = args.db_params if args.db_params else ''
print(build_url_from_database_query(params, query))
if __name__ == '__main__':
main()
<REPLACE_END> <|endoftext|> #!/usr/bin/env python
'''
A CLI tool for formulating an Abba url using data from PostgreSQL
'''
from __future__ import print_function
import argparse
import psycopg2
import sys
TOOL_DESCRIPTION = '''
Formulates an Abba url using data from PostgreSQL
The query passed to this tool should return three columns, which will
become the label, success count, and trial count in Abba. If the query
flag is not specified, the query will be taken from standard input.
Note that the db parameters are optional, and if not provided psycopg2
will attempt to connect to the default locally-hosted database.
'''
def parse_arguments():
'''
Parse the arguments from the command line for this program
'''
parser = argparse.ArgumentParser(
description=TOOL_DESCRIPTION,
formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument(
'-d', '--db_params', metavar='PARAMS',
help='A libpq connection string with params for the target database'
)
parser.add_argument(
'-q', '--query',
help='The query which will provide the data for Abba',
)
return parser.parse_args()
def build_url_from_database_query(dsn, query):
'''
Build an Abba URL using data from a PostgreSQL connection and query
'''
url_template = 'http://thumbtack.com/labs/abba#{}'
cursor = psycopg2.connect(dsn).cursor()
cursor.execute(query)
if not cursor.rowcount:
return url_template.format('')
rows = cursor.fetchall()
if len(rows[0]) != 3:
raise ValueError('Query does not return 3 columns of data')
groups_querystr = '&'.join('{}={}%2C{}'.format(*row) for row in rows)
return url_template.format(groups_querystr)
def main():
args = parse_arguments()
query = args.query if args.query is not None else sys.stdin.read()
params = args.db_params if args.db_params else ''
print(build_url_from_database_query(params, query))
if __name__ == '__main__':
main()
| Add tool for pulling data from PostgreSQL to Abba
|
|
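The heart of the new tool above is turning (label, successes, trials) rows into Abba's URL fragment, with %2C as the URL-encoded comma. That piece on its own, with hard-coded rows standing in for the PostgreSQL query result:

rows = [('control', 491, 5000), ('variant', 563, 5000)]
groups = '&'.join('{}={}%2C{}'.format(*row) for row in rows)
print('http://thumbtack.com/labs/abba#' + groups)
# -> http://thumbtack.com/labs/abba#control=491%2C5000&variant=563%2C5000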
384e2fd9ae794e182dfdf4072d2689cff5f5d91d | log4django/routers.py | log4django/routers.py | from .settings import CONNECTION_NAME
class Log4DjangoRouter(object):
def db_for_read(self, model, **hints):
if model._meta.app_label == 'log4django':
return CONNECTION_NAME
return None
def db_for_write(self, model, **hints):
if model._meta.app_label == 'log4django':
return CONNECTION_NAME
return None
def allow_relation(self, obj1, obj2, **hints):
if obj1._meta.app_label == 'log4django' or obj2._meta.app_label == 'log4django':
return True
return None
def allow_syncdb(self, db, model):
if db == CONNECTION_NAME:
return model._meta.app_label == 'log4django'
elif model._meta.app_label == 'log4django':
return False
return None | from .settings import CONNECTION_NAME
class Log4DjangoRouter(object):
def db_for_read(self, model, **hints):
if model._meta.app_label == 'log4django':
return CONNECTION_NAME
return None
def db_for_write(self, model, **hints):
if model._meta.app_label == 'log4django':
return CONNECTION_NAME
return None
def allow_relation(self, obj1, obj2, **hints):
if obj1._meta.app_label == 'log4django' or obj2._meta.app_label == 'log4django':
return True
return None
def allow_syncdb(self, db, model):
if db == CONNECTION_NAME and model._meta.app_label == 'log4django':
return True
return None | Fix syncdb in database router | Fix syncdb in database router
| Python | bsd-3-clause | CodeScaleInc/log4django,CodeScaleInc/log4django,CodeScaleInc/log4django | <REPLACE_OLD> CONNECTION_NAME:
return model._meta.app_label == 'log4django'
elif <REPLACE_NEW> CONNECTION_NAME and <REPLACE_END> <REPLACE_OLD> False
<REPLACE_NEW> True
<REPLACE_END> <|endoftext|> from .settings import CONNECTION_NAME
class Log4DjangoRouter(object):
def db_for_read(self, model, **hints):
if model._meta.app_label == 'log4django':
return CONNECTION_NAME
return None
def db_for_write(self, model, **hints):
if model._meta.app_label == 'log4django':
return CONNECTION_NAME
return None
def allow_relation(self, obj1, obj2, **hints):
if obj1._meta.app_label == 'log4django' or obj2._meta.app_label == 'log4django':
return True
return None
def allow_syncdb(self, db, model):
if db == CONNECTION_NAME and model._meta.app_label == 'log4django':
return True
return None | Fix syncdb in database router
from .settings import CONNECTION_NAME
class Log4DjangoRouter(object):
def db_for_read(self, model, **hints):
if model._meta.app_label == 'log4django':
return CONNECTION_NAME
return None
def db_for_write(self, model, **hints):
if model._meta.app_label == 'log4django':
return CONNECTION_NAME
return None
def allow_relation(self, obj1, obj2, **hints):
if obj1._meta.app_label == 'log4django' or obj2._meta.app_label == 'log4django':
return True
return None
def allow_syncdb(self, db, model):
if db == CONNECTION_NAME:
return model._meta.app_label == 'log4django'
elif model._meta.app_label == 'log4django':
return False
return None |
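The one-line fix above matters because of Django's router contract: returning False vetoes the sync outright, while returning None defers to other routers. The corrected predicate reduced to a plain function for illustration; the real router method receives self, db and a model class instead:

def allow_syncdb(db_alias, app_label, connection_name='log4django'):
    if db_alias == connection_name and app_label == 'log4django':
        return True   # this app syncs only onto its own connection
    return None       # no opinion; let other routers decide

assert allow_syncdb('log4django', 'log4django') is True
assert allow_syncdb('default', 'log4django') is None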
28e9129a71cac0ab60071d6e2a6bd258312703a8 | example_script3.py | example_script3.py | """
Usage: python -m recipy example_script3.py OUTPUT.npy
"""
from __future__ import print_function
import sys
import numpy
if len(sys.argv) < 2:
print(__doc__, file=sys.stderr)
sys.exit(1)
arr = numpy.arange(10)
arr = arr + 500
# We've made a fairly big change here!
numpy.save(sys.argv[1], arr)
| Add example script for python -m recipy usage | Add example script for python -m recipy usage
| Python | apache-2.0 | github4ry/recipy,musically-ut/recipy,github4ry/recipy,MBARIMike/recipy,MichielCottaar/recipy,MBARIMike/recipy,recipy/recipy,recipy/recipy,MichielCottaar/recipy,musically-ut/recipy | <INSERT> """
Usage: python -m recipy example_script3.py OUTPUT.npy
"""
from __future__ import print_function
import sys
import numpy
if len(sys.argv) < 2:
<INSERT_END> <INSERT> print(__doc__, file=sys.stderr)
sys.exit(1)
arr = numpy.arange(10)
arr = arr + 500
# We've made a fairly big change here!
numpy.save(sys.argv[1], arr)
<INSERT_END> <|endoftext|> """
Usage: python -m recipy example_script3.py OUTPUT.npy
"""
from __future__ import print_function
import sys
import numpy
if len(sys.argv) < 2:
print(__doc__, file=sys.stderr)
sys.exit(1)
arr = numpy.arange(10)
arr = arr + 500
# We've made a fairly big change here!
numpy.save(sys.argv[1], arr)
| Add example script for python -m recipy usage
|
|
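The example script above leans on a small idiom: when the required argument is missing, the module docstring doubles as the usage text printed to stderr. That guard in isolation:

"""Usage: python script.py OUTPUT"""
from __future__ import print_function
import sys

if len(sys.argv) < 2:
    print(__doc__, file=sys.stderr)   # reuse the docstring as usage text
    sys.exit(1)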
40af69656b71cda7f775cface3478106f070ed35 | numba/__init__.py | numba/__init__.py | import sys
import logging
# NOTE: Be sure to keep the logging level commented out before committing. See:
# https://github.com/numba/numba/issues/31
# A good workaround is to make your tests handle a debug flag, per
# numba.tests.test_support.main().
logging.basicConfig(#level=logging.DEBUG,
format="\n\033[1m%(levelname)s -- %(module)s:%(lineno)d:%(funcName)s\033[0m\n%(message)s")
try:
from . import minivect
except ImportError:
print(logging.error("Did you forget to update submodule minivect?"))
print(logging.error("Run 'git submodule init' followed by 'git submodule update'"))
raise
from . import _numba_types
from ._numba_types import *
from . import decorators
from .decorators import *
def test():
raise Exception("run nosetests from the numba directory")
__all__ = _numba_types.__all__ + decorators.__all__
| import sys
import logging
# NOTE: Be sure to keep the logging level commented out before committing. See:
# https://github.com/numba/numba/issues/31
# A good workaround is to make your tests handle a debug flag, per
# numba.tests.test_support.main().
class _RedirectingHandler(logging.Handler):
'''
A log handler that applies its formatter and redirects the emission
to a parent handler.
'''
def set_handler(self, handler):
self.handler = handler
def emit(self, record):
# apply our own formatting
record.msg = self.format(record)
record.args = [] # clear the args
# use parent handler to emit record
self.handler.emit(record)
def _config_logger():
root = logging.getLogger(__name__)
format = "\n\033[1m%(levelname)s -- "\
"%(module)s:%(lineno)d:%(funcName)s\033[0m\n%(message)s"
try:
parent_hldr = root.parent.handlers[0]
except IndexError: # parent handler is not initialized?
# build our own handler --- uses sys.stderr by default.
parent_hldr = logging.StreamHandler()
hldr = _RedirectingHandler()
hldr.set_handler(parent_hldr)
fmt = logging.Formatter(format)
hldr.setFormatter(fmt)
root.addHandler(hldr)
root.propagate = False # do not propagate to the root logger
_config_logger()
try:
from . import minivect
except ImportError:
print(logging.error("Did you forget to update submodule minivect?"))
print(logging.error("Run 'git submodule init' followed by 'git submodule update'"))
raise
from . import _numba_types
from ._numba_types import *
from . import decorators
from .decorators import *
def test():
raise Exception("run nosetests from the numba directory")
__all__ = _numba_types.__all__ + decorators.__all__
| Update logging facility. Don't override root logger with basicConfig. | Update logging facility. Don't override root logger with basicConfig.
| Python | bsd-2-clause | numba/numba,stefanseefeld/numba,pitrou/numba,stonebig/numba,ssarangi/numba,ssarangi/numba,seibert/numba,gmarkall/numba,seibert/numba,pombredanne/numba,sklam/numba,shiquanwang/numba,stefanseefeld/numba,cpcloud/numba,gmarkall/numba,numba/numba,jriehl/numba,cpcloud/numba,seibert/numba,jriehl/numba,jriehl/numba,pitrou/numba,GaZ3ll3/numba,pitrou/numba,IntelLabs/numba,IntelLabs/numba,seibert/numba,jriehl/numba,gdementen/numba,sklam/numba,GaZ3ll3/numba,stefanseefeld/numba,shiquanwang/numba,gmarkall/numba,GaZ3ll3/numba,pitrou/numba,stonebig/numba,gmarkall/numba,gmarkall/numba,ssarangi/numba,seibert/numba,stefanseefeld/numba,stuartarchibald/numba,gdementen/numba,numba/numba,cpcloud/numba,GaZ3ll3/numba,stefanseefeld/numba,gdementen/numba,stuartarchibald/numba,stonebig/numba,pombredanne/numba,stuartarchibald/numba,sklam/numba,ssarangi/numba,pitrou/numba,numba/numba,stuartarchibald/numba,shiquanwang/numba,pombredanne/numba,stonebig/numba,pombredanne/numba,sklam/numba,cpcloud/numba,sklam/numba,stonebig/numba,IntelLabs/numba,gdementen/numba,jriehl/numba,IntelLabs/numba,pombredanne/numba,stuartarchibald/numba,IntelLabs/numba,ssarangi/numba,numba/numba,GaZ3ll3/numba,cpcloud/numba,gdementen/numba | <REPLACE_OLD> numba.tests.test_support.main().
logging.basicConfig(#level=logging.DEBUG,
format="\n\033[1m%(levelname)s <REPLACE_NEW> numba.tests.test_support.main().
class _RedirectingHandler(logging.Handler):
'''
A log hanlder that applies its formatter and redirect the emission
to a parent handler.
'''
def set_handler(self, handler):
self.handler = handler
def emit(self, record):
# apply our own formatting
record.msg = self.format(record)
record.args = [] # clear the args
# use parent handler to emit record
self.handler.emit(record)
def _config_logger():
root = logging.getLogger(__name__)
format = "\n\033[1m%(levelname)s <REPLACE_END> <REPLACE_OLD> %(module)s:%(lineno)d:%(funcName)s\033[0m\n%(message)s")
try:
<REPLACE_NEW> "\
"%(module)s:%(lineno)d:%(funcName)s\033[0m\n%(message)s"
try:
parent_hldr = root.parent.handlers[0]
except IndexError: # parent handler is not initialized?
# build our own handler --- uses sys.stderr by default.
parent_hldr = logging.StreamHandler()
hldr = _RedirectingHandler()
hldr.set_handler(parent_hldr)
fmt = logging.Formatter(format)
hldr.setFormatter(fmt)
root.addHandler(hldr)
root.propagate = False # do not propagate to the root logger
_config_logger()
try:
<REPLACE_END> <|endoftext|> import sys
import logging
# NOTE: Be sure to keep the logging level commented out before committing. See:
# https://github.com/numba/numba/issues/31
# A good workaround is to make your tests handle a debug flag, per
# numba.tests.test_support.main().
class _RedirectingHandler(logging.Handler):
'''
A log handler that applies its formatter and redirects the emission
to a parent handler.
'''
def set_handler(self, handler):
self.handler = handler
def emit(self, record):
# apply our own formatting
record.msg = self.format(record)
record.args = [] # clear the args
# use parent handler to emit record
self.handler.emit(record)
def _config_logger():
root = logging.getLogger(__name__)
format = "\n\033[1m%(levelname)s -- "\
"%(module)s:%(lineno)d:%(funcName)s\033[0m\n%(message)s"
try:
parent_hldr = root.parent.handlers[0]
except IndexError: # parent handler is not initialized?
# build our own handler --- uses sys.stderr by default.
parent_hldr = logging.StreamHandler()
hldr = _RedirectingHandler()
hldr.set_handler(parent_hldr)
fmt = logging.Formatter(format)
hldr.setFormatter(fmt)
root.addHandler(hldr)
root.propagate = False # do not propagate to the root logger
_config_logger()
try:
from . import minivect
except ImportError:
print(logging.error("Did you forget to update submodule minivect?"))
print(logging.error("Run 'git submodule init' followed by 'git submodule update'"))
raise
from . import _numba_types
from ._numba_types import *
from . import decorators
from .decorators import *
def test():
raise Exception("run nosetests from the numba directory")
__all__ = _numba_types.__all__ + decorators.__all__
| Update logging facility. Don't override root logger with basicConfig.
import sys
import logging
# NOTE: Be sure to keep the logging level commented out before committing. See:
# https://github.com/numba/numba/issues/31
# A good workaround is to make your tests handle a debug flag, per
# numba.tests.test_support.main().
logging.basicConfig(#level=logging.DEBUG,
format="\n\033[1m%(levelname)s -- %(module)s:%(lineno)d:%(funcName)s\033[0m\n%(message)s")
try:
from . import minivect
except ImportError:
print(logging.error("Did you forget to update submodule minivect?"))
print(logging.error("Run 'git submodule init' followed by 'git submodule update'"))
raise
from . import _numba_types
from ._numba_types import *
from . import decorators
from .decorators import *
def test():
raise Exception("run nosetests from the numba directory")
__all__ = _numba_types.__all__ + decorators.__all__
|
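The commit above avoids logging.basicConfig, which configures the root logger for every importer of the package, by formatting records locally and delegating emission to an already-configured handler. A compact runnable sketch of the same redirection idea; the logger name and format string are invented:

import logging

class RedirectingHandler(logging.Handler):
    def __init__(self, target):
        super(RedirectingHandler, self).__init__()
        self.target = target              # an already-configured handler
    def emit(self, record):
        record.msg = self.format(record)  # bake our formatting into the record
        record.args = ()
        self.target.emit(record)

log = logging.getLogger('mypkg')
h = RedirectingHandler(logging.StreamHandler())
h.setFormatter(logging.Formatter('%(levelname)s -- %(message)s'))
log.addHandler(h)
log.propagate = False                     # keep the root logger untouched
log.warning('hello')                      # -> WARNING -- hello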
6a17674897bbb3a44fb2153967e3985dfdb3d5df | zounds/learn/graph.py | zounds/learn/graph.py | import featureflow as ff
from random_samples import ShuffledSamples
from random_samples import InfiniteSampler
from preprocess import PreprocessingPipeline
def learning_pipeline():
class LearningPipeline(ff.BaseModel):
samples = ff.PickleFeature(ff.IteratorNode)
shuffled = ff.PickleFeature(
ShuffledSamples,
nsamples=ff.Var('nsamples'),
dtype=ff.Var('dtype'),
needs=samples)
return LearningPipeline
def infinite_streaming_learning_pipeline(cls):
roots = filter(lambda feature: feature.is_root, cls.features.itervalues())
if len(roots) != 1:
raise ValueError('cls must have a single root feature')
root = roots[0]
class InfiniteLearningPipeline(cls):
dataset = ff.Feature(
InfiniteSampler,
nsamples=ff.Var('nsamples'),
dtype=ff.Var('dtype'))
pipeline = ff.ClobberPickleFeature(
PreprocessingPipeline,
needs=cls.features,
store=True)
@classmethod
def load_network(cls):
if not cls.exists():
raise RuntimeError('No network has been trained or saved')
instance = cls()
for p in instance.pipeline:
try:
return p.network
except AttributeError:
pass
raise RuntimeError('There is no network in the pipeline')
root.needs = InfiniteLearningPipeline.dataset
InfiniteLearningPipeline.__name__ = cls.__name__
InfiniteLearningPipeline.__module__ = cls.__module__
return InfiniteLearningPipeline
| import featureflow as ff
from random_samples import ShuffledSamples
from random_samples import InfiniteSampler
from preprocess import PreprocessingPipeline
def learning_pipeline():
class LearningPipeline(ff.BaseModel):
samples = ff.PickleFeature(ff.IteratorNode)
shuffled = ff.PickleFeature(
ShuffledSamples,
nsamples=ff.Var('nsamples'),
dtype=ff.Var('dtype'),
needs=samples)
return LearningPipeline
def infinite_streaming_learning_pipeline(cls):
roots = filter(lambda feature: feature.is_root, cls.features.itervalues())
if len(roots) != 1:
raise ValueError('cls must have a single root feature')
root = roots[0]
class InfiniteLearningPipeline(cls):
dataset = ff.Feature(
InfiniteSampler,
nsamples=ff.Var('nsamples'),
dtype=ff.Var('dtype'),
feature_filter=ff.Var('feature_filter'),
parallel=ff.Var('parallel'))
pipeline = ff.ClobberPickleFeature(
PreprocessingPipeline,
needs=cls.features,
store=True)
@classmethod
def load_network(cls):
if not cls.exists():
raise RuntimeError('No network has been trained or saved')
instance = cls()
for p in instance.pipeline:
try:
return p.network
except AttributeError:
pass
raise RuntimeError('There is no network in the pipeline')
root.needs = InfiniteLearningPipeline.dataset
InfiniteLearningPipeline.__name__ = cls.__name__
InfiniteLearningPipeline.__module__ = cls.__module__
return InfiniteLearningPipeline
| Add a new option allowing client code to turn off parallelism | Add a new option allowing client code to turn off parallelism
| Python | mit | JohnVinyard/zounds,JohnVinyard/zounds,JohnVinyard/zounds,JohnVinyard/zounds | <REPLACE_OLD> dtype=ff.Var('dtype'))
<REPLACE_NEW> dtype=ff.Var('dtype'),
feature_filter=ff.Var('feature_filter'),
parallel=ff.Var('parallel'))
<REPLACE_END> <|endoftext|> import featureflow as ff
from random_samples import ShuffledSamples
from random_samples import InfiniteSampler
from preprocess import PreprocessingPipeline
def learning_pipeline():
class LearningPipeline(ff.BaseModel):
samples = ff.PickleFeature(ff.IteratorNode)
shuffled = ff.PickleFeature(
ShuffledSamples,
nsamples=ff.Var('nsamples'),
dtype=ff.Var('dtype'),
needs=samples)
return LearningPipeline
def infinite_streaming_learning_pipeline(cls):
roots = filter(lambda feature: feature.is_root, cls.features.itervalues())
if len(roots) != 1:
raise ValueError('cls must have a single root feature')
root = roots[0]
class InfiniteLearningPipeline(cls):
dataset = ff.Feature(
InfiniteSampler,
nsamples=ff.Var('nsamples'),
dtype=ff.Var('dtype'),
feature_filter=ff.Var('feature_filter'),
parallel=ff.Var('parallel'))
pipeline = ff.ClobberPickleFeature(
PreprocessingPipeline,
needs=cls.features,
store=True)
@classmethod
def load_network(cls):
if not cls.exists():
raise RuntimeError('No network has been trained or saved')
instance = cls()
for p in instance.pipeline:
try:
return p.network
except AttributeError:
pass
raise RuntimeError('There is no network in the pipeline')
root.needs = InfiniteLearningPipeline.dataset
InfiniteLearningPipeline.__name__ = cls.__name__
InfiniteLearningPipeline.__module__ = cls.__module__
return InfiniteLearningPipeline
| Add a new option allowing client code to turn off parallelism
import featureflow as ff
from random_samples import ShuffledSamples
from random_samples import InfiniteSampler
from preprocess import PreprocessingPipeline
def learning_pipeline():
class LearningPipeline(ff.BaseModel):
samples = ff.PickleFeature(ff.IteratorNode)
shuffled = ff.PickleFeature(
ShuffledSamples,
nsamples=ff.Var('nsamples'),
dtype=ff.Var('dtype'),
needs=samples)
return LearningPipeline
def infinite_streaming_learning_pipeline(cls):
roots = filter(lambda feature: feature.is_root, cls.features.itervalues())
if len(roots) != 1:
raise ValueError('cls must have a single root feature')
root = roots[0]
class InfiniteLearningPipeline(cls):
dataset = ff.Feature(
InfiniteSampler,
nsamples=ff.Var('nsamples'),
dtype=ff.Var('dtype'))
pipeline = ff.ClobberPickleFeature(
PreprocessingPipeline,
needs=cls.features,
store=True)
@classmethod
def load_network(cls):
if not cls.exists():
raise RuntimeError('No network has been trained or saved')
instance = cls()
for p in instance.pipeline:
try:
return p.network
except AttributeError:
pass
raise RuntimeError('There is no network in the pipeline')
root.needs = InfiniteLearningPipeline.dataset
InfiniteLearningPipeline.__name__ = cls.__name__
InfiniteLearningPipeline.__module__ = cls.__module__
return InfiniteLearningPipeline
|
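The change above threads two new late-bound settings through the dataset feature as ff.Var placeholders, which featureflow resolves when the pipeline is actually built. A toy sketch of that placeholder idea, independent of featureflow and purely illustrative:

class Var(object):
    def __init__(self, name):
        self.name = name

def resolve(kwargs, settings):
    # swap Var placeholders for concrete values at pipeline-build time
    return {k: settings[v.name] if isinstance(v, Var) else v
            for k, v in kwargs.items()}

spec = dict(nsamples=Var('nsamples'), dtype='float32', parallel=Var('parallel'))
print(resolve(spec, {'nsamples': 1000, 'parallel': False}))
# -> {'nsamples': 1000, 'dtype': 'float32', 'parallel': False}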
47c1dfd602281c56973de0d8afe64b923eb29592 | test/test_env.py | test/test_env.py | from _ebcf_alexa import env
from unittest.mock import patch, call
import pytest
@pytest.yield_fixture
def mock_now():
with patch.object(env, 'now') as now:
yield now
@patch('datetime.datetime')
def test_now_is_utc(fake_datetime):
assert env.now()
assert fake_datetime.now.call_args == call(tz=env.UTC)
def test_local_now(mock_now):
assert env.localnow() == mock_now.return_value.astimezone.return_value
assert mock_now.return_value.astimezone.call_args == call(env.TZ)
def test_date(mock_now):
assert env.date() == mock_now.return_value.date.return_value
def test_local_date():
with patch.object(env, 'localnow') as ln:
assert env.localdate() == ln.return_value.date.return_value
| Add unit tests for env module. | Add unit tests for env module.
These are pretty simple - just tests wiring to datetime and pytz
| Python | mit | dmotles/ebcf-alexa | <INSERT> from _ebcf_alexa import env
from unittest.mock import patch, call
import pytest
@pytest.yield_fixture
def mock_now():
<INSERT_END> <INSERT> with patch.object(env, 'now') as now:
yield now
@patch('datetime.datetime')
def test_now_is_utc(fake_datetime):
assert env.now()
assert fake_datetime.now.call_args == call(tz=env.UTC)
def test_local_now(mock_now):
assert env.localnow() == mock_now.return_value.astimezone.return_value
assert mock_now.return_value.astimezone.call_args == call(env.TZ)
def test_date(mock_now):
assert env.date() == mock_now.return_value.date.return_value
def test_local_date():
with patch.object(env, 'localnow') as ln:
assert env.localdate() == ln.return_value.date.return_value
<INSERT_END> <|endoftext|> from _ebcf_alexa import env
from unittest.mock import patch, call
import pytest
@pytest.yield_fixture
def mock_now():
with patch.object(env, 'now') as now:
yield now
@patch('datetime.datetime')
def test_now_is_utc(fake_datetime):
assert env.now()
assert fake_datetime.now.call_args == call(tz=env.UTC)
def test_local_now(mock_now):
assert env.localnow() == mock_now.return_value.astimezone.return_value
assert mock_now.return_value.astimezone.call_args == call(env.TZ)
def test_date(mock_now):
assert env.date() == mock_now.return_value.date.return_value
def test_local_date():
with patch.object(env, 'localnow') as ln:
assert env.localdate() == ln.return_value.date.return_value
| Add unit tests for env module.
These are pretty simple - just tests wiring to datetime and pytz
|
|
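The tests above rest on two mock idioms: patch.object to swap an attribute for the duration of one test, and comparing call_args against mock.call to check keyword arguments. Those idioms in a self-contained form; the env object here is a stand-in, not the real module:

from unittest import mock
from types import SimpleNamespace

env = SimpleNamespace(now=lambda **kw: 'real')   # stand-in for the real module

with mock.patch.object(env, 'now') as fake_now:
    env.now(tz='UTC')
    # call_args records positional and keyword arguments of the last call
    assert fake_now.call_args == mock.call(tz='UTC')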
2eb8dfdfdc31c5315546ff7c89cd59f8d6cb4727 | tacker/__init__.py | tacker/__init__.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import gettext
gettext.install('tacker', unicode=1)
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import gettext
import six
if six.PY2:
gettext.install('tacker', unicode=1)
else:
gettext.install('tacker')
| Fix gettext wrong argument error in py34 | Fix gettext wrong argument error in py34
Closes-Bug: #1550202
Change-Id: I468bef7a8c0a9fa93576744e7869dfa5f2569fa0
| Python | apache-2.0 | priya-pp/Tacker,openstack/tacker,zeinsteinz/tacker,trozet/tacker,priya-pp/Tacker,stackforge/tacker,openstack/tacker,trozet/tacker,openstack/tacker,stackforge/tacker,zeinsteinz/tacker | <REPLACE_OLD> gettext
gettext.install('tacker', unicode=1)
<REPLACE_NEW> gettext
import six
if six.PY2:
gettext.install('tacker', unicode=1)
else:
gettext.install('tacker')
<REPLACE_END> <|endoftext|> # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import gettext
import six
if six.PY2:
gettext.install('tacker', unicode=1)
else:
gettext.install('tacker')
| Fix gettext wrong argument error in py34
Closes-Bug: #1550202
Change-Id: I468bef7a8c0a9fa93576744e7869dfa5f2569fa0
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import gettext
gettext.install('tacker', unicode=1)
|
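The fix above exists because gettext.install() lost its unicode argument in Python 3, so passing it there raises TypeError. The guard on its own, using the same six shim as the record (six must be installed); 'myapp' is a placeholder domain:

import gettext
import six

if six.PY2:
    gettext.install('myapp', unicode=1)   # py2: ask for unicode strings
else:
    gettext.install('myapp')              # py3: strings are unicode already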
4e6458bddec9758da609c681a0ea05b43c399f50 | bot/multithreading/worker/pool/workers/main.py | bot/multithreading/worker/pool/workers/main.py | import queue
from bot.multithreading.work import Work
from bot.multithreading.worker import QueueWorker
from bot.multithreading.worker.pool.name_generator import WorkerPoolNameGenerator
from bot.multithreading.worker.pool.spawner import WorkerSpawner
class QueueWorkerPool(QueueWorker):
def __init__(self, base_name: str, work_queue: queue.Queue, error_handler: callable, worker_starter: callable,
min_workers: int, max_workers: int, max_seconds_idle: int):
"""
:param min_workers: Minimum number of workers that must be running at all times, ready to accept work.
:param max_workers: Maximum number of workers that can be spawned on heavy workload situations.
:param max_seconds_idle: Maximum number of seconds that the additional workers over min_workers
that were spawned will remain alive without processing work (i.e. in idle state).
"""
super().__init__(base_name, work_queue, error_handler)
name_generator = WorkerPoolNameGenerator(base_name, max_workers, max_seconds_idle)
self.spawner = WorkerSpawner(
name_generator, self.queue, error_handler, worker_starter,
min_workers, max_workers, max_seconds_idle
)
def start(self):
# called from main thread
self.spawner.spawn_initial_workers()
def run(self):
# this worker is not meant to be run, it only spawns workers when needed
pass
def post(self, work: Work):
# put on the queue
super().post(work)
# this should be quick and performs no I/O, so posting it to another worker would be inefficient
self.spawner.spawn_worker_if_needed()
| Create a worker pool to handle pool of workers that can grow or shrink as necessary | Create a worker pool to handle pool of workers that can grow or shrink as necessary
| Python | agpl-3.0 | alvarogzp/telegram-bot,alvarogzp/telegram-bot | <REPLACE_OLD> <REPLACE_NEW> import queue
from bot.multithreading.work import Work
from bot.multithreading.worker import QueueWorker
from bot.multithreading.worker.pool.name_generator import WorkerPoolNameGenerator
from bot.multithreading.worker.pool.spawner import WorkerSpawner
class QueueWorkerPool(QueueWorker):
def __init__(self, base_name: str, work_queue: queue.Queue, error_handler: callable, worker_starter: callable,
min_workers: int, max_workers: int, max_seconds_idle: int):
"""
:param min_workers: Minimum number of workers that must be running at all times, ready to accept work.
:param max_workers: Maximum number of workers that can be spawned on heavy workload situations.
:param max_seconds_idle: Maximum number of seconds that the additional workers over min_workers
that were spawned will remain alive without processing work (i.e. in idle state).
"""
super().__init__(base_name, work_queue, error_handler)
name_generator = WorkerPoolNameGenerator(base_name, max_workers, max_seconds_idle)
self.spawner = WorkerSpawner(
name_generator, self.queue, error_handler, worker_starter,
min_workers, max_workers, max_seconds_idle
)
def start(self):
# called from main thread
self.spawner.spawn_initial_workers()
def run(self):
# this worker is not meant to be run, it only spawns workers when needed
pass
def post(self, work: Work):
# put on the queue
super().post(work)
# this should be quick and performs no I/O, so posting it to another worker would be inefficient
self.spawner.spawn_worker_if_needed()
<REPLACE_END> <|endoftext|> import queue
from bot.multithreading.work import Work
from bot.multithreading.worker import QueueWorker
from bot.multithreading.worker.pool.name_generator import WorkerPoolNameGenerator
from bot.multithreading.worker.pool.spawner import WorkerSpawner
class QueueWorkerPool(QueueWorker):
def __init__(self, base_name: str, work_queue: queue.Queue, error_handler: callable, worker_starter: callable,
min_workers: int, max_workers: int, max_seconds_idle: int):
"""
:param min_workers: Minimum number of workers that must be running at all times, ready to accept work.
:param max_workers: Maximum number of workers that can be spawned on heavy workload situations.
:param max_seconds_idle: Maximum number of seconds that the additional workers over min_workers
that were spawned will remain alive without processing work (i.e. in idle state).
"""
super().__init__(base_name, work_queue, error_handler)
name_generator = WorkerPoolNameGenerator(base_name, max_workers, max_seconds_idle)
self.spawner = WorkerSpawner(
name_generator, self.queue, error_handler, worker_starter,
min_workers, max_workers, max_seconds_idle
)
def start(self):
# called from main thread
self.spawner.spawn_initial_workers()
def run(self):
# this worker is not meant to be run, it only spawns workers when needed
pass
def post(self, work: Work):
# put on the queue
super().post(work)
# this should be quick and performs no I/O, so posting it to another worker would be inefficient
self.spawner.spawn_worker_if_needed()
| Create a worker pool to handle pool of workers that can grow or shrink as necessary
|
|
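The pool above keeps post() cheap by splitting it into two non-blocking steps: enqueue the work, then decide whether one more worker thread is justified. A minimal stdlib sketch of that grow-on-demand decision; the threshold heuristic and max_workers value are invented:

import queue
import threading

q = queue.Queue()
workers = []

def worker():
    while True:
        item = q.get()
        # ... process item here ...
        q.task_done()

def spawn_if_needed(max_workers=8):
    # grow only while the backlog outpaces the current workers
    if q.qsize() > len(workers) and len(workers) < max_workers:
        t = threading.Thread(target=worker, daemon=True)
        workers.append(t)
        t.start()

def post(work):
    q.put(work)           # quick: just enqueue
    spawn_if_needed()     # quick: no blocking, safe to run inline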
0097f33900b6d75df38b28012a1e09fb03e22326 | driller/tasks.py | driller/tasks.py | import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, out_dir, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
| import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
| Remove out_dir from the drill task's list of arguments | Remove out_dir from the drill task's list of arguments
| Python | bsd-2-clause | shellphish/driller | <DELETE> out_dir, <DELETE_END> <|endoftext|> import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
| Remove out_dir from the drill task's list of arguments
import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, out_dir, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
|
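The change above narrows a Celery task's signature, which silently breaks any caller still passing the old positional argument. A hedged skeleton of the updated task shape; the broker URL is a placeholder and the body is elided:

from celery import Celery

app = Celery('tasks', broker='amqp://guest@localhost//')

@app.task
def drill(binary, input, fuzz_bitmap, qemu_dir):
    ...

# callers must drop out_dir as well:
# drill.delay(binary, input, fuzz_bitmap, qemu_dir)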
cd2ff46284a8144755b880c035d0a89938474955 | salt/grains/extra.py | salt/grains/extra.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
# Import python libs
import os
# Import third party libs
import yaml
import logging
# Import salt libs
import salt.utils.files
log = logging.getLogger(__name__)
def shell():
'''
Return the default shell to use on this system
'''
# Provides:
# shell
return {'shell': os.environ.get('SHELL', '/bin/sh')}
def config():
'''
Return the grains set in the grains file
'''
if 'conf_file' not in __opts__:
return {}
if os.path.isdir(__opts__['conf_file']):
gfn = os.path.join(
__opts__['conf_file'],
'grains'
)
else:
gfn = os.path.join(
os.path.dirname(__opts__['conf_file']),
'grains'
)
if os.path.isfile(gfn):
with salt.utils.files.fopen(gfn, 'rb') as fp_:
try:
return yaml.safe_load(fp_.read())
except Exception:
log.warning("Bad syntax in grains file! Skipping.")
return {}
return {}
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
# Import python libs
import os
# Import third party libs
import yaml
import logging
# Import salt libs
import salt.utils.files
import salt.utils.platform
log = logging.getLogger(__name__)
def shell():
'''
Return the default shell to use on this system
'''
# Provides:
# shell
if salt.utils.platform.is_windows():
env_var = 'COMSPEC'
default = r'C:\Windows\system32\cmd.exe'
else:
env_var = 'SHELL'
default = '/bin/sh'
return {'shell': os.environ.get(env_var, default)}
def config():
'''
Return the grains set in the grains file
'''
if 'conf_file' not in __opts__:
return {}
if os.path.isdir(__opts__['conf_file']):
gfn = os.path.join(
__opts__['conf_file'],
'grains'
)
else:
gfn = os.path.join(
os.path.dirname(__opts__['conf_file']),
'grains'
)
if os.path.isfile(gfn):
with salt.utils.files.fopen(gfn, 'rb') as fp_:
try:
return yaml.safe_load(fp_.read())
except Exception:
log.warning("Bad syntax in grains file! Skipping.")
return {}
return {}
| Return COMSPEC as the shell for Windows | Return COMSPEC as the shell for Windows
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | <REPLACE_OLD> salt.utils.files
log <REPLACE_NEW> salt.utils.files
import salt.utils.platform
log <REPLACE_END> <INSERT> if salt.utils.platform.is_windows():
env_var = 'COMSPEC'
default = r'C:\Windows\system32\cmd.exe'
else:
env_var = 'SHELL'
default = '/bin/sh'
<INSERT_END> <REPLACE_OLD> os.environ.get('SHELL', '/bin/sh')}
def <REPLACE_NEW> os.environ.get(env_var, default)}
def <REPLACE_END> <|endoftext|> # -*- coding: utf-8 -*-
from __future__ import absolute_import
# Import python libs
import os
# Import third party libs
import yaml
import logging
# Import salt libs
import salt.utils.files
import salt.utils.platform
log = logging.getLogger(__name__)
def shell():
'''
Return the default shell to use on this system
'''
# Provides:
# shell
if salt.utils.platform.is_windows():
env_var = 'COMSPEC'
default = r'C:\Windows\system32\cmd.exe'
else:
env_var = 'SHELL'
default = '/bin/sh'
return {'shell': os.environ.get(env_var, default)}
def config():
'''
Return the grains set in the grains file
'''
if 'conf_file' not in __opts__:
return {}
if os.path.isdir(__opts__['conf_file']):
gfn = os.path.join(
__opts__['conf_file'],
'grains'
)
else:
gfn = os.path.join(
os.path.dirname(__opts__['conf_file']),
'grains'
)
if os.path.isfile(gfn):
with salt.utils.files.fopen(gfn, 'rb') as fp_:
try:
return yaml.safe_load(fp_.read())
except Exception:
log.warning("Bad syntax in grains file! Skipping.")
return {}
return {}
| Return COMSPEC as the shell for Windows
# -*- coding: utf-8 -*-
from __future__ import absolute_import
# Import python libs
import os
# Import third party libs
import yaml
import logging
# Import salt libs
import salt.utils.files
log = logging.getLogger(__name__)
def shell():
'''
Return the default shell to use on this system
'''
# Provides:
# shell
return {'shell': os.environ.get('SHELL', '/bin/sh')}
def config():
'''
Return the grains set in the grains file
'''
if 'conf_file' not in __opts__:
return {}
if os.path.isdir(__opts__['conf_file']):
gfn = os.path.join(
__opts__['conf_file'],
'grains'
)
else:
gfn = os.path.join(
os.path.dirname(__opts__['conf_file']),
'grains'
)
if os.path.isfile(gfn):
with salt.utils.files.fopen(gfn, 'rb') as fp_:
try:
return yaml.safe_load(fp_.read())
except Exception:
log.warning("Bad syntax in grains file! Skipping.")
return {}
return {}
|
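The fix above selects the shell from COMSPEC on Windows rather than assuming a POSIX SHELL variable. The same selection with only the standard library, without Salt's platform helper:

import os

if os.name == 'nt':
    env_var, default = 'COMSPEC', r'C:\Windows\system32\cmd.exe'
else:
    env_var, default = 'SHELL', '/bin/sh'
shell = os.environ.get(env_var, default)
print(shell)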
8e0cf99380b284ff4f7b962f622933c243828be7 | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name="django-posgtres-geometry",
version="0.1.2",
packages=find_packages(),
install_requires=['django', 'psycopg2'],
description="Django ORM field for Postgres geometry types",
author="Daniele Esposti",
author_email="[email protected]",
maintainer="Daniele Esposti",
maintainer_email="[email protected]",
url="http://github.com/expobrain/django-postgres-geometry",
)
| from setuptools import setup, find_packages
setup(
name="django-postgres-geometry",
version="0.1.2",
packages=find_packages(),
install_requires=['django', 'psycopg2'],
description="Django ORM field for Postgres geometry types",
author="Daniele Esposti",
author_email="[email protected]",
maintainer="Daniele Esposti",
maintainer_email="[email protected]",
url="http://github.com/expobrain/django-postgres-geometry",
)
| Fix typo in package name | Fix typo in package name
| Python | mit | team23/django-postgres-geometry | <REPLACE_OLD> name="django-posgtres-geometry",
<REPLACE_NEW> name="django-postgres-geometry",
<REPLACE_END> <|endoftext|> from setuptools import setup, find_packages
setup(
name="django-postgres-geometry",
version="0.1.2",
packages=find_packages(),
install_requires=['django', 'psycopg2'],
description="Django ORM field for Postgres geometry types",
author="Daniele Esposti",
author_email="[email protected]",
maintainer="Daniele Esposti",
maintainer_email="[email protected]",
url="http://github.com/expobrain/django-postgres-geometry",
)
| Fix typo in package name
from setuptools import setup, find_packages
setup(
name="django-posgtres-geometry",
version="0.1.2",
packages=find_packages(),
install_requires=['django', 'psycopg2'],
description="Django ORM field for Postgres geometry types",
author="Daniele Esposti",
author_email="[email protected]",
maintainer="Daniele Esposti",
maintainer_email="[email protected]",
url="http://github.com/expobrain/django-postgres-geometry",
)
|
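Metadata typos like the one fixed above ship silently because setuptools accepts any name string. A tiny guard one could add to CI, assuming setup.py supports the standard --name query; the expected name here is this project's:

import subprocess
import sys

name = subprocess.check_output(
    [sys.executable, 'setup.py', '--name']).decode().strip()
assert name == 'django-postgres-geometry', name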
5b7b301c3f9dd906b8450acc5b28dbcb35fe973a | candidates/management/commands/candidates_fix_not_standing.py | candidates/management/commands/candidates_fix_not_standing.py | from __future__ import print_function, unicode_literals
from django.core.management.base import BaseCommand
from popolo.models import Membership
from candidates.models import PersonExtra
class Command(BaseCommand):
help = "Find elections in not_standing that should be removed"
def add_arguments(self, parser):
parser.add_argument(
'--delete', action='store_true',
help="Don't just find these broken cases, also fix them",
)
def handle(self, *args, **options):
for person_extra in PersonExtra.objects.filter(
not_standing__isnull=False
):
election_to_remove = []
for election in person_extra.not_standing.all():
candidacies = Membership.objects.filter(
person=person_extra.base,
extra__election=election,
role=election.candidate_membership_role,
)
if candidacies.exists():
election_to_remove.append(election)
# Now print out the elections we would remove from
# not_standing for that person. (And, if --delete is
# specified, actually remove it.)
for election in election_to_remove:
fmt = '{person} is marked as not standing in {election}'
print(fmt.format(person=person_extra.base, election=election))
print(' ... but also has a candidacy in that election!')
if options['delete']:
fmt = " Deleting {election} from {person}'s not_standing"
print(fmt.format(
election=election.name,
person=person_extra.base.name,
))
person_extra.not_standing.remove(election)
| Add a script to fix the not_standing relationships of people | Add a script to fix the not_standing relationships of people
There was a bug in bulk adding people which meant that their
"not_standing" status for an election wasn't removed when reinstating
them as a candidate in that election.
That bug has been fixed in the parent commit, but there are still people
in the database who have a candidacy (represented by a Membership and
MembershipExtra) in an election, but also have that election in their
not_standing.
This commit introduces a script that will find those cases, report them
and (if --delete is specified) fix the inconsistency by removing the
election from their not_standing.
| Python | agpl-3.0 | DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative | <REPLACE_OLD> <REPLACE_NEW> from __future__ import print_function, unicode_literals
from django.core.management.base import BaseCommand
from popolo.models import Membership
from candidates.models import PersonExtra
class Command(BaseCommand):
help = "Find elections in not_standing that should be removed"
def add_arguments(self, parser):
parser.add_argument(
'--delete', action='store_true',
help="Don't just find these broken cases, also fix them",
)
def handle(self, *args, **options):
for person_extra in PersonExtra.objects.filter(
not_standing__isnull=False
):
election_to_remove = []
for election in person_extra.not_standing.all():
candidacies = Membership.objects.filter(
person=person_extra.base,
extra__election=election,
role=election.candidate_membership_role,
)
if candidacies.exists():
election_to_remove.append(election)
# Now print out the elections we would remove from
# not_standing for that person. (And, if --delete is
# specified, actually remove it.)
for election in election_to_remove:
fmt = '{person} is marked as not standing in {election}'
print(fmt.format(person=person_extra.base, election=election))
print(' ... but also has a candidacy in that election!')
if options['delete']:
fmt = " Deleting {election} from {person}'s not_standing"
print(fmt.format(
election=election.name,
person=person_extra.base.name,
))
person_extra.not_standing.remove(election)
<REPLACE_END> <|endoftext|> from __future__ import print_function, unicode_literals
from django.core.management.base import BaseCommand
from popolo.models import Membership
from candidates.models import PersonExtra
class Command(BaseCommand):
help = "Find elections in not_standing that should be removed"
def add_arguments(self, parser):
parser.add_argument(
'--delete', action='store_true',
help="Don't just find these broken cases, also fix them",
)
def handle(self, *args, **options):
for person_extra in PersonExtra.objects.filter(
not_standing__isnull=False
):
election_to_remove = []
for election in person_extra.not_standing.all():
candidacies = Membership.objects.filter(
person=person_extra.base,
extra__election=election,
role=election.candidate_membership_role,
)
if candidacies.exists():
election_to_remove.append(election)
# Now print out the elections we would remove from
# not_standing for that person. (And, if --delete is
# specified, actually remove it.)
for election in election_to_remove:
fmt = '{person} is marked as not standing in {election}'
print(fmt.format(person=person_extra.base, election=election))
print(' ... but also has a candidacy in that election!')
if options['delete']:
fmt = " Deleting {election} from {person}'s not_standing"
print(fmt.format(
election=election.name,
person=person_extra.base.name,
))
person_extra.not_standing.remove(election)
| Add a script to fix the not_standing relationships of people
There was a bug in bulk adding people which meant that their
"not_standing" status for an election wasn't removed when reinstating
them as a candidate in that election.
That bug has been fixed in the parent commit, but there are still people
in the database who have a candidacy (represented by a Membership and
MembershipExtra) in an election, but also have that election in their
not_standing.
This commit introduces a script that will find those cases, report them
and (if --delete is specified) fix the inconsistency by removing the
election from their not_standing.
|
|
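The management command above is dry-run by default: it always reports inconsistencies, and mutates data only when --delete is passed. That shape reduced to plain argparse; find_broken() returns canned placeholder data here:

import argparse

def find_broken():
    return ['example-inconsistency']   # canned placeholder data

parser = argparse.ArgumentParser()
parser.add_argument('--delete', action='store_true',
                    help="Don't just report broken cases, also fix them")
args = parser.parse_args()
for item in find_broken():
    print('found:', item)
    if args.delete:
        print('fixing:', item)         # the destructive step stays opt-in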
7f05b622ab6cb1202d2d00ec1bcac2c5bbb326b7 | dthm4kaiako/config/__init__.py | dthm4kaiako/config/__init__.py | """Configuration for Django system."""
__version__ = "0.9.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| """Configuration for Django system."""
__version__ = "0.9.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| Increment version number to 0.9.3 | Increment version number to 0.9.3
| Python | mit | uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers | <REPLACE_OLD> "0.9.2"
__version_info__ <REPLACE_NEW> "0.9.3"
__version_info__ <REPLACE_END> <|endoftext|> """Configuration for Django system."""
__version__ = "0.9.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| Increment version number to 0.9.3
"""Configuration for Django system."""
__version__ = "0.9.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
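The bump above only has to touch one string because __version_info__ is derived from __version__. The derivation on its own, including the pre-release case that replace('-', '.', 1) exists for:

__version__ = "0.9.3-beta"
__version_info__ = tuple(
    int(num) if num.isdigit() else num
    for num in __version__.replace("-", ".", 1).split(".")
)
print(__version_info__)   # (0, 9, 3, 'beta')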
3364747195f0f3d2711169fb92c250fc10823d82 | default_settings.py | default_settings.py | # Copyright 2014 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
UV4 = os.path.join("C:","Keil","UV4","UV4.exe")
IARBUILD = os.path.join('C:','Program Files (x86)','IAR Systems','Embedded Workbench 7.0','common','bin','IarBuild.exe')
# Be able to locate project generator anywhere in a project
# By default it's tools/project_generator (2 folders deep from root)
PROJECT_ROOT= os.path.join('..','..')
if os.name == "posix":
# Expects either arm-none-eabi to be installed here, or
# even better, a symlink from /usr/local/arm-none-eabi to the most recent
# version.
gcc_bin_path = "/usr/local/arm-none-eabi/bin/"
elif os.name == "nt":
gcc_bin_path = ""
try:
from user_settings import *
except:
pass
| # Copyright 2014 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
UV4 = os.path.join("C:","Keil","UV4","UV4.exe")
IARBUILD = os.path.join('C:','Program Files (x86)','IAR Systems','Embedded Workbench 7.0','common','bin','IarBuild.exe')
# Be able to locate project generator anywhere in a project
# By default it's tools/project_generator (2 folders deep from root)
PROJECT_ROOT= os.path.join('..','..')
if os.name == "posix":
# Expects either arm-none-eabi to be installed here, or
# even better, a symlink from /usr/local/arm-none-eabi to the most recent
# version.
gcc_bin_path = "/usr/local/arm-none-eabi/bin/"
elif os.name == "nt":
gcc_bin_path = ""
try:
from user_settings import *
except:
logging.info("Using default settings.")
| Add message if you're using default settings | Add message if you're using default settings
| Python | apache-2.0 | 0xc0170/valinor,sarahmarshy/project_generator,autopulated/valinor,ARMmbed/valinor,sg-/project_generator,ohagendorf/project_generator,molejar/project_generator,aethaniel/project_generator,0xc0170/project_generator,sg-/project_generator,project-generator/project_generator,hwfwgrp/project_generator | <REPLACE_OLD> *
except:
pass
<REPLACE_NEW> *
except:
logging.info("Using default settings.")
<REPLACE_END> <|endoftext|> # Copyright 2014 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
UV4 = os.path.join("C:","Keil","UV4","UV4.exe")
IARBUILD = os.path.join('C:','Program Files (x86)','IAR Systems','Embedded Workbench 7.0','common','bin','IarBuild.exe')
# Be able to locate project generator anywhere in a project
# By default it's tools/project_generator (2 folders deep from root)
PROJECT_ROOT= os.path.join('..','..')
if os.name == "posix":
# Expects either arm-none-eabi to be installed here, or
# even better, a symlink from /usr/local/arm-none-eabi to the most recent
# version.
gcc_bin_path = "/usr/local/arm-none-eabi/bin/"
elif os.name == "nt":
gcc_bin_path = ""
try:
from user_settings import *
except:
logging.info("Using default settings.")
| Add message if you're using default settings
# Copyright 2014 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
UV4 = os.path.join("C:","Keil","UV4","UV4.exe")
IARBUILD = os.path.join('C:','Program Files (x86)','IAR Systems','Embedded Workbench 7.0','common','bin','IarBuild.exe')
# Be able to locate project generator anywhere in a project
# By default it's tools/project_generator (2 folders deep from root)
PROJECT_ROOT= os.path.join('..','..')
if os.name == "posix":
# Expects either arm-none-eabi to be installed here, or
# even better, a symlink from /usr/local/arm-none-eabi to the most recent
# version.
gcc_bin_path = "/usr/local/arm-none-eabi/bin/"
elif os.name == "nt":
gcc_bin_path = ""
try:
from user_settings import *
except:
pass
|
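The commit above turns a silent "except: pass" into a logged fallback. A slightly narrower variant of the same pattern, catching only ImportError so genuine errors raised inside user_settings.py still surface (a sketch, not the repository's code):

import logging

try:
    from user_settings import *  # noqa: F401,F403 -- optional local overrides
except ImportError:
    # Only the missing-module case is expected; anything else should propagate.
    logging.info("No user_settings module found; using default settings.")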
759f6a2e4ced9ce9beeda01e638f109d946050b1 | server/migrations/0006_auto_20150811_0811.py | server/migrations/0006_auto_20150811_0811.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404
from django.db import models, migrations
def add_initial_date(apps, schema_editor):
Machine = apps.get_model("server", "Machine")
for machine in Machine.objects.all():
if not machine.first_checkin:
machine.first_checkin = machine.last_checkin
machine.save()
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
migrations.RunPython(add_initial_date),
]
| Add in the first checkin date if it doesn't exist | Add in the first checkin date if it doesn't exist
| Python | apache-2.0 | sheagcraig/sal,salopensource/sal,salopensource/sal,macjustice/sal,chasetb/sal,salopensource/sal,sheagcraig/sal,erikng/sal,macjustice/sal,erikng/sal,chasetb/sal,chasetb/sal,erikng/sal,macjustice/sal,macjustice/sal,sheagcraig/sal,erikng/sal,sheagcraig/sal,chasetb/sal,salopensource/sal | <REPLACE_OLD> unicode_literals
from <REPLACE_NEW> unicode_literals
from django.shortcuts import get_object_or_404
from <REPLACE_END> <REPLACE_OLD> migrations
class <REPLACE_NEW> migrations
def add_initial_date(apps, schema_editor):
Machine = apps.get_model("server", "Machine")
for machine in Machine.objects.all():
if not machine.first_checkin:
machine.first_checkin = machine.last_checkin
machine.save()
class <REPLACE_END> <INSERT> migrations.RunPython(add_initial_date),
<INSERT_END> <|endoftext|> # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import get_object_or_404
from django.db import models, migrations
def add_initial_date(apps, schema_editor):
Machine = apps.get_model("server", "Machine")
for machine in Machine.objects.all():
if not machine.first_checkin:
machine.first_checkin = machine.last_checkin
machine.save()
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
migrations.RunPython(add_initial_date),
]
| Add in the first checkin date if it doesn't exist
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('server', '0005_auto_20150717_1827'),
]
operations = [
migrations.AddField(
model_name='machine',
name='first_checkin',
field=models.DateTimeField(auto_now_add=True, null=True),
),
migrations.AddField(
model_name='machine',
name='sal_version',
field=models.TextField(null=True, blank=True),
),
]
|
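The sal commit above pairs two AddField operations with a RunPython backfill. A general sketch of that data-migration shape (the filter, update_fields, and reverse function below are refinements not present in the original commit):

from django.db import migrations

def forwards(apps, schema_editor):
    # Always fetch the historical model via apps.get_model(), never import it.
    Machine = apps.get_model("server", "Machine")
    for machine in Machine.objects.filter(first_checkin__isnull=True):
        machine.first_checkin = machine.last_checkin
        machine.save(update_fields=["first_checkin"])

def backwards(apps, schema_editor):
    # The backfill is harmless to keep on rollback.
    pass

class Migration(migrations.Migration):
    dependencies = [("server", "0006_auto_20150811_0811")]
    operations = [migrations.RunPython(forwards, backwards)]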
264075d9b313f5c2677e32fcf5d340bba73f0b0e | corehq/apps/app_manager/migrations/0019_exchangeapplication_required_privileges.py | corehq/apps/app_manager/migrations/0019_exchangeapplication_required_privileges.py | # Generated by Django 2.2.24 on 2021-09-13 21:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app_manager', '0018_migrate_case_search_labels'),
]
operations = [
migrations.AddField(
model_name='exchangeapplication',
name='required_privileges',
field=models.TextField(null=True),
),
]
| # Generated by Django 2.2.24 on 2021-09-14 17:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app_manager', '0018_migrate_case_search_labels'),
]
operations = [
migrations.AddField(
model_name='exchangeapplication',
name='required_privileges',
field=models.TextField(help_text='Space-separated list of privilege strings from corehq.privileges', null=True),
),
]
| Fix migration with help text | Fix migration with help text
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | <REPLACE_OLD> 2021-09-13 21:04
from <REPLACE_NEW> 2021-09-14 17:54
from <REPLACE_END> <REPLACE_OLD> field=models.TextField(null=True),
<REPLACE_NEW> field=models.TextField(help_text='Space-separated list of privilege strings from corehq.privileges', null=True),
<REPLACE_END> <|endoftext|> # Generated by Django 2.2.24 on 2021-09-14 17:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app_manager', '0018_migrate_case_search_labels'),
]
operations = [
migrations.AddField(
model_name='exchangeapplication',
name='required_privileges',
field=models.TextField(help_text='Space-separated list of privilege strings from corehq.privileges', null=True),
),
]
| Fix migration with help text
# Generated by Django 2.2.24 on 2021-09-13 21:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app_manager', '0018_migrate_case_search_labels'),
]
operations = [
migrations.AddField(
model_name='exchangeapplication',
name='required_privileges',
field=models.TextField(null=True),
),
]
|
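Given the help_text added above ("Space-separated list of privilege strings from corehq.privileges"), consuming the field is a plain split. The helper and sample values below are hypothetical, not part of the commcare-hq commit:

def parse_required_privileges(raw):
    # None or an empty string means the app requires no privileges.
    return raw.split() if raw else []

assert parse_required_privileges("priv_a priv_b") == ["priv_a", "priv_b"]
assert parse_required_privileges("") == []
assert parse_required_privileges(None) == []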
0c3529bd264d5512e31d828c65676baff6edefa6 | pinax/waitinglist/templatetags/pinax_waitinglist_tags.py | pinax/waitinglist/templatetags/pinax_waitinglist_tags.py | from django import template
from ..forms import WaitingListEntryForm
register = template.Library()
@register.assignment_tag
def waitinglist_entry_form():
"""
    Get a (new) form object to post a new waiting list entry.
Syntax::
{% waitinglist_entry_form as [varname] %}
"""
return WaitingListEntryForm()
| from django import template
from ..forms import WaitingListEntryForm
register = template.Library()
@register.simple_tag(takes_context=True)
def waitinglist_entry_form(context):
"""
    Get a (new) form object to post a new waiting list entry.
Syntax::
{% waitinglist_entry_form as [varname] %}
"""
initial = {}
if "request" in context:
initial.update({
"referrer": context["request"].META.get("HTTP_REFERER", ""),
"campaign": context["request"].GET.get("wlc", "")
})
return WaitingListEntryForm(initial=initial)
| Update template tag to also take context | Update template tag to also take context
| Python | mit | pinax/pinax-waitinglist,pinax/pinax-waitinglist | <REPLACE_OLD> template.Library()
@register.assignment_tag
def waitinglist_entry_form():
<REPLACE_NEW> template.Library()
@register.simple_tag(takes_context=True)
def waitinglist_entry_form(context):
<REPLACE_END> <INSERT> initial = {}
if "request" in context:
initial.update({
"referrer": context["request"].META.get("HTTP_REFERER", ""),
"campaign": context["request"].GET.get("wlc", "")
})
<INSERT_END> <REPLACE_OLD> WaitingListEntryForm()
<REPLACE_NEW> WaitingListEntryForm(initial=initial)
<REPLACE_END> <|endoftext|> from django import template
from ..forms import WaitingListEntryForm
register = template.Library()
@register.simple_tag(takes_context=True)
def waitinglist_entry_form(context):
"""
    Get a (new) form object to post a new waiting list entry.
Syntax::
{% waitinglist_entry_form as [varname] %}
"""
initial = {}
if "request" in context:
initial.update({
"referrer": context["request"].META.get("HTTP_REFERER", ""),
"campaign": context["request"].GET.get("wlc", "")
})
return WaitingListEntryForm(initial=initial)
| Update template tag to also take context
from django import template
from ..forms import WaitingListEntryForm
register = template.Library()
@register.assignment_tag
def waitinglist_entry_form():
"""
    Get a (new) form object to post a new waiting list entry.
Syntax::
{% waitinglist_entry_form as [varname] %}
"""
return WaitingListEntryForm()
|
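The guard 'if "request" in context' in the updated tag only succeeds when templates are rendered with the request context processor enabled. A minimal settings excerpt showing that prerequisite (assuming the Django 1.8+ TEMPLATES layout; this is not taken from the pinax repository):

TEMPLATES = [{
    "BACKEND": "django.template.backends.django.DjangoTemplates",
    "APP_DIRS": True,
    "OPTIONS": {
        "context_processors": [
            # Exposes context["request"], from which the tag reads the
            # HTTP_REFERER header and the wlc query parameter.
            "django.template.context_processors.request",
        ],
    },
}]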
be3ee0a06ec350431e66efbcbcead28075056f55 | django_extensions/tests/models.py | django_extensions/tests/models.py | from django.db import models
try:
from django_extensions.db.fields.encrypted import EncryptedTextField, EncryptedCharField
except ImportError:
class EncryptedCharField():
def __init__(self, **kwargs):
pass;
class EncryptedTextField():
def __init__(self, **kwargs):
pass;
class Secret(models.Model):
name = EncryptedCharField(blank=True, max_length=255)
text = EncryptedTextField(blank=True)
| from django.db import models
try:
from django_extensions.db.fields.encrypted import EncryptedTextField, EncryptedCharField
except ImportError:
class EncryptedCharField():
def __init__(self, **kwargs):
pass
class EncryptedTextField():
def __init__(self, **kwargs):
pass
class Secret(models.Model):
name = EncryptedCharField(blank=True, max_length=255)
text = EncryptedTextField(blank=True)
| Remove bogus semicolons, thanks justinlilly | Remove bogus semicolons, thanks justinlilly
| Python | mit | django-extensions/django-extensions,levic/django-extensions,linuxmaniac/django-extensions,kevgathuku/django-extensions,ewjoachim/django-extensions,joeyespo/django-extensions,Christophe31/django-extensions,Moulde/django-extensions,nikolas/django-extensions,barseghyanartur/django-extensions,atchariya/django-extensions,frewsxcv/django-extensions,mandx/django-extensions,marctc/django-extensions,ewjoachim/django-extensions,frewsxcv/django-extensions,gdoermann/django-extensions,artscoop/django-extensions,marctc/django-extensions,zefciu/django-extensions,levic/django-extensions,django-extensions/django-extensions,helenst/django-extensions,t1m0thy/django-extensions,atchariya/django-extensions,Moulde/django-extensions,JoseTomasTocino/django-extensions,barseghyanartur/django-extensions,jpadilla/django-extensions,helenst/django-extensions,rodo/django-extensions,haakenlid/django-extensions,Christophe31/django-extensions,mandx/django-extensions,github-account-because-they-want-it/django-extensions,fusionbox/django-extensions,django-extensions/django-extensions,maroux/django-extensions,ewjoachim/django-extensions,zefciu/django-extensions,t1m0thy/django-extensions,haakenlid/django-extensions,bionikspoon/django-extensions,gvangool/django-extensions,JoseTomasTocino/django-extensions,atchariya/django-extensions,dpetzold/django-extensions,zefciu/django-extensions,artscoop/django-extensions,github-account-because-they-want-it/django-extensions,lamby/django-extensions,linuxmaniac/django-extensions,haakenlid/django-extensions,barseghyanartur/django-extensions,nikolas/django-extensions,lamby/django-extensions,Moulde/django-extensions,mandx/django-extensions,helenst/django-extensions,dpetzold/django-extensions,jpadilla/django-extensions,VishvajitP/django-extensions,ctrl-alt-d/django-extensions,t1m0thy/django-extensions,bionikspoon/django-extensions,linuxmaniac/django-extensions,JoseTomasTocino/django-extensions,dpetzold/django-extensions,levic/django-extensions,kevgathuku/django-extensions,gdoermann/django-extensions,artscoop/django-extensions,gvangool/django-extensions,nikolas/django-extensions,ctrl-alt-d/django-extensions,fusionbox/django-extensions,joeyespo/django-extensions,maroux/django-extensions,ctrl-alt-d/django-extensions,VishvajitP/django-extensions,marctc/django-extensions,lamby/django-extensions,bionikspoon/django-extensions,joeyespo/django-extensions,github-account-because-they-want-it/django-extensions,kevgathuku/django-extensions,VishvajitP/django-extensions,maroux/django-extensions,gvangool/django-extensions,rodo/django-extensions,rodo/django-extensions,jpadilla/django-extensions,frewsxcv/django-extensions | <REPLACE_OLD> pass;
<REPLACE_NEW> pass
<REPLACE_END> <REPLACE_OLD> pass;
class <REPLACE_NEW> pass
class <REPLACE_END> <|endoftext|> from django.db import models
try:
from django_extensions.db.fields.encrypted import EncryptedTextField, EncryptedCharField
except ImportError:
class EncryptedCharField():
def __init__(self, **kwargs):
pass
class EncryptedTextField():
def __init__(self, **kwargs):
pass
class Secret(models.Model):
name = EncryptedCharField(blank=True, max_length=255)
text = EncryptedTextField(blank=True)
| Remove bogus semicolons, thanks justinlilly
from django.db import models
try:
from django_extensions.db.fields.encrypted import EncryptedTextField, EncryptedCharField
except ImportError:
class EncryptedCharField():
def __init__(self, **kwargs):
pass;
class EncryptedTextField():
def __init__(self, **kwargs):
pass;
class Secret(models.Model):
name = EncryptedCharField(blank=True, max_length=255)
text = EncryptedTextField(blank=True)
|
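The test models above stay importable without the optional crypto backend by stubbing the field classes. The same pattern in a self-contained form (the imported module name below is a hypothetical stand-in):

try:
    from some_optional_crypto_lib import EncryptedCharField  # hypothetical dependency
except ImportError:
    class EncryptedCharField:
        """No-op stand-in used when the optional backend is absent."""
        def __init__(self, **kwargs):
            pass

field = EncryptedCharField(blank=True, max_length=255)  # works either way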
e233352d5016c2b57ec4edbc4366ca4347bc1d98 | demo/start_servers.py | demo/start_servers.py | """
start_servers.py
<Purpose>
A simple script to start the three cloud-side Uptane servers:
the Director (including its per-vehicle repositories)
the Image Repository
the Timeserver
To run the demo services in non-interactive mode, run:
python start_servers.py
To run the demo services in interactive mode, run:
python -i -c "from demo.start_servers import *; main()"
In either mode, the demo services will respond to commands sent via XMLRPC.
"""
import threading
import demo
import demo.demo_timeserver as dt
import demo.demo_director as dd
import demo.demo_image_repo as di
from six.moves import xmlrpc_server
def main():
# Start demo Image Repo, including http server and xmlrpc listener (for
# webdemo)
di.clean_slate()
# Start demo Director, including http server and xmlrpc listener (for
# manifests, registrations, and webdemo)
dd.clean_slate()
# Start demo Timeserver, including xmlrpc listener (for requests from demo
# Primary)
dt.listen()
if __name__ == '__main__':
main()
| Create a single script to run the three demo services | DEMO: Create a single script to run the three demo services
(image repo, director, and timeserver)
| Python | mit | uptane/uptane,awwad/uptane,awwad/uptane,uptane/uptane | <INSERT> """
start_servers.py
<Purpose>
<INSERT_END> <INSERT> A simple script to start the three cloud-side Uptane servers:
the Director (including its per-vehicle repositories)
the Image Repository
the Timeserver
To run the demo services in non-interactive mode, run:
python start_servers.py
To run the demo services in interactive mode, run:
python -i -c "from demo.start_servers import *; main()"
In either mode, the demo services will respond to commands sent via XMLRPC.
"""
import threading
import demo
import demo.demo_timeserver as dt
import demo.demo_director as dd
import demo.demo_image_repo as di
from six.moves import xmlrpc_server
def main():
# Start demo Image Repo, including http server and xmlrpc listener (for
# webdemo)
di.clean_slate()
# Start demo Director, including http server and xmlrpc listener (for
# manifests, registrations, and webdemo)
dd.clean_slate()
# Start demo Timeserver, including xmlrpc listener (for requests from demo
# Primary)
dt.listen()
if __name__ == '__main__':
main()
<INSERT_END> <|endoftext|> """
start_servers.py
<Purpose>
A simple script to start the three cloud-side Uptane servers:
the Director (including its per-vehicle repositories)
the Image Repository
the Timeserver
To run the demo services in non-interactive mode, run:
python start_servers.py
To run the demo services in interactive mode, run:
python -i -c "from demo.start_servers import *; main()"
In either mode, the demo services will respond to commands sent via XMLRPC.
"""
import threading
import demo
import demo.demo_timeserver as dt
import demo.demo_director as dd
import demo.demo_image_repo as di
from six.moves import xmlrpc_server
def main():
# Start demo Image Repo, including http server and xmlrpc listener (for
# webdemo)
di.clean_slate()
# Start demo Director, including http server and xmlrpc listener (for
# manifests, registrations, and webdemo)
dd.clean_slate()
# Start demo Timeserver, including xmlrpc listener (for requests from demo
# Primary)
dt.listen()
if __name__ == '__main__':
main()
| DEMO: Create a single script to run the three demo services
(image repo, director, and timeserver)
|
|
a96046b4b7372cb942509b5e9778d54124319617 | bin/rofi_menu.py | bin/rofi_menu.py | from typing import Dict, Callable
from rofi import Rofi
def menu(r: Rofi, prompt: str, options: Dict[str, Callable], *args, **kwargs):
"""
Create a menu using rofi to execute on of some options, all args not documented
are passed directly into Rofi.select
:param options: A dict of strings to show on the menu the action they execute
:param rofi: the rofi instance to use
:returns: The name of the option selected and the value returned by the
function executed, or, if none is selected, None
"""
index, key = r.select(prompt, options.keys(), *args, **kwargs)
if key == -1: return None
name, action = list(options.items())[index]
result = action()
return name, action
| Add a common helper for selection menus | [rofi] Add a common helper for selection menus
| Python | mit | mpardalos/dotfiles,mpardalos/dotfiles | <INSERT> from typing import Dict, Callable
from rofi import Rofi
def menu(r: Rofi, prompt: str, options: Dict[str, Callable], *args, **kwargs):
<INSERT_END> <INSERT> """
Create a menu using rofi to execute on of some options, all args not documented
are passed directly into Rofi.select
:param options: A dict of strings to show on the menu the action they execute
:param rofi: the rofi instance to use
:returns: The name of the option selected and the value returned by the
function executed, or, if none is selected, None
"""
index, key = r.select(prompt, options.keys(), *args, **kwargs)
if key == -1: return None
name, action = list(options.items())[index]
result = action()
return name, action
<INSERT_END> <|endoftext|> from typing import Dict, Callable
from rofi import Rofi
def menu(r: Rofi, prompt: str, options: Dict[str, Callable], *args, **kwargs):
"""
Create a menu using rofi to execute on of some options, all args not documented
are passed directly into Rofi.select
:param options: A dict of strings to show on the menu the action they execute
:param rofi: the rofi instance to use
:returns: The name of the option selected and the value returned by the
function executed, or, if none is selected, None
"""
index, key = r.select(prompt, options.keys(), *args, **kwargs)
if key == -1: return None
name, action = list(options.items())[index]
result = action()
return name, action
| [rofi] Add a common helper for selection menus
|
|
e435592d64dbd4f75a7cc9d1ac8bb17ab4177a2b | erpnext/patches/v4_2/default_website_style.py | erpnext/patches/v4_2/default_website_style.py | import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
| import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
frappe.reload_doc('website', 'doctype', 'style_settings')
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
| Fix default website style patch (reload doc) | [minor] Fix default website style patch (reload doc) | Python | agpl-3.0 | gangadharkadam/saloon_erp,hatwar/buyback-erpnext,gangadharkadam/v6_erp,indictranstech/Das_Erpnext,gangadharkadam/vlinkerp,shft117/SteckerApp,sheafferusa/erpnext,mahabuber/erpnext,hernad/erpnext,suyashphadtare/gd-erp,gangadharkadam/letzerp,indictranstech/internal-erpnext,indictranstech/buyback-erp,4commerce-technologies-AG/erpnext,indictranstech/buyback-erp,shitolepriya/test-erp,rohitwaghchaure/New_Theme_Erp,indictranstech/trufil-erpnext,gangadharkadam/v5_erp,mahabuber/erpnext,indictranstech/Das_Erpnext,suyashphadtare/vestasi-erp-jan-end,gangadhar-kadam/verve_test_erp,SPKian/Testing2,mbauskar/omnitech-demo-erpnext,gsnbng/erpnext,hernad/erpnext,sheafferusa/erpnext,mbauskar/phrerp,indictranstech/trufil-erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/huntercamp_erpnext,netfirms/erpnext,MartinEnder/erpnext-de,gangadhar-kadam/latestchurcherp,hatwar/Das_erpnext,indictranstech/fbd_erpnext,gangadharkadam/saloon_erp,hanselke/erpnext-1,njmube/erpnext,Tejal011089/trufil-erpnext,fuhongliang/erpnext,gangadhar-kadam/helpdesk-erpnext,SPKian/Testing2,pombredanne/erpnext,sagar30051991/ozsmart-erp,gangadharkadam/verveerp,gangadharkadam/v4_erp,gangadhar-kadam/verve_test_erp,mbauskar/phrerp,indictranstech/focal-erpnext,rohitwaghchaure/GenieManager-erpnext,rohitwaghchaure/New_Theme_Erp,mbauskar/Das_Erpnext,mbauskar/helpdesk-erpnext,suyashphadtare/sajil-erp,gangadhar-kadam/verve_test_erp,susuchina/ERPNEXT,gangadharkadam/verveerp,ShashaQin/erpnext,netfirms/erpnext,njmube/erpnext,SPKian/Testing,mbauskar/Das_Erpnext,mbauskar/sapphire-erpnext,gangadhar-kadam/verve_erp,suyashphadtare/vestasi-erp-1,gangadharkadam/vlinkerp,Tejal011089/paypal_erpnext,indictranstech/reciphergroup-erpnext,treejames/erpnext,rohitwaghchaure/erpnext_smart,gangadharkadam/v4_erp,tmimori/erpnext,suyashphadtare/vestasi-erp-final,mahabuber/erpnext,indictranstech/osmosis-erpnext,hatwar/focal-erpnext,hatwar/buyback-erpnext,treejames/erpnext,suyashphadtare/vestasi-update-erp,gangadharkadam/contributionerp,geekroot/erpnext,shitolepriya/test-erp,Tejal011089/trufil-erpnext,ThiagoGarciaAlves/erpnext,rohitwaghchaure/erpnext_smart,hernad/erpnext,ThiagoGarciaAlves/erpnext,rohitwaghchaure/digitales_erpnext,rohitwaghchaure/erpnext-receipher,BhupeshGupta/erpnext,gangadhar-kadam/helpdesk-erpnext,anandpdoshi/erpnext,meisterkleister/erpnext,suyashphadtare/gd-erp,mbauskar/omnitech-erpnext,susuchina/ERPNEXT,pombredanne/erpnext,Tejal011089/fbd_erpnext,gangadhar-kadam/verve_test_erp,Tejal011089/trufil-erpnext,indictranstech/erpnext,gsnbng/erpnext,indictranstech/trufil-erpnext,indictranstech/vestasi-erpnext,Suninus/erpnext,mbauskar/alec_frappe5_erpnext,dieface/erpnext,indictranstech/focal-erpnext,aruizramon/alec_erpnext,gangadharkadam/saloon_erp_install,sagar30051991/ozsmart-erp,indictranstech/vestasi-erpnext,mbauskar/alec_frappe5_erpnext,indictranstech/biggift-erpnext,gmarke/erpnext,gangadhar-kadam/latestchurcherp,gangadhar-kadam/verve-erp,ShashaQin/erpnext,suyashphadtare/gd-erp,suyashphadtare/sajil-final-erp,geekroot/erpnext,Drooids/erpnext,geekroot/erpnext,gangadharkadam/saloon_erp_install,indictranstech/focal-erpnext,suyashphadtare/sajil-erp,gangadharkadam/verveerp,saurabh6790/test-erp,hanselke/erpnext-1,hatwar/Das_erpnext,gangadharkadam/letzerp,gangadhar-kadam/helpdesk-erpnext,indictranstech/Das_Erpnext,mbauskar/Das_Erpnext,mbauskar/helpdesk-erpnext,Tejal011089/digitales_erpnext,anandpdoshi/erpnext,indictranstech/osmosis-erpnext,gmarke/er
pnext,gangadharkadam/v4_erp,MartinEnder/erpnext-de,mbauskar/omnitech-erpnext,Tejal011089/fbd_erpnext,saurabh6790/test-erp,indictranstech/buyback-erp,mbauskar/phrerp,indictranstech/phrerp,indictranstech/trufil-erpnext,Suninus/erpnext,gsnbng/erpnext,rohitwaghchaure/digitales_erpnext,Drooids/erpnext,fuhongliang/erpnext,sheafferusa/erpnext,gangadharkadam/saloon_erp,mbauskar/sapphire-erpnext,Tejal011089/huntercamp_erpnext,indictranstech/internal-erpnext,mbauskar/sapphire-erpnext,indictranstech/fbd_erpnext,hanselke/erpnext-1,sheafferusa/erpnext,gangadharkadam/v6_erp,MartinEnder/erpnext-de,4commerce-technologies-AG/erpnext,indictranstech/biggift-erpnext,mbauskar/helpdesk-erpnext,anandpdoshi/erpnext,gangadharkadam/vlinkerp,indictranstech/reciphergroup-erpnext,gangadhar-kadam/verve-erp,mbauskar/helpdesk-erpnext,geekroot/erpnext,gangadharkadam/v5_erp,Tejal011089/digitales_erpnext,indictranstech/tele-erpnext,ThiagoGarciaAlves/erpnext,suyashphadtare/vestasi-erp-final,Tejal011089/huntercamp_erpnext,indictranstech/reciphergroup-erpnext,suyashphadtare/vestasi-erp-jan-end,indictranstech/biggift-erpnext,rohitwaghchaure/GenieManager-erpnext,mbauskar/omnitech-erpnext,suyashphadtare/vestasi-erp-final,indictranstech/vestasi-erpnext,saurabh6790/test-erp,treejames/erpnext,pawaranand/phrerp,gangadhar-kadam/verve_erp,mbauskar/sapphire-erpnext,gangadharkadam/letzerp,aruizramon/alec_erpnext,suyashphadtare/vestasi-update-erp,gangadharkadam/contributionerp,fuhongliang/erpnext,netfirms/erpnext,indictranstech/osmosis-erpnext,SPKian/Testing,meisterkleister/erpnext,gangadhar-kadam/verve_live_erp,tmimori/erpnext,hatwar/focal-erpnext,rohitwaghchaure/GenieManager-erpnext,indictranstech/tele-erpnext,gangadharkadam/saloon_erp_install,indictranstech/fbd_erpnext,pawaranand/phrerp,gangadharkadam/v6_erp,suyashphadtare/sajil-final-erp,indictranstech/buyback-erp,treejames/erpnext,suyashphadtare/test,mbauskar/alec_frappe5_erpnext,suyashphadtare/vestasi-erp-jan-end,SPKian/Testing,tmimori/erpnext,gangadharkadam/v4_erp,suyashphadtare/vestasi-erp-1,indictranstech/phrerp,suyashphadtare/sajil-final-erp,netfirms/erpnext,gmarke/erpnext,BhupeshGupta/erpnext,indictranstech/tele-erpnext,Tejal011089/osmosis_erpnext,hatwar/focal-erpnext,Tejal011089/digitales_erpnext,suyashphadtare/vestasi-erp-jan-end,MartinEnder/erpnext-de,Suninus/erpnext,indictranstech/Das_Erpnext,Tejal011089/osmosis_erpnext,rohitwaghchaure/GenieManager-erpnext,gangadhar-kadam/verve_live_erp,rohitwaghchaure/erpnext-receipher,Tejal011089/digitales_erpnext,shitolepriya/test-erp,shft117/SteckerApp,indictranstech/osmosis-erpnext,Tejal011089/fbd_erpnext,gangadhar-kadam/verve_live_erp,hatwar/buyback-erpnext,shft117/SteckerApp,tmimori/erpnext,dieface/erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/v5_erp,indictranstech/phrerp,gangadhar-kadam/verve_erp,indictranstech/internal-erpnext,hatwar/buyback-erpnext,gangadharkadam/verveerp,njmube/erpnext,rohitwaghchaure/erpnext-receipher,Aptitudetech/ERPNext,aruizramon/alec_erpnext,gsnbng/erpnext,susuchina/ERPNEXT,gangadharkadam/v5_erp,hanselke/erpnext-1,rohitwaghchaure/digitales_erpnext,rohitwaghchaure/digitales_erpnext,Drooids/erpnext,susuchina/ERPNEXT,njmube/erpnext,mbauskar/omnitech-demo-erpnext,gangadharkadam/v6_erp,dieface/erpnext,gangadharkadam/contributionerp,ShashaQin/erpnext,saurabh6790/test-erp,suyashphadtare/vestasi-erp-1,SPKian/Testing,suyashphadtare/test,rohitwaghchaure/New_Theme_Erp,hernad/erpnext,rohitwaghchaure/New_Theme_Erp,meisterkleister/erpnext,hatwar/Das_erpnext,4commerce-technologies-AG/erpnext,Tejal011089/osmosis_erpne
xt,mahabuber/erpnext,suyashphadtare/vestasi-update-erp,pombredanne/erpnext,Tejal011089/trufil-erpnext,gangadharkadam/saloon_erp_install,SPKian/Testing2,mbauskar/Das_Erpnext,indictranstech/reciphergroup-erpnext,ThiagoGarciaAlves/erpnext,gangadharkadam/contributionerp,suyashphadtare/gd-erp,shitolepriya/test-erp,gangadhar-kadam/verve_erp,gangadhar-kadam/helpdesk-erpnext,Tejal011089/fbd_erpnext,pombredanne/erpnext,dieface/erpnext,hatwar/focal-erpnext,ShashaQin/erpnext,indictranstech/vestasi-erpnext,gangadhar-kadam/latestchurcherp,gangadhar-kadam/verve-erp,gangadhar-kadam/latestchurcherp,indictranstech/biggift-erpnext,fuhongliang/erpnext,suyashphadtare/sajil-erp,shft117/SteckerApp,indictranstech/erpnext,mbauskar/phrerp,BhupeshGupta/erpnext,indictranstech/tele-erpnext,SPKian/Testing2,aruizramon/alec_erpnext,indictranstech/fbd_erpnext,rohitwaghchaure/erpnext_smart,hatwar/Das_erpnext,pawaranand/phrerp,pawaranand/phrerp,Tejal011089/osmosis_erpnext,sagar30051991/ozsmart-erp,anandpdoshi/erpnext,suyashphadtare/test,indictranstech/phrerp,indictranstech/erpnext,Tejal011089/paypal_erpnext,indictranstech/internal-erpnext,gangadharkadam/saloon_erp,gmarke/erpnext,Tejal011089/paypal_erpnext,sagar30051991/ozsmart-erp,BhupeshGupta/erpnext,Drooids/erpnext,Suninus/erpnext,gangadharkadam/vlinkerp,gangadhar-kadam/verve_live_erp,meisterkleister/erpnext,mbauskar/omnitech-erpnext,gangadharkadam/letzerp,mbauskar/omnitech-demo-erpnext,rohitwaghchaure/erpnext-receipher,indictranstech/focal-erpnext,Tejal011089/paypal_erpnext,Tejal011089/huntercamp_erpnext,indictranstech/erpnext | <REPLACE_OLD> execute():
style_settings <REPLACE_NEW> execute():
frappe.reload_doc('website', 'doctype', 'style_settings')
style_settings <REPLACE_END> <|endoftext|> import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
frappe.reload_doc('website', 'doctype', 'style_settings')
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
| [minor] Fix default website style patch (reload doc)
import frappe
from frappe.templates.pages.style_settings import default_properties
def execute():
style_settings = frappe.get_doc("Style Settings", "Style Settings")
if not style_settings.apply_style:
style_settings.update(default_properties)
style_settings.apply_style = 1
style_settings.save()
|
182b94f777b1743671b706c939ce14f89c31efca | lint/queue.py | lint/queue.py | from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
MIN_DELAY = 0.1
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
return persist.settings.get('delay', MIN_DELAY)
queue = Daemon()
| from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
return persist.settings.get('delay')
queue = Daemon()
| Remove MIN_DELAY bc a default setting is guaranteed | Remove MIN_DELAY bc a default setting is guaranteed
| Python | mit | SublimeLinter/SublimeLinter3,SublimeLinter/SublimeLinter3 | <REPLACE_OLD> hit_time
MIN_DELAY = 0.1
def <REPLACE_NEW> hit_time
def <REPLACE_END> <REPLACE_OLD> persist.settings.get('delay', MIN_DELAY)
queue <REPLACE_NEW> persist.settings.get('delay')
queue <REPLACE_END> <|endoftext|> from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
return persist.settings.get('delay')
queue = Daemon()
| Remove MIN_DELAY bc a default setting is guaranteed
from . import persist
import time
import threading
# Map from view_id to threading.Timer objects
timers = {}
# For compatibility this is a class with unchanged API from SL3.
class Daemon:
def start(self, callback):
self._callback = callback
def hit(self, view):
assert self._callback, "Queue: Can't hit before start."
vid = view.id()
delay = get_delay() # [seconds]
return queue_lint(vid, delay, self._callback)
def queue_lint(vid, delay, callback):
hit_time = time.monotonic()
def worker():
callback(vid, hit_time)
try:
timers[vid].cancel()
except KeyError:
pass
timers[vid] = timer = threading.Timer(delay, worker)
timer.start()
return hit_time
MIN_DELAY = 0.1
def get_delay():
"""Return the delay between a lint request and when it will be processed.
If the lint mode is not background, there is no delay. Otherwise, if
a "delay" setting is not available in any of the settings, MIN_DELAY is used.
"""
if persist.settings.get('lint_mode') != 'background':
return 0
return persist.settings.get('delay', MIN_DELAY)
queue = Daemon()
|
8337a3912533dfb7d686a453c53adcda783a50a4 | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
from sys import version_info
version = "1.3"
deps = ["pbs", "requests>=0.12.1"]
# require argparse on Python <2.7 and <3.2
if (version_info[0] == 2 and version_info[1] < 7) or \
(version_info[0] == 3 and version_info[1] < 2):
deps.append("argparse")
setup(name="livestreamer",
version=version,
description="CLI program that launches streams from various streaming services in a custom video player",
url="https://github.com/chrippa/livestreamer",
author="Christopher Rosell",
author_email="[email protected]",
license="BSD",
packages=["livestreamer", "livestreamer/plugins"],
package_dir={'': 'src'},
entry_points={
"console_scripts": ['livestreamer=livestreamer.cli:main']
},
install_requires=deps,
classifiers=["Operating System :: POSIX",
"Operating System :: Microsoft :: Windows",
"Environment :: Console",
"Development Status :: 5 - Production/Stable",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Multimedia :: Sound/Audio",
"Topic :: Utilities"]
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
from sys import version_info
version = "1.3"
deps = ["pbs", "requests>=0.12.1"]
# require argparse on Python <2.7 and <3.2
if (version_info[0] == 2 and version_info[1] < 7) or \
(version_info[0] == 3 and version_info[1] < 2):
deps.append("argparse")
setup(name="livestreamer",
version=version,
description="CLI program that launches streams from various streaming services in a custom video player",
url="https://github.com/chrippa/livestreamer",
author="Christopher Rosell",
author_email="[email protected]",
license="BSD",
packages=["livestreamer", "livestreamer.stream", "livestreamer.plugins"],
package_dir={'': 'src'},
entry_points={
"console_scripts": ['livestreamer=livestreamer.cli:main']
},
install_requires=deps,
classifiers=["Operating System :: POSIX",
"Operating System :: Microsoft :: Windows",
"Environment :: Console",
"Development Status :: 5 - Production/Stable",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Multimedia :: Sound/Audio",
"Topic :: Utilities"]
)
| Add .stream to packages list. | Add .stream to packages list.
| Python | bsd-2-clause | gravyboat/streamlink,javiercantero/streamlink,melmorabity/streamlink,asermax/livestreamer,lyhiving/livestreamer,chhe/livestreamer,derrod/livestreamer,gravyboat/streamlink,okaywit/livestreamer,Dobatymo/livestreamer,fishscene/streamlink,sbstp/streamlink,programming086/livestreamer,melmorabity/streamlink,fishscene/streamlink,wolftankk/livestreamer,Dobatymo/livestreamer,blxd/livestreamer,chrisnicholls/livestreamer,Masaz-/livestreamer,gtmanfred/livestreamer,wolftankk/livestreamer,okaywit/livestreamer,breunigs/livestreamer,blxd/livestreamer,beardypig/streamlink,streamlink/streamlink,sbstp/streamlink,Klaudit/livestreamer,charmander/livestreamer,Feverqwe/livestreamer,lyhiving/livestreamer,beardypig/streamlink,intact/livestreamer,gtmanfred/livestreamer,bastimeyer/streamlink,wlerin/streamlink,ethanhlc/streamlink,flijloku/livestreamer,chhe/streamlink,flijloku/livestreamer,jtsymon/livestreamer,back-to/streamlink,derrod/livestreamer,programming086/livestreamer,hmit/livestreamer,bastimeyer/streamlink,streamlink/streamlink,Feverqwe/livestreamer,caorong/livestreamer,ethanhlc/streamlink,intact/livestreamer,chhe/streamlink,chhe/livestreamer,mmetak/streamlink,chrippa/livestreamer,hmit/livestreamer,javiercantero/streamlink,jtsymon/livestreamer,back-to/streamlink,charmander/livestreamer,Saturn/livestreamer,Masaz-/livestreamer,breunigs/livestreamer,caorong/livestreamer,Saturn/livestreamer,Klaudit/livestreamer,mmetak/streamlink,chrippa/livestreamer,wlerin/streamlink | <REPLACE_OLD> "livestreamer/plugins"],
<REPLACE_NEW> "livestreamer.stream", "livestreamer.plugins"],
<REPLACE_END> <|endoftext|> #!/usr/bin/env python
from setuptools import setup, find_packages
from sys import version_info
version = "1.3"
deps = ["pbs", "requests>=0.12.1"]
# require argparse on Python <2.7 and <3.2
if (version_info[0] == 2 and version_info[1] < 7) or \
(version_info[0] == 3 and version_info[1] < 2):
deps.append("argparse")
setup(name="livestreamer",
version=version,
description="CLI program that launches streams from various streaming services in a custom video player",
url="https://github.com/chrippa/livestreamer",
author="Christopher Rosell",
author_email="[email protected]",
license="BSD",
packages=["livestreamer", "livestreamer.stream", "livestreamer.plugins"],
package_dir={'': 'src'},
entry_points={
"console_scripts": ['livestreamer=livestreamer.cli:main']
},
install_requires=deps,
classifiers=["Operating System :: POSIX",
"Operating System :: Microsoft :: Windows",
"Environment :: Console",
"Development Status :: 5 - Production/Stable",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Multimedia :: Sound/Audio",
"Topic :: Utilities"]
)
| Add .stream to packages list.
#!/usr/bin/env python
from setuptools import setup, find_packages
from sys import version_info
version = "1.3"
deps = ["pbs", "requests>=0.12.1"]
# require argparse on Python <2.7 and <3.2
if (version_info[0] == 2 and version_info[1] < 7) or \
(version_info[0] == 3 and version_info[1] < 2):
deps.append("argparse")
setup(name="livestreamer",
version=version,
description="CLI program that launches streams from various streaming services in a custom video player",
url="https://github.com/chrippa/livestreamer",
author="Christopher Rosell",
author_email="[email protected]",
license="BSD",
packages=["livestreamer", "livestreamer/plugins"],
package_dir={'': 'src'},
entry_points={
"console_scripts": ['livestreamer=livestreamer.cli:main']
},
install_requires=deps,
classifiers=["Operating System :: POSIX",
"Operating System :: Microsoft :: Windows",
"Environment :: Console",
"Development Status :: 5 - Production/Stable",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Multimedia :: Sound/Audio",
"Topic :: Utilities"]
)
|
0f427ed334f8a58e888872d60419709cfd6f41c3 | var/spack/repos/builtin/packages/nccmp/package.py | var/spack/repos/builtin/packages/nccmp/package.py | from spack import *
import os
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
os.environ['FC'] = os.environ['F90']
del os.environ['F90']
try:
os.environ['FCFLAGS'] = os.environ['F90FLAGS']
del os.environ['F90FLAGS']
except KeyError: # There are no flags
pass
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
| from spack import *
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
env.pop('F90', None)
env.pop('F90FLAGS', None)
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
| Tweak nccmp to be more spack-compatible. | Tweak nccmp to be more spack-compatible.
- Spack doesn't set F90, but it confuses the nccmp build. Just remove
it from the environment.
- TODO: should build environment unset this variable?
| Python | lgpl-2.1 | skosukhin/spack,matthiasdiener/spack,EmreAtes/spack,iulian787/spack,mfherbst/spack,matthiasdiener/spack,iulian787/spack,tmerrick1/spack,TheTimmy/spack,iulian787/spack,EmreAtes/spack,TheTimmy/spack,krafczyk/spack,LLNL/spack,lgarren/spack,TheTimmy/spack,iulian787/spack,iulian787/spack,tmerrick1/spack,lgarren/spack,matthiasdiener/spack,tmerrick1/spack,skosukhin/spack,EmreAtes/spack,lgarren/spack,mfherbst/spack,tmerrick1/spack,lgarren/spack,lgarren/spack,matthiasdiener/spack,krafczyk/spack,skosukhin/spack,matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack,TheTimmy/spack,TheTimmy/spack,LLNL/spack,LLNL/spack,skosukhin/spack,mfherbst/spack,skosukhin/spack,LLNL/spack,krafczyk/spack,tmerrick1/spack,EmreAtes/spack,mfherbst/spack,LLNL/spack,krafczyk/spack | <REPLACE_OLD> *
import os
class <REPLACE_NEW> *
class <REPLACE_END> <REPLACE_OLD> os.environ['FC'] = os.environ['F90']
<REPLACE_NEW> env.pop('F90', None)
<REPLACE_END> <REPLACE_OLD> del os.environ['F90']
try:
os.environ['FCFLAGS'] = os.environ['F90FLAGS']
del os.environ['F90FLAGS']
except KeyError: # There are no flags
pass
<REPLACE_NEW> env.pop('F90FLAGS', None)
<REPLACE_END> <REPLACE_OLD> prefix)
<REPLACE_NEW> prefix)
<REPLACE_END> <|endoftext|> from spack import *
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
env.pop('F90', None)
env.pop('F90FLAGS', None)
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
| Tweak nccmp to be more spack-compatible.
- Spack doesn't set F90, but it confuses the nccmp build. Just remove
it from the environment.
- TODO: should build environment unset this variable?
from spack import *
import os
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
depends_on('netcdf')
def install(self, spec, prefix):
# Configure says: F90 and F90FLAGS are replaced by FC and
# FCFLAGS respectively in this configure, please unset
# F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
# again.
os.environ['FC'] = os.environ['F90']
del os.environ['F90']
try:
os.environ['FCFLAGS'] = os.environ['F90FLAGS']
del os.environ['F90FLAGS']
except KeyError: # There are no flags
pass
configure('--prefix=%s' % prefix)
make()
make("check")
make("install")
|
2a30afaea9d4cb1d704fd5ec0d78a946770c1c18 | scripts/download-jamendo.py | scripts/download-jamendo.py | #!/usr/bin/env python
# Jamendo database dumps can be fetched from: http://img.jamendo.com/data/dbdump_artistalbumtrack.xml.gz
import xml.etree.cElementTree as ElementTree
import sys, gzip, time, os.path, urllib
class DownloadJamendo:
def __init__(self, destination):
if not os.path.exists(destination):
os.mkdir(destination)
self.destination = destination
def parse(self, dump):
for event, elem in ElementTree.iterparse(dump):
if elem.tag == "artist":
self.proc_artist(elem)
def proc_artist(self, elem):
for artist_e in elem.getchildren():
if artist_e.tag == "Albums":
for album_e in artist_e.getchildren():
self.proc_album(album_e)
def proc_album(self, elem):
for album_e in elem.getchildren():
if album_e.tag == "Tracks":
for track_e in album_e.getchildren():
self.proc_track(track_e)
def proc_track(self, elem):
track_id = None
track_license = None
for track_e in elem.getchildren():
if track_e.tag == "id":
track_id = int(track_e.text)
if track_e.tag == "license":
track_license = track_e.text
if track_id and track_license:
if self.free_license(track_license):
trackurl = "http://api.jamendo.com/get2/stream/track/redirect/?id=%d&streamencoding=ogg2" % track_id
trackfile = os.path.join(self.destination, "%d.ogg" % track_id)
if os.path.exists(trackfile):
print "Already downloaded track %d" % track_id
else:
print "Downloading %s to %s" % (trackurl, trackfile)
urllib.urlretrieve(trackurl, trackfile)
def free_license(self, license):
return ("http://creativecommons.org/licenses/by-sa" in license or "http://creativecommons.org/licenses/by/" in license or "http://artlibre.org/licence.php/lal.html" in license)
if __name__ == "__main__":
if len(sys.argv) != 3:
print "Usage: download-jamendo.py <database dump> <destination>"
sys.exit(1)
if sys.argv[1][-2:] == "gz":
dump = gzip.open(sys.argv[1], "r")
else:
dump = open(sys.argv[1], "r")
downloader = DownloadJamendo(sys.argv[2])
downloader.parse(dump)
| Add a script to download all the fully free tracks from Jamendo (as Ogg Vorbis) | Add a script to download all the fully free tracks from Jamendo (as Ogg Vorbis)
| Python | agpl-3.0 | foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm | <REPLACE_OLD> <REPLACE_NEW> #!/usr/bin/env python
# Jamendo database dumps can be fetched from: http://img.jamendo.com/data/dbdump_artistalbumtrack.xml.gz
import xml.etree.cElementTree as ElementTree
import sys, gzip, time, os.path, urllib
class DownloadJamendo:
def __init__(self, destination):
if not os.path.exists(destination):
os.mkdir(destination)
self.destination = destination
def parse(self, dump):
for event, elem in ElementTree.iterparse(dump):
if elem.tag == "artist":
self.proc_artist(elem)
def proc_artist(self, elem):
for artist_e in elem.getchildren():
if artist_e.tag == "Albums":
for album_e in artist_e.getchildren():
self.proc_album(album_e)
def proc_album(self, elem):
for album_e in elem.getchildren():
if album_e.tag == "Tracks":
for track_e in album_e.getchildren():
self.proc_track(track_e)
def proc_track(self, elem):
track_id = None
track_license = None
for track_e in elem.getchildren():
if track_e.tag == "id":
track_id = int(track_e.text)
if track_e.tag == "license":
track_license = track_e.text
if track_id and track_license:
if self.free_license(track_license):
trackurl = "http://api.jamendo.com/get2/stream/track/redirect/?id=%d&streamencoding=ogg2" % track_id
trackfile = os.path.join(self.destination, "%d.ogg" % track_id)
if os.path.exists(trackfile):
print "Already downloaded track %d" % track_id
else:
print "Downloading %s to %s" % (trackurl, trackfile)
urllib.urlretrieve(trackurl, trackfile)
def free_license(self, license):
return ("http://creativecommons.org/licenses/by-sa" in license or "http://creativecommons.org/licenses/by/" in license or "http://artlibre.org/licence.php/lal.html" in license)
if __name__ == "__main__":
if len(sys.argv) != 3:
print "Usage: download-jamendo.py <database dump> <destination>"
sys.exit(1)
if sys.argv[1][-2:] == "gz":
dump = gzip.open(sys.argv[1], "r")
else:
dump = open(sys.argv[1], "r")
downloader = DownloadJamendo(sys.argv[2])
downloader.parse(dump)
<REPLACE_END> <|endoftext|> #!/usr/bin/env python
# Jamendo database dumps can be fetched from: http://img.jamendo.com/data/dbdump_artistalbumtrack.xml.gz
import xml.etree.cElementTree as ElementTree
import sys, gzip, time, os.path, urllib
class DownloadJamendo:
def __init__(self, destination):
if not os.path.exists(destination):
os.mkdir(destination)
self.destination = destination
def parse(self, dump):
for event, elem in ElementTree.iterparse(dump):
if elem.tag == "artist":
self.proc_artist(elem)
def proc_artist(self, elem):
for artist_e in elem.getchildren():
if artist_e.tag == "Albums":
for album_e in artist_e.getchildren():
self.proc_album(album_e)
def proc_album(self, elem):
for album_e in elem.getchildren():
if album_e.tag == "Tracks":
for track_e in album_e.getchildren():
self.proc_track(track_e)
def proc_track(self, elem):
track_id = None
track_license = None
for track_e in elem.getchildren():
if track_e.tag == "id":
track_id = int(track_e.text)
if track_e.tag == "license":
track_license = track_e.text
if track_id and track_license:
if self.free_license(track_license):
trackurl = "http://api.jamendo.com/get2/stream/track/redirect/?id=%d&streamencoding=ogg2" % track_id
trackfile = os.path.join(self.destination, "%d.ogg" % track_id)
if os.path.exists(trackfile):
print "Already downloaded track %d" % track_id
else:
print "Downloading %s to %s" % (trackurl, trackfile)
urllib.urlretrieve(trackurl, trackfile)
def free_license(self, license):
return ("http://creativecommons.org/licenses/by-sa" in license or "http://creativecommons.org/licenses/by/" in license or "http://artlibre.org/licence.php/lal.html" in license)
if __name__ == "__main__":
if len(sys.argv) != 3:
print "Usage: download-jamendo.py <database dump> <destination>"
sys.exit(1)
if sys.argv[1][-2:] == "gz":
dump = gzip.open(sys.argv[1], "r")
else:
dump = open(sys.argv[1], "r")
downloader = DownloadJamendo(sys.argv[2])
downloader.parse(dump)
| Add a script to download all the fully free tracks from Jamendo (as Ogg Vorbis)
|
|
9d1dc2ef7db2f883e05286edd3865acfdadc19be | django-oracle-drcp/base.py | django-oracle-drcp/base.py | # pylint: disable=W0401
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.oracle.base import *
from django.db.backends.oracle.base import DatabaseWrapper as DjDatabaseWrapper
import cx_Oracle
class DatabaseWrapper(DjDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
default_pool = {
'min': 1,
'max': 2,
'increment': 1,
}
poolconfig = self.settings_dict.get('POOL', default_pool)
if set(pool_config.keys()) != {'min', 'max', 'increment'}:
raise ImproperlyConfigured('POOL database option requires \'min\', \'max\', and \'increment\'')
if not all(isinstance(val, int) for val in pool_config.values()):
raise ImproperlyConfigured('POOL database option values must be numeric')
self.pool = cx_Oracle.SessionPool(
user=self.settings_dict['USER'],
password=self.settings_dict['PASSWORD'],
dsn=self.settings_dict['NAME'], **poolconfig)
def get_new_connection(self, conn_params):
conn_params.update({
'pool': self.pool,
})
return super(DatabaseWrapper, self).get_new_connection(conn_params)
def _close(self):
if self.connection is not None:
with self.wrap_database_errors:
return self.pool.release(self.connection)
| # pylint: disable=W0401
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.oracle.base import *
from django.db.backends.oracle.base import DatabaseWrapper as DjDatabaseWrapper
import cx_Oracle
class DatabaseWrapper(DjDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
default_pool = {
'min': 1,
'max': 2,
'increment': 1,
}
pool_config = self.settings_dict.get('POOL', default_pool)
if set(pool_config.keys()) != {'min', 'max', 'increment'}:
raise ImproperlyConfigured('POOL database option requires \'min\', \'max\', and \'increment\'')
if not all(isinstance(val, int) for val in pool_config.values()):
raise ImproperlyConfigured('POOL database option values must be numeric')
self.pool = cx_Oracle.SessionPool(
user=self.settings_dict['USER'],
password=self.settings_dict['PASSWORD'],
dsn=self.settings_dict['NAME'], **pool_config)
def get_new_connection(self, conn_params):
conn_params.update({
'pool': self.pool,
})
return super(DatabaseWrapper, self).get_new_connection(conn_params)
def _close(self):
if self.connection is not None:
with self.wrap_database_errors:
return self.pool.release(self.connection)
| Change variable name consistently to pool_config | Change variable name consistently to pool_config
| Python | bsd-2-clause | JohnPapps/django-oracle-drcp | <REPLACE_OLD> poolconfig <REPLACE_NEW> pool_config <REPLACE_END> <REPLACE_OLD> **poolconfig)
<REPLACE_NEW> **pool_config)
<REPLACE_END> <|endoftext|> # pylint: disable=W0401
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.oracle.base import *
from django.db.backends.oracle.base import DatabaseWrapper as DjDatabaseWrapper
import cx_Oracle
class DatabaseWrapper(DjDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
default_pool = {
'min': 1,
'max': 2,
'increment': 1,
}
pool_config = self.settings_dict.get('POOL', default_pool)
if set(pool_config.keys()) != {'min', 'max', 'increment'}:
raise ImproperlyConfigured('POOL database option requires \'min\', \'max\', and \'increment\'')
if not all(isinstance(val, int) for val in pool_config.values()):
raise ImproperlyConfigured('POOL database option values must be numeric')
self.pool = cx_Oracle.SessionPool(
user=self.settings_dict['USER'],
password=self.settings_dict['PASSWORD'],
dsn=self.settings_dict['NAME'], **pool_config)
def get_new_connection(self, conn_params):
conn_params.update({
'pool': self.pool,
})
return super(DatabaseWrapper, self).get_new_connection(conn_params)
def _close(self):
if self.connection is not None:
with self.wrap_database_errors:
return self.pool.release(self.connection)
| Change variable name consistently to pool_config
# pylint: disable=W0401
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.oracle.base import *
from django.db.backends.oracle.base import DatabaseWrapper as DjDatabaseWrapper
import cx_Oracle
class DatabaseWrapper(DjDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
default_pool = {
'min': 1,
'max': 2,
'increment': 1,
}
poolconfig = self.settings_dict.get('POOL', default_pool)
if set(pool_config.keys()) != {'min', 'max', 'increment'}:
raise ImproperlyConfigured('POOL database option requires \'min\', \'max\', and \'increment\'')
if not all(isinstance(val, int) for val in pool_config.values()):
raise ImproperlyConfigured('POOL database option values must be numeric')
self.pool = cx_Oracle.SessionPool(
user=self.settings_dict['USER'],
password=self.settings_dict['PASSWORD'],
dsn=self.settings_dict['NAME'], **poolconfig)
def get_new_connection(self, conn_params):
conn_params.update({
'pool': self.pool,
})
return super(DatabaseWrapper, self).get_new_connection(conn_params)
def _close(self):
if self.connection is not None:
with self.wrap_database_errors:
return self.pool.release(self.connection)
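
For context, a sketch of the DATABASES entry that would feed the POOL validation in this record — the engine path, DSN and credentials are illustrative assumptions, not part of the commit:

# settings.py (hypothetical); POOL must hold exactly the integer keys
# 'min', 'max' and 'increment', or the wrapper raises ImproperlyConfigured.
DATABASES = {
    'default': {
        'ENGINE': 'django_oracle_drcp',  # assumed import path for this backend
        'NAME': 'dbhost:1521/orclpdb',   # passed through as the cx_Oracle DSN
        'USER': 'scott',
        'PASSWORD': 'tiger',
        'POOL': {'min': 2, 'max': 10, 'increment': 2},
    },
}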
|
8be84789d561c916b6d37e61537c4d957061a380 | diceserver.py | diceserver.py | #!/usr/bin/env python
import random
from twisted.protocols import amp
port = 1234
_rand = random.Random()
class RollDice(amp.Command):
arguments = [('sides', amp.Integer())]
response = [('result', amp.Integer())]
class Dice(amp.AMP):
def roll(self, sides=6):
"""Return a random integer from 1 to sides"""
result = _rand.randint(1, sides)
return {'result': result}
RollDice.responder(roll)
def main():
from twisted.internet import reactor
from twisted.internet.protocol import Factory
pf = Factory()
pf.protocol = Dice
reactor.listenTCP(port, pf)
reactor.run()
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import random
from twisted.protocols import amp
from twisted.internet import reactor
from twisted.internet.protocol import Factory
from twisted.python import usage
import sys
port = 1234
_rand = random.Random()
class Options(usage.Options):
optParameters = [
["port", "p", port, "server port"],
]
class RollDice(amp.Command):
arguments = [('sides', amp.Integer())]
response = [('result', amp.Integer())]
class Dice(amp.AMP):
def roll(self, sides=6):
"""Return a random integer from 1 to sides"""
result = _rand.randint(1, sides)
return {'result': result}
RollDice.responder(roll)
def main():
options = Options()
try:
options.parseOptions()
except usage.UsageError, err:
print "%s: %s" % (sys.argv[0], err)
print "%s: Try --help for usage details" % sys.argv[0]
sys.exit(1)
port = int(options["port"])
pf = Factory()
pf.protocol = Dice
reactor.listenTCP(port, pf)
reactor.run()
if __name__ == '__main__':
main()
| Add command-line option to set port. | Add command-line option to set port.
| Python | mit | dripton/ampchat | <REPLACE_OLD> amp
port <REPLACE_NEW> amp
from twisted.internet import reactor
from twisted.internet.protocol import Factory
from twisted.python import usage
import sys
port <REPLACE_END> <REPLACE_OLD> random.Random()
class <REPLACE_NEW> random.Random()
class Options(usage.Options):
optParameters = [
["port", "p", port, "server port"],
]
class <REPLACE_END> <REPLACE_OLD> from twisted.internet import reactor
from twisted.internet.protocol import Factory
<REPLACE_NEW> options = Options()
try:
options.parseOptions()
except usage.UsageError, err:
print "%s: %s" % (sys.argv[0], err)
print "%s: Try --help for usage details" % sys.argv[0]
sys.exit(1)
port = int(options["port"])
<REPLACE_END> <|endoftext|> #!/usr/bin/env python
import random
from twisted.protocols import amp
from twisted.internet import reactor
from twisted.internet.protocol import Factory
from twisted.python import usage
import sys
port = 1234
_rand = random.Random()
class Options(usage.Options):
optParameters = [
["port", "p", port, "server port"],
]
class RollDice(amp.Command):
arguments = [('sides', amp.Integer())]
response = [('result', amp.Integer())]
class Dice(amp.AMP):
def roll(self, sides=6):
"""Return a random integer from 1 to sides"""
result = _rand.randint(1, sides)
return {'result': result}
RollDice.responder(roll)
def main():
options = Options()
try:
options.parseOptions()
except usage.UsageError, err:
print "%s: %s" % (sys.argv[0], err)
print "%s: Try --help for usage details" % sys.argv[0]
sys.exit(1)
port = int(options["port"])
pf = Factory()
pf.protocol = Dice
reactor.listenTCP(port, pf)
reactor.run()
if __name__ == '__main__':
main()
| Add command-line option to set port.
#!/usr/bin/env python
import random
from twisted.protocols import amp
port = 1234
_rand = random.Random()
class RollDice(amp.Command):
arguments = [('sides', amp.Integer())]
response = [('result', amp.Integer())]
class Dice(amp.AMP):
def roll(self, sides=6):
"""Return a random integer from 1 to sides"""
result = _rand.randint(1, sides)
return {'result': result}
RollDice.responder(roll)
def main():
from twisted.internet import reactor
from twisted.internet.protocol import Factory
pf = Factory()
pf.protocol = Dice
reactor.listenTCP(port, pf)
reactor.run()
if __name__ == '__main__':
main()
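
A minimal client-side sketch for the RollDice command defined in this record, written in the same Python 2 / Twisted idiom as the server; the host is assumed and error handling is omitted:

# Hypothetical AMP client for diceserver; assumes the server is reachable
# on localhost at the port defined above.
from twisted.internet import reactor
from twisted.internet.protocol import ClientCreator
from twisted.protocols import amp

from diceserver import RollDice, port  # assumes diceserver.py is importable

def show(result):
    # callRemote fires with the response dict declared on the command
    print "rolled", result['result']
    reactor.stop()

def connected(proto):
    return proto.callRemote(RollDice, sides=6).addCallback(show)

ClientCreator(reactor, amp.AMP).connectTCP('127.0.0.1', port).addCallback(connected)
reactor.run()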
|
42a287d23a1153df636c193695615d99b7c75e4d | test/stop_all.py | test/stop_all.py | import urbackup_api
server = urbackup_api.urbackup_server("http://127.0.0.1:55414/x", "admin", "foo")
for action in server.get_actions():
a = action["action"]
    if a == server.action_full_file or a == server.action_resumed_full_file:
print("Running full file backup: "+action["name"])
print("Stopping...")
server.stop_action(action) | Test stopping all running file backups | Test stopping all running file backups
| Python | apache-2.0 | uroni/urbackup-server-python-web-api-wrapper | <INSERT> import urbackup_api
server = urbackup_api.urbackup_server("http://127.0.0.1:55414/x", "admin", "foo")
for action in server.get_actions():
<INSERT_END> <INSERT> a = action["action"]
    if a == server.action_full_file or a == server.action_resumed_full_file:
print("Running full file backup: "+action["name"])
print("Stopping...")
server.stop_action(action) <INSERT_END> <|endoftext|> import urbackup_api
server = urbackup_api.urbackup_server("http://127.0.0.1:55414/x", "admin", "foo")
for action in server.get_actions():
a = action["action"]
    if a == server.action_full_file or a == server.action_resumed_full_file:
print("Running full file backup: "+action["name"])
print("Stopping...")
server.stop_action(action) | Test stopping all running file backups
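
The same test with connection details read from the environment rather than hard-coded; the variable names below are illustrative, not part of the wrapper's API:

# Hypothetical variant of test/stop_all.py using environment variables.
import os
import urbackup_api

server = urbackup_api.urbackup_server(
    os.environ.get("URBACKUP_URL", "http://127.0.0.1:55414/x"),
    os.environ.get("URBACKUP_USER", "admin"),
    os.environ.get("URBACKUP_PASSWORD", "foo"))

for action in server.get_actions():
    a = action["action"]
    if a == server.action_full_file or a == server.action_resumed_full_file:
        print("Stopping full file backup: " + action["name"])
        server.stop_action(action)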
|
|
3154ef23b48a42e274417a28953c55b98ac3fec3 | filters/png2jpg.py | filters/png2jpg.py | """
Change image extensions from .png to .jpg
EXAMPLE:
>>>> echo An  | pandoc -F png2jpg.py
"""
import panflute as pf
def action(elem, doc):
if isinstance(elem, pf.Image):
elem.url = elem.url.replace('.png', '.jpg')
return elem
def main(doc=None):
return pf.run_filter(action, doc=doc)
if __name__ == '__main__':
main()
| Convert .png endings to .jpg | Convert .png endings to .jpg
| Python | bsd-3-clause | sergiocorreia/panflute-filters | <INSERT> """
Change image extensions from .png to .jpg
EXAMPLE:
<INSERT_END> <INSERT> >>>> echo An  | pandoc -F png2jpg.py
"""
import panflute as pf
def action(elem, doc):
if isinstance(elem, pf.Image):
elem.url = elem.url.replace('.png', '.jpg')
return elem
def main(doc=None):
return pf.run_filter(action, doc=doc)
if __name__ == '__main__':
main()
<INSERT_END> <|endoftext|> """
Change image extensions from .png to .jpg
EXAMPLE:
>>>> echo An  | pandoc -F png2jpg.py
"""
import panflute as pf
def action(elem, doc):
if isinstance(elem, pf.Image):
elem.url = elem.url.replace('.png', '.jpg')
return elem
def main(doc=None):
return pf.run_filter(action, doc=doc)
if __name__ == '__main__':
main()
| Convert .png endings to .jpg
|
|
74dfabb565dbd6581a300091c045067d0398e899 | source/jormungandr/jormungandr/interfaces/v1/Coverage.py | source/jormungandr/jormungandr/interfaces/v1/Coverage.py | # coding=utf-8
from flask.ext.restful import Resource, fields, marshal_with
from jormungandr import i_manager
from make_links import add_coverage_link, add_collection_links, clean_links
from converters_collection_type import collections_to_resource_type
from collections import OrderedDict
region_fields = {
"id": fields.String(attribute="region_id"),
"start_production_date": fields.String,
"end_production_date": fields.String,
"status": fields.String,
"shape": fields.String,
}
regions_fields = OrderedDict([
("regions", fields.List(fields.Nested(region_fields)))
])
collections = collections_to_resource_type.keys()
class Coverage(Resource):
@clean_links()
@add_coverage_link()
@add_collection_links(collections)
@marshal_with(regions_fields)
def get(self, region=None, lon=None, lat=None):
return i_manager.regions(region, lon, lat), 200
| # coding=utf-8
from flask.ext.restful import Resource, fields, marshal_with
from jormungandr import i_manager
from make_links import add_coverage_link, add_coverage_link, add_collection_links, clean_links
from converters_collection_type import collections_to_resource_type
from collections import OrderedDict
from fields import NonNullNested
region_fields = {
"id": fields.String(attribute="region_id"),
"start_production_date": fields.String,
"end_production_date": fields.String,
"status": fields.String,
"shape": fields.String,
"error": NonNullNested({
"code": fields.String,
"value": fields.String
})
}
regions_fields = OrderedDict([
("regions", fields.List(fields.Nested(region_fields)))
])
collections = collections_to_resource_type.keys()
class Coverage(Resource):
@clean_links()
@add_coverage_link()
@add_collection_links(collections)
@marshal_with(regions_fields)
def get(self, region=None, lon=None, lat=None):
return i_manager.regions(region, lon, lat), 200
| Add error field to region | Jormungandr: Add error field to region
| Python | agpl-3.0 | VincentCATILLON/navitia,prhod/navitia,xlqian/navitia,prhod/navitia,prhod/navitia,xlqian/navitia,ballouche/navitia,is06/navitia,pbougue/navitia,ballouche/navitia,kadhikari/navitia,CanalTP/navitia,VincentCATILLON/navitia,frodrigo/navitia,CanalTP/navitia,pbougue/navitia,francois-vincent/navitia,TeXitoi/navitia,kinnou02/navitia,stifoon/navitia,frodrigo/navitia,Tisseo/navitia,kadhikari/navitia,stifoon/navitia,kinnou02/navitia,kinnou02/navitia,djludo/navitia,kadhikari/navitia,patochectp/navitia,xlqian/navitia,thiphariel/navitia,patochectp/navitia,xlqian/navitia,fueghan/navitia,fueghan/navitia,fueghan/navitia,CanalTP/navitia,lrocheWB/navitia,djludo/navitia,frodrigo/navitia,kadhikari/navitia,francois-vincent/navitia,VincentCATILLON/navitia,datanel/navitia,lrocheWB/navitia,TeXitoi/navitia,francois-vincent/navitia,antoine-de/navitia,ballouche/navitia,is06/navitia,CanalTP/navitia,francois-vincent/navitia,thiphariel/navitia,Tisseo/navitia,lrocheWB/navitia,kinnou02/navitia,VincentCATILLON/navitia,Tisseo/navitia,frodrigo/navitia,antoine-de/navitia,TeXitoi/navitia,patochectp/navitia,TeXitoi/navitia,stifoon/navitia,CanalTP/navitia,pbougue/navitia,antoine-de/navitia,djludo/navitia,is06/navitia,patochectp/navitia,xlqian/navitia,Tisseo/navitia,prhod/navitia,fueghan/navitia,datanel/navitia,djludo/navitia,thiphariel/navitia,lrocheWB/navitia,datanel/navitia,Tisseo/navitia,datanel/navitia,ballouche/navitia,thiphariel/navitia,pbougue/navitia,is06/navitia,antoine-de/navitia,stifoon/navitia | <INSERT> add_coverage_link, <INSERT_END> <REPLACE_OLD> OrderedDict
region_fields <REPLACE_NEW> OrderedDict
from fields import NonNullNested
region_fields <REPLACE_END> <REPLACE_OLD> fields.String,
}
regions_fields <REPLACE_NEW> fields.String,
"error": NonNullNested({
"code": fields.String,
"value": fields.String
})
}
regions_fields <REPLACE_END> <|endoftext|> # coding=utf-8
from flask.ext.restful import Resource, fields, marshal_with
from jormungandr import i_manager
from make_links import add_coverage_link, add_coverage_link, add_collection_links, clean_links
from converters_collection_type import collections_to_resource_type
from collections import OrderedDict
from fields import NonNullNested
region_fields = {
"id": fields.String(attribute="region_id"),
"start_production_date": fields.String,
"end_production_date": fields.String,
"status": fields.String,
"shape": fields.String,
"error": NonNullNested({
"code": fields.String,
"value": fields.String
})
}
regions_fields = OrderedDict([
("regions", fields.List(fields.Nested(region_fields)))
])
collections = collections_to_resource_type.keys()
class Coverage(Resource):
@clean_links()
@add_coverage_link()
@add_collection_links(collections)
@marshal_with(regions_fields)
def get(self, region=None, lon=None, lat=None):
return i_manager.regions(region, lon, lat), 200
| Jormungandr: Add error field to region
# coding=utf-8
from flask.ext.restful import Resource, fields, marshal_with
from jormungandr import i_manager
from make_links import add_coverage_link, add_collection_links, clean_links
from converters_collection_type import collections_to_resource_type
from collections import OrderedDict
region_fields = {
"id": fields.String(attribute="region_id"),
"start_production_date": fields.String,
"end_production_date": fields.String,
"status": fields.String,
"shape": fields.String,
}
regions_fields = OrderedDict([
("regions", fields.List(fields.Nested(region_fields)))
])
collections = collections_to_resource_type.keys()
class Coverage(Resource):
@clean_links()
@add_coverage_link()
@add_collection_links(collections)
@marshal_with(regions_fields)
def get(self, region=None, lon=None, lat=None):
return i_manager.regions(region, lon, lat), 200
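
For illustration only, the rough shape of a marshalled region once the new field is populated — assuming NonNullNested behaves like flask-restful's Nested but drops the key when no error is set; all values here are invented:

# Hypothetical marshalled output of the Coverage resource above.
{
    "regions": [{
        "id": "fr-idf",                      # placeholder region id
        "start_production_date": "20140105",
        "end_production_date": "20150105",
        "status": "running",
        "shape": "POLYGON((...))",
        "error": {"code": "dead", "value": "region unavailable"}  # new field
    }]
}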
|
4ce3502e1623ca24e43e01e4c580ee327e6192fa | django_extensions/management/commands/generate_secret_key.py | django_extensions/management/commands/generate_secret_key.py | # -*- coding: utf-8 -*-
from random import choice
from django.core.management.base import BaseCommand
from django_extensions.management.utils import signalcommand
class Command(BaseCommand):
help = "Generates a new SECRET_KEY that can be used in a project settings file."
requires_system_checks = False
@signalcommand
def handle(self, *args, **options):
return ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
| # -*- coding: utf-8 -*-
from random import choice
from django.core.management.base import BaseCommand
from django.core.management.utils import get_random_secret_key
from django_extensions.management.utils import signalcommand
class Command(BaseCommand):
help = "Generates a new SECRET_KEY that can be used in a project settings file."
requires_system_checks = False
@signalcommand
def handle(self, *args, **options):
return get_random_secret_key()
| Use same algo to generate SECRET_KEY as Django | Use same algo to generate SECRET_KEY as Django
Using random from standard library is not cryptographically secure. | Python | mit | haakenlid/django-extensions,haakenlid/django-extensions,django-extensions/django-extensions,linuxmaniac/django-extensions,linuxmaniac/django-extensions,django-extensions/django-extensions,haakenlid/django-extensions,django-extensions/django-extensions,linuxmaniac/django-extensions | <REPLACE_OLD> BaseCommand
from <REPLACE_NEW> BaseCommand
from django.core.management.utils import get_random_secret_key
from <REPLACE_END> <REPLACE_OLD> ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
<REPLACE_NEW> get_random_secret_key()
<REPLACE_END> <|endoftext|> # -*- coding: utf-8 -*-
from random import choice
from django.core.management.base import BaseCommand
from django.core.management.utils import get_random_secret_key
from django_extensions.management.utils import signalcommand
class Command(BaseCommand):
help = "Generates a new SECRET_KEY that can be used in a project settings file."
requires_system_checks = False
@signalcommand
def handle(self, *args, **options):
return get_random_secret_key()
| Use same algo to generate SECRET_KEY as Django
Using random from standard library is not cryptographically secure.
# -*- coding: utf-8 -*-
from random import choice
from django.core.management.base import BaseCommand
from django_extensions.management.utils import signalcommand
class Command(BaseCommand):
help = "Generates a new SECRET_KEY that can be used in a project settings file."
requires_system_checks = False
@signalcommand
def handle(self, *args, **options):
return ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
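
get_random_secret_key() was added to django.core.management.utils around Django 1.10 and draws from a system CSPRNG via get_random_string, keeping the same 50-character length and alphabet as the loop it replaces. A quick way to use it outside the management command, assuming a recent Django is installed:

# One-off key generation from a shell:
#   python -c "from django.core.management.utils import get_random_secret_key; print(get_random_secret_key())"
from django.core.management.utils import get_random_secret_key

print(get_random_secret_key())  # 50 chars drawn from a system CSPRNG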
|
c138adaf69f5029209f03cafe72f1082cdb78f30 | ppp_nlp_ml_standalone/requesthandler.py | ppp_nlp_ml_standalone/requesthandler.py | """Request handler of the module."""
import ppp_datamodel
from ppp_datamodel import Sentence
from ppp_datamodel.communication import TraceItem, Response
from ppp_nlp_ml_standalone import ExtractTriplet
class RequestHandler:
def __init__(self, request):
self.request = request
def answer(self):
if not isinstance(self.request.tree, Sentence):
return []
sentence = self.request.tree.value
extract_triplet = ExtractTriplet.ExtractTriplet()
a, b, c = extract_triplet.extract_from_sentence(sentence)
if a == '?':
subject = ppp_datamodel.Missing()
else:
subject = ppp_datamodel.Resource(value=a)
if b == '?':
predicate = ppp_datamodel.Missing()
else:
predicate = ppp_datamodel.Resource(value=b)
if c == '?':
object = ppp_datamodel.Missing()
else:
object = ppp_datamodel.Resource(value=b)
triple = ppp_datamodel.Triple(subject=subject,
predicate=predicate,
object=object)
meas = {'accuracy': 0.5, 'relevance': 0.5}
trace = self.request.trace + [TraceItem('NLP-ML-standalone', triple, meas)]
response = Response('en', triple, meas, trace)
print(repr(response))
return [response] | """Request handler of the module."""
import ppp_datamodel
from ppp_datamodel import Sentence, Missing, Resource
from ppp_datamodel.communication import TraceItem, Response
from ppp_nlp_ml_standalone import ExtractTriplet
def missing_or_resource(x):
return Missing() if x == '?' else Resource(value=x)
class RequestHandler:
def __init__(self, request):
self.request = request
def answer(self):
if not isinstance(self.request.tree, Sentence):
return []
sentence = self.request.tree.value
extract_triplet = ExtractTriplet.ExtractTriplet()
triple = extract_triplet.extract_from_sentence(sentence)
(subject, predicate, object) = map(missing_or_resource, triple)
triple = ppp_datamodel.Triple(subject=subject,
predicate=predicate,
object=object)
meas = {'accuracy': 0.5, 'relevance': 0.5}
trace = self.request.trace + [TraceItem('NLP-ML-standalone', triple, meas)]
response = Response('en', triple, meas, trace)
print(repr(response))
return [response]
| Make RequestHandler's code less redundant. | Make RequestHandler's code less redundant.
| Python | mit | ProjetPP/PPP-QuestionParsing-ML-Standalone,ProjetPP/PPP-QuestionParsing-ML-Standalone | <REPLACE_OLD> Sentence
from <REPLACE_NEW> Sentence, Missing, Resource
from <REPLACE_END> <REPLACE_OLD> ExtractTriplet
class <REPLACE_NEW> ExtractTriplet
def missing_or_resource(x):
return Missing() if x == '?' else Resource(value=x)
class <REPLACE_END> <REPLACE_OLD> a, b, c = extract_triplet.extract_from_sentence(sentence)
if a == '?':
subject = ppp_datamodel.Missing()
else:
subject = ppp_datamodel.Resource(value=a)
if b == '?':
predicate = ppp_datamodel.Missing()
else:
predicate = ppp_datamodel.Resource(value=b)
if c == '?':
object = ppp_datamodel.Missing()
else:
object = ppp_datamodel.Resource(value=b)
<REPLACE_NEW> triple = extract_triplet.extract_from_sentence(sentence)
(subject, predicate, object) = map(missing_or_resource, triple)
<REPLACE_END> <REPLACE_OLD> [response] <REPLACE_NEW> [response]
<REPLACE_END> <|endoftext|> """Request handler of the module."""
import ppp_datamodel
from ppp_datamodel import Sentence, Missing, Resource
from ppp_datamodel.communication import TraceItem, Response
from ppp_nlp_ml_standalone import ExtractTriplet
def missing_or_resource(x):
return Missing() if x == '?' else Resource(value=x)
class RequestHandler:
def __init__(self, request):
self.request = request
def answer(self):
if not isinstance(self.request.tree, Sentence):
return []
sentence = self.request.tree.value
extract_triplet = ExtractTriplet.ExtractTriplet()
triple = extract_triplet.extract_from_sentence(sentence)
(subject, predicate, object) = map(missing_or_resource, triple)
triple = ppp_datamodel.Triple(subject=subject,
predicate=predicate,
object=object)
meas = {'accuracy': 0.5, 'relevance': 0.5}
trace = self.request.trace + [TraceItem('NLP-ML-standalone', triple, meas)]
response = Response('en', triple, meas, trace)
print(repr(response))
return [response]
| Make RequestHandler's code less redundant.
"""Request handler of the module."""
import ppp_datamodel
from ppp_datamodel import Sentence
from ppp_datamodel.communication import TraceItem, Response
from ppp_nlp_ml_standalone import ExtractTriplet
class RequestHandler:
def __init__(self, request):
self.request = request
def answer(self):
if not isinstance(self.request.tree, Sentence):
return []
sentence = self.request.tree.value
extract_triplet = ExtractTriplet.ExtractTriplet()
a, b, c = extract_triplet.extract_from_sentence(sentence)
if a == '?':
subject = ppp_datamodel.Missing()
else:
subject = ppp_datamodel.Resource(value=a)
if b == '?':
predicate = ppp_datamodel.Missing()
else:
predicate = ppp_datamodel.Resource(value=b)
if c == '?':
object = ppp_datamodel.Missing()
else:
object = ppp_datamodel.Resource(value=b)
triple = ppp_datamodel.Triple(subject=subject,
predicate=predicate,
object=object)
meas = {'accuracy': 0.5, 'relevance': 0.5}
trace = self.request.trace + [TraceItem('NLP-ML-standalone', triple, meas)]
response = Response('en', triple, meas, trace)
print(repr(response))
return [response] |
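
A quick illustration of how the refactored helper collapses the old if/else chains — the triple below is made up:

# Hypothetical walk-through of missing_or_resource over an extracted triple.
from ppp_datamodel import Missing, Resource

def missing_or_resource(x):
    return Missing() if x == '?' else Resource(value=x)

triple = ('George Washington', 'birth date', '?')
subject, predicate, object_ = map(missing_or_resource, triple)
# subject   -> Resource(value='George Washington')
# predicate -> Resource(value='birth date')
# object_   -> Missing()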