commit          stringlengths   40 - 40
old_file        stringlengths   4 - 264
new_file        stringlengths   4 - 264
old_contents    stringlengths   0 - 3.26k
new_contents    stringlengths   1 - 4.43k
subject         stringlengths   15 - 624
message         stringlengths   15 - 4.7k
lang            stringclasses   3 values
license         stringclasses   13 values
repos           stringlengths   5 - 91.5k
723f59d43cce9d7a09386447389e8df33b5d323e
tests/base/base.py
tests/base/base.py
import steel
import unittest


class NameAwareOrderedDictTests(unittest.TestCase):
    def setUp(self):
        self.d = steel.NameAwareOrderedDict()

    def test_ignore_object(self):
        # Objects without a set_name() method should be ignored
        self.d['example'] = object()
        self.assertFalse(hasattr(self.d['example'], 'name'))

    def test_auto_name(self):
        # Objects with a set_name() method should be told their name
        class NamedObject(object):
            def set_name(self, name):
                self.name = name

        self.d['example'] = NamedObject()
        self.assertEqual(self.d['example'].name, 'example')

    def test_errors(self):
        # Make sure set_name() errors are raised, not swallowed
        class ErrorObject(object):
            "Just a simple object that errors out while setting its name"
            def set_name(self, name):
                raise TypeError('Something went wrong')

        with self.assertRaises(TypeError):
            self.d['example'] = ErrorObject()
import steel
import unittest


class NameAwareOrderedDictTests(unittest.TestCase):
    def setUp(self):
        self.d = steel.NameAwareOrderedDict()

    def test_ignore_object(self):
        # Objects without a set_name() method should be ignored
        self.d['example'] = object()
        self.assertFalse(hasattr(self.d['example'], 'name'))

    def test_auto_name(self):
        # Objects with a set_name() method should be told their name
        class NamedObject(object):
            def set_name(self, name):
                self.name = name

        self.d['example'] = NamedObject()
        self.assertEqual(self.d['example'].name, 'example')

    def test_errors(self):
        # Make sure set_name() errors are raised, not swallowed
        class ErrorObject(object):
            "Just a simple object that errors out while setting its name"
            def set_name(self, name):
                raise TypeError('Something went wrong')

        with self.assertRaises(TypeError):
            self.d['example'] = ErrorObject()


class SizeTests(unittest.TestCase):
    def test_explicit_sizes(self):
        class Test(steel.Structure):
            field1 = steel.Bytes(size=2)
            field2 = steel.Bytes(size=4)

        self.assertEqual(Test.size, 6)
Add a simple test for calculating the size of a structure
Add a simple test for calculating the size of a structure
Python
bsd-3-clause
gulopine/steel-experiment
73af60749ea7b031473bc5f0c3ddd60d39ec6fa6
docs/examples/customer_fetch/get_customer.py
docs/examples/customer_fetch/get_customer.py
from sharpy.product import CheddarProduct

# Get a product instance to work with
product = CheddarProduct(
    username = CHEDDAR_USERNAME,
    password = CHEDDAR_PASSWORD,
    product_code = CHEDDAR_PRODUCT,
)

# Get the customer from Cheddar Getter
customer = product.get_customer(code='1BDI')
from sharpy.product import CheddarProduct
from sharpy import exceptions

# Get a product instance to work with
product = CheddarProduct(
    username = CHEDDAR_USERNAME,
    password = CHEDDAR_PASSWORD,
    product_code = CHEDDAR_PRODUCT,
)

try:
    # Get the customer from Cheddar Getter
    customer = product.get_customer(code='1BDI')
except exceptions.NotFound, err:
    print 'You do not appear to be a customer yet'
else:
    # Test if the customer's subscription is canceled
    if customer.subscription.canceled:
        if customer.subscription.cancel_type == 'paypal-pending':
            print 'Waiting for Paypal authorization'
        else:
            print 'Your subscription appears to have been cancelled'
    else:
        print 'Your subscription appears to be active'
Add a bit to the get customer to show handling not-found and testing for canceled status
Add a bit to the get customer to show handling not-found and testing for canceled status
Python
bsd-3-clause
SeanOC/sharpy,smartfile/sharpy
7f9ea07f2ee55ff36009bc67068c36bc1180c909
tests/test_credentials.py
tests/test_credentials.py
import json

import keyring

from pyutrack import Credentials
from tests import PyutrackTest


class CredentialsTests(PyutrackTest):
    def test_empty(self):
        c = Credentials('root')
        self.assertIsNone(c.password)
        self.assertIsNone(c.cookies)

    def test_persistence(self):
        c = Credentials('root', 'passwd', {"key": "value"})
        c.persist()
        self.assertEqual(
            keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'),
            'passwd'
        )
        self.assertEqual(
            json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')),
            {"key": "value"}
        )
import json

import keyring

from pyutrack import Credentials
from tests import PyutrackTest


class CredentialsTests(PyutrackTest):
    def test_empty(self):
        c = Credentials('root')
        self.assertIsNone(c.password)
        self.assertIsNone(c.cookies)

    def test_persistence(self):
        c = Credentials('root', 'passwd', {"key": "value"})
        c.persist()
        self.assertEqual(
            keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'),
            'passwd'
        )
        self.assertEqual(
            json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')),
            {"key": "value"}
        )

    def test_reload(self):
        Credentials('root', 'passwd', {"key": "value"}).persist()
        c = Credentials('root')
        self.assertEqual(
            keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'),
            'passwd'
        )
        self.assertEqual(
            json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')),
            {"key": "value"}
        )
Add test for credentials reload
Add test for credentials reload
Python
mit
alisaifee/pyutrack,alisaifee/pyutrack
82b7e46ebdeb154963520fec1d41cc624ceb806d
tests/test_vendcrawler.py
tests/test_vendcrawler.py
import unittest

from vendcrawler.scripts.vendcrawler import VendCrawler


class TestVendCrawlerMethods(unittest.TestCase):
    def test_get_links(self):
        links = VendCrawler().get_links(2)
        self.assertEqual(links,
                         ['https://sarahserver.net/?module=vendor&p=1',
                          'https://sarahserver.net/?module=vendor&p=2'])

    def test_get_page_count(self):
        with open('test_vendcrawler.html', 'r') as f:
            data = f.read()
        page_count = VendCrawler().get_page_count(str(data))
        self.assertEqual(int(page_count), 84)


if __name__ == '__main__':
    unittest.main()
import unittest

from vendcrawler.scripts.vendcrawler import VendCrawler


class TestVendCrawlerMethods(unittest.TestCase):
    def test_get_links(self):
        links = VendCrawler('a', 'b', 'c').get_links(2)
        self.assertEqual(links,
                         ['https://sarahserver.net/?module=vendor&p=1',
                          'https://sarahserver.net/?module=vendor&p=2'])

    def test_get_page_count(self):
        with open('test_vendcrawler.html', 'r') as f:
            data = f.read()
        page_count = VendCrawler('a', 'b', 'c').get_page_count(str(data))
        self.assertEqual(int(page_count), 84)


if __name__ == '__main__':
    unittest.main()
Fix test by passing placeholder variables.
Fix test by passing placeholder variables.
Python
mit
josetaas/vendcrawler,josetaas/vendcrawler,josetaas/vendcrawler
154d7c17228cf9196ea4ee6b5e13a5268cc69407
script/release/release/pypi.py
script/release/release/pypi.py
from __future__ import absolute_import
from __future__ import unicode_literals

from configparser import Error
from requests.exceptions import HTTPError
from twine.commands.upload import main as twine_upload
from twine.utils import get_config

from .utils import ScriptError


def pypi_upload(args):
    print('Uploading to PyPi')
    try:
        rel = args.release.replace('-rc', 'rc')
        twine_upload([
            'dist/docker_compose-{}*.whl'.format(rel),
            'dist/docker-compose-{}*.tar.gz'.format(rel)
        ])
    except HTTPError as e:
        if e.response.status_code == 400 and 'File already exists' in e.message:
            if not args.finalize_resume:
                raise ScriptError(
                    'Package already uploaded on PyPi.'
                )
            print('Skipping PyPi upload - package already uploaded')
        else:
            raise ScriptError('Unexpected HTTP error uploading package to PyPi: {}'.format(e))


def check_pypirc():
    try:
        config = get_config()
    except Error as e:
        raise ScriptError('Failed to parse .pypirc file: {}'.format(e))

    if config is None:
        raise ScriptError('Failed to parse .pypirc file')

    if 'pypi' not in config:
        raise ScriptError('Missing [pypi] section in .pypirc file')

    if not (config['pypi'].get('username') and config['pypi'].get('password')):
        raise ScriptError('Missing login/password pair for pypi repo')
from __future__ import absolute_import
from __future__ import unicode_literals

from configparser import Error
from requests.exceptions import HTTPError
from twine.commands.upload import main as twine_upload
from twine.utils import get_config

from .utils import ScriptError


def pypi_upload(args):
    print('Uploading to PyPi')
    try:
        rel = args.release.replace('-rc', 'rc')
        twine_upload([
            'dist/docker_compose-{}*.whl'.format(rel),
            'dist/docker-compose-{}*.tar.gz'.format(rel)
        ])
    except HTTPError as e:
        if e.response.status_code == 400 and 'File already exists' in str(e):
            if not args.finalize_resume:
                raise ScriptError(
                    'Package already uploaded on PyPi.'
                )
            print('Skipping PyPi upload - package already uploaded')
        else:
            raise ScriptError('Unexpected HTTP error uploading package to PyPi: {}'.format(e))


def check_pypirc():
    try:
        config = get_config()
    except Error as e:
        raise ScriptError('Failed to parse .pypirc file: {}'.format(e))

    if config is None:
        raise ScriptError('Failed to parse .pypirc file')

    if 'pypi' not in config:
        raise ScriptError('Missing [pypi] section in .pypirc file')

    if not (config['pypi'].get('username') and config['pypi'].get('password')):
        raise ScriptError('Missing login/password pair for pypi repo')
Fix script for release file already present case
Fix script for release file already present case

This avoids a:
"AttributeError: 'HTTPError' object has no attribute 'message'"

Signed-off-by: Ulysses Souza <[email protected]>
Python
apache-2.0
thaJeztah/compose,thaJeztah/compose,vdemeester/compose,vdemeester/compose
f8c6876f6a1567fb0967c12365ae061d44d6f3db
mmipylint/main.py
mmipylint/main.py
import logging

import mothermayi.errors
import mothermayi.files
import subprocess

LOGGER = logging.getLogger(__name__)


def plugin():
    return {
        'name'       : 'pylint',
        'pre-commit' : pre_commit,
    }


def pre_commit(config, staged):
    pylint = config.get('pylint', {})
    args = pylint.get('args', [])
    to_check = mothermayi.files.python_source(staged)
    if not to_check:
        return
    command = ['pylint'] + args + to_check
    LOGGER.debug("Executing %s", " ".join(command))
    try:
        output = subprocess.check_output(command, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        raise mothermayi.errors.FailHook(str(e.output.decode('utf-8')))
import logging

import mothermayi.errors
import mothermayi.files
import subprocess

LOGGER = logging.getLogger(__name__)


def plugin():
    return {
        'name'       : 'pylint',
        'pre-commit' : pre_commit,
    }


def pre_commit(config, staged):
    pylint = config.get('pylint', {})
    args = pylint.get('args', [])
    to_check = mothermayi.files.python_source(staged)
    if not to_check:
        return
    command = ['pylint'] + args + list(to_check)
    LOGGER.debug("Executing %s", " ".join(command))
    try:
        output = subprocess.check_output(command, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        raise mothermayi.errors.FailHook(str(e.output.decode('utf-8')))
Handle the fact that to_check is a set
Handle the fact that to_check is a set
Python
mit
EliRibble/mothermayi-pylint
56fca00d992c84e46e60fa8b9ea66eb9eadc7508
mgsv_names.py
mgsv_names.py
from __future__ import unicode_literals, print_function

import sqlite3, os, random

_select = 'select {0} from {1} limit 1 offset abs(random()) % (select count({0}) from {1});'
_uncommon_select = 'select value from uncommons where key=?;'


def generate_name():
    conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'names.db'))
    cursor = conn.cursor()

    adj = cursor.execute(_select.format('adjective', 'adjectives')).fetchone()[0]
    anim = cursor.execute(_select.format('animal', 'animals')).fetchone()[0]
    rare = cursor.execute(_select.format('name', 'rares')).fetchone()[0]
    uncommon_anim = cursor.execute(_uncommon_select, [adj]).fetchone()
    uncommon_adj = cursor.execute(_uncommon_select, [anim]).fetchone()

    conn.close()

    r = random.random()
    if r < 0.001 or r >= 0.999:
        return rare
    elif r < 0.3 and uncommon_anim is not None:
        return ' '.join((adj, uncommon_anim[0]))
    elif r >= 0.7 and uncommon_adj is not None:
        return ' '.join((uncommon_adj[0], anim))
    return ' '.join((adj, anim))


if __name__ == '__main__':
    print(generate_name())
from __future__ import unicode_literals, print_function

import sqlite3, os, random

_select_random = 'select {0} from {1} limit 1 offset abs(random()) % (select count({0}) from {1});'
_select_uncommon = 'select value from uncommons where key=?;'


def generate_name():
    conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'names.db'))
    cursor = conn.cursor()

    adj = cursor.execute(_select_random.format('adjective', 'adjectives')).fetchone()[0]
    anim = cursor.execute(_select_random.format('animal', 'animals')).fetchone()[0]
    rare = cursor.execute(_select_random.format('name', 'rares')).fetchone()[0]
    uncommon_anim = cursor.execute(_select_uncommon, [adj]).fetchone()
    uncommon_adj = cursor.execute(_select_uncommon, [anim]).fetchone()

    conn.close()

    r = random.random()
    if r < 0.001 or r >= 0.999:
        return rare
    elif r < 0.3 and uncommon_anim is not None:
        return ' '.join((adj, uncommon_anim[0]))
    elif r >= 0.7 and uncommon_adj is not None:
        return ' '.join((uncommon_adj[0], anim))
    return ' '.join((adj, anim))


if __name__ == '__main__':
    for _ in range(20):
        print(generate_name())
Rename the SQL module vars for consistency.
Rename the SQL module vars for consistency.
Python
unlicense
rotated8/mgsv_names
c26fc5da048bb1751bb6401dbdb8839f89d82c1e
nova/policies/server_diagnostics.py
nova/policies/server_diagnostics.py
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_policy import policy

from nova.policies import base


BASE_POLICY_NAME = 'os_compute_api:os-server-diagnostics'


server_diagnostics_policies = [
    policy.DocumentedRuleDefault(
        BASE_POLICY_NAME,
        base.RULE_ADMIN_API,
        "Show the usage data for a server",
        [
            {
                'method': 'GET',
                'path': '/servers/{server_id}/diagnostics'
            }
        ]),
]


def list_rules():
    return server_diagnostics_policies
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_policy import policy

from nova.policies import base


BASE_POLICY_NAME = 'os_compute_api:os-server-diagnostics'


server_diagnostics_policies = [
    policy.DocumentedRuleDefault(
        name=BASE_POLICY_NAME,
        check_str=base.RULE_ADMIN_API,
        description="Show the usage data for a server",
        operations=[
            {
                'method': 'GET',
                'path': '/servers/{server_id}/diagnostics'
            }
        ],
        scope_types=['system', 'project']),
]


def list_rules():
    return server_diagnostics_policies
Introduce scope_types in server diagnostics
Introduce scope_types in server diagnostics

oslo.policy introduced the scope_type feature which can
control the access level at system-level and project-level.

- https://docs.openstack.org/oslo.policy/latest/user/usage.html#setting-scope
- http://specs.openstack.org/openstack/keystone-specs/specs/keystone/queens/system-scope.html

Appropriate scope_type for nova case:

- https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope

This commit introduce scope_type for server diagnostics API policies
as 'system' and 'project' but we will keep default as SYSTEM_ADMIN only.

Also adds the test case with scope_type enabled and verify we pass
and fail the policy check with expected context.

Partial implement blueprint policy-defaults-refresh

Change-Id: Ie1749d4f85b8bdc4110f57c9a33e54e9551cb7e3
Python
apache-2.0
mahak/nova,klmitch/nova,openstack/nova,klmitch/nova,klmitch/nova,mahak/nova,mahak/nova,klmitch/nova,openstack/nova,openstack/nova
0cd8be8ce3b11fe9c2591591c12cad5b688e6d0e
test/platform/TestPlatformCommand.py
test/platform/TestPlatformCommand.py
""" Test some lldb platform commands. """ import os, time import unittest2 import lldb from lldbtest import * class PlatformCommandTestCase(TestBase): mydir = "platform" def test_help_platform(self): self.runCmd("help platform") def test_list(self): self.expect("platform list", patterns = ['^Available platforms:']) def test_process_list(self): self.expect("platform process list", substrs = ['PID', 'TRIPLE', 'NAME']) def test_status(self): self.expect("platform status", substrs = ['Platform', 'Triple', 'OS Version', 'Kernel', 'Hostname']) if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
""" Test some lldb platform commands. """ import os, time import unittest2 import lldb from lldbtest import * class PlatformCommandTestCase(TestBase): mydir = "platform" def test_help_platform(self): self.runCmd("help platform") def test_list(self): self.expect("platform list", patterns = ['^Available platforms:']) def test_process_list(self): self.expect("platform process list", substrs = ['PID', 'ARCH', 'NAME']) def test_status(self): self.expect("platform status", substrs = ['Platform', 'Triple', 'OS Version', 'Kernel', 'Hostname']) if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
Modify test_process_list()'s expect sub-strings to be up-to-date.
Modify test_process_list()'s expect sub-strings to be up-to-date.

git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@128697 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb
f25dde13d04e9c6eda28ac76444682e53accbdb3
src/webapp/tasks.py
src/webapp/tasks.py
import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint

try:
    from uwsgidecorators import spool
except ImportError as e:
    def spool(fn):
        def nufun(*args, **kwargs):
            raise e
        return nufun


@spool
def get_aqua_distance(args):
    team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
    if team is None:
        return

    target = MapPoint.from_team(team)
    #aqua = MapPoint(51.04485, 13.74011) # real aqua
    aqua = MapPoint(51.05299472808838, 13.742453455924988) # hfbk
    team.location.center_distance = simple_distance(target, aqua)
    db.session.commit()
import database as db
from database.model import Team
from geotools import simple_distance
from geotools.routing import MapPoint
from cfg import config

try:
    from uwsgidecorators import spool
except ImportError as e:
    def spool(fn):
        def nufun(*args, **kwargs):
            raise e
        return nufun


@spool
def get_aqua_distance(args):
    team = db.session.query(Team).filter(Team.id == int(args["team_id"])).first()
    if team is None:
        return

    target = MapPoint.from_team(team)
    aqua = MapPoint(*config.CENTER_POINT)
    team.location.center_distance = simple_distance(target, aqua)
    db.session.commit()
Read center point from config
Read center point from config

Signed-off-by: Jan Losinski <[email protected]>
Python
bsd-3-clause
eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system
c9762cb5362a7fdba6050f5044db00d34eae6edb
confluent/main.py
confluent/main.py
# Copyright 2013 IBM Corporation
# All rights reserved

# This is the main application.
# It should check for existing UDP socket to negotiate socket listen takeover
# It will have three paths into it:
#   -Unix domain socket
#   -TLS socket
#   -WSGI
# Additionally, it will be able to receive particular UDP packets to facilitate
# Things like heartbeating and discovery
# It also will optionally snoop SLP DA requests

import confluent.pluginapi as pluginapi
import confluent.httpapi as httpapi
import confluent.sockapi as sockapi
import eventlet
import eventlet.backdoor as backdoor
from eventlet.green import socket
from eventlet import wsgi
import multiprocessing
import sys
import os


def run():
    pluginapi.load_plugins()
    #TODO: eventlet has a bug about unix domain sockets, this code works with bugs fixed
    #dbgsock = eventlet.listen("/var/run/confluent/dbg.sock", family=socket.AF_UNIX)
    #eventlet.spawn_n(backdoor.backdoor_server, dbgsock)
    webservice = httpapi.HttpApi()
    webservice.start()
    sockservice = sockapi.SockApi()
    sockservice.start()
    while (1):
        eventlet.sleep(100)
# Copyright 2013 IBM Corporation
# All rights reserved

# This is the main application.
# It should check for existing UDP socket to negotiate socket listen takeover
# It will have three paths into it:
#   -Unix domain socket
#   -TLS socket
#   -WSGI
# Additionally, it will be able to receive particular UDP packets to facilitate
# Things like heartbeating and discovery
# It also will optionally snoop SLP DA requests

import confluent.pluginapi as pluginapi
import confluent.httpapi as httpapi
import confluent.sockapi as sockapi
import eventlet
import eventlet.backdoor as backdoor
from eventlet.green import socket
from eventlet import wsgi
import multiprocessing
import sys
import os


def run():
    pluginapi.load_plugins()
    #TODO(jbjohnso): eventlet has a bug about unix domain sockets, this code
    #works with bugs fixed
    #dbgsock = eventlet.listen("/var/run/confluent/dbg.sock",
    #                          family=socket.AF_UNIX)
    #eventlet.spawn_n(backdoor.backdoor_server, dbgsock)
    webservice = httpapi.HttpApi()
    webservice.start()
    sockservice = sockapi.SockApi()
    sockservice.start()
    while (1):
        eventlet.sleep(100)
Rework commented out code a tad
Rework commented out code a tad
Python
apache-2.0
chenglch/confluent,jufm/confluent,chenglch/confluent,whowutwut/confluent,xcat2/confluent,jjohnson42/confluent,michaelfardu/thinkconfluent,jjohnson42/confluent,xcat2/confluent,chenglch/confluent,jjohnson42/confluent,jufm/confluent,whowutwut/confluent,michaelfardu/thinkconfluent,jufm/confluent,xcat2/confluent,chenglch/confluent,jufm/confluent,jjohnson42/confluent,xcat2/confluent,jjohnson42/confluent,xcat2/confluent,chenglch/confluent,jufm/confluent,michaelfardu/thinkconfluent,whowutwut/confluent,michaelfardu/thinkconfluent,michaelfardu/thinkconfluent,whowutwut/confluent
638f52f59135d151a3c7ed4f84fc0716c6c0d69d
mcbench/xpath.py
mcbench/xpath.py
import sys

import lxml.etree


class XPathError(Exception):
    pass


def parse_xml_filename(filename):
    return lxml.etree.parse(filename)


def compile_xpath(query):
    try:
        return lxml.etree.XPath(query)
    except lxml.etree.XPathSyntaxError as e:
        raise XPathError(e.msg), None, sys.exc_info()[2]


def register_extensions():
    ns = lxml.etree.FunctionNamespace(None)
    ns['is_call'] = lambda c, n: is_call(c.context_node, n)


def is_call(node, name):
    return (node.tag == 'ParameterizedExpr' and
            node[0].tag == 'NameExpr' and
            node[0].get('kind') == 'FUN' and
            node[0][0].get('nameId') == name)
import sys

import lxml.etree


class XPathError(Exception):
    pass


def parse_xml_filename(filename):
    return lxml.etree.parse(filename)


def compile_xpath(query):
    try:
        return lxml.etree.XPath(query)
    except lxml.etree.XPathSyntaxError as e:
        raise XPathError(e.msg), None, sys.exc_info()[2]


def register_extensions():
    ns = lxml.etree.FunctionNamespace(None)
    ns['is_call'] = is_call


def is_call(context, *names):
    node = context.context_node
    if node.tag != 'ParameterizedExpr':
        return False
    if node[0].tag != 'NameExpr' or node[0].get('kind') != 'FUN':
        return False
    called_name = node[0][0].get('nameId')
    # Could this function like
    # is_call('eval', 'feval') -> names is a tuple of strings
    # is_call(//some/sequence) -> names[0] is a list of strings
    for name in names:
        if isinstance(name, basestring) and called_name == name:
            return True
        elif any(called_name == n for n in name):
            return True
    return False
Make is_call handle multiple arguments.
Make is_call handle multiple arguments.

Can now be called with a sequence, as in is_call(ancestor::Function/@name)
or just several literals, as in is_call('eval', 'feval'). In both cases,
it does an or.
Python
mit
isbadawi/mcbench,isbadawi/mcbench
b482eb5c8ff7dc346a3c7037c2218a4b2f2d61c4
setup/create_player_seasons.py
setup/create_player_seasons.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import concurrent.futures

from db.common import session_scope
from db.player import Player
from utils.player_data_retriever import PlayerDataRetriever


def create_player_seasons(simulation=False):
    data_retriever = PlayerDataRetriever()

    with session_scope() as session:
        players = session.query(Player).all()[:]

    with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads:
        future_tasks = {
            threads.submit(
                data_retriever.retrieve_player_seasons,
                player.player_id, simulation
            ): player for player in players
        }
        for future in concurrent.futures.as_completed(future_tasks):
            try:
                plr_seasons = future.result()
            except Exception as e:
                print("Concurrent task generated an exception: %s" % e)
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import logging
import concurrent.futures

from db.common import session_scope
from db.player import Player
from utils.player_data_retriever import PlayerDataRetriever

logger = logging.getLogger(__name__)


def create_player_seasons(simulation=False):
    data_retriever = PlayerDataRetriever()
    plr_season_count = 0

    with session_scope() as session:
        players = session.query(Player).all()[:]

    with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads:
        future_tasks = {
            threads.submit(
                data_retriever.retrieve_player_seasons,
                player.player_id, simulation
            ): player for player in players
        }
        for future in concurrent.futures.as_completed(future_tasks):
            try:
                plr_season_count += len(future.result())
            except Exception as e:
                print("Concurrent task generated an exception: %s" % e)

    logger.info("+ %d statistics items retrieved overall" % plr_season_count)
Add logging to player season creation script
Add logging to player season creation script
Python
mit
leaffan/pynhldb
36cc1ecc2b64d5c31ea590dddbf9e12c71542c7d
sphinxcontrib/openstreetmap.py
sphinxcontrib/openstreetmap.py
# -*- coding: utf-8 -*-
"""
    sphinxcontrib.openstreetmap
    ===========================

    Embed OpenStreetMap on your documentation.

    :copyright: Copyright 2015 HAYASHI Kentaro <[email protected]>
    :license: BSD, see LICENSE for details.
"""

from docutils import nodes
from docutils.parsers.rst import directives
from sphinx.util.compat import Directive


class openstreetmap(nodes.General, nodes.Element):
    pass


class OpenStreetMapDirective(Directive):
    """Directive for embedding OpenStreetMap"""

    has_content = False
    option_spec = {
        'id': directives.unchanged,
        'label': directives.unchanged,
        'marker': directives.unchanged,
    }

    def run(self):
        node = openstreetmap()
        if 'id' in self.options:
            node['id'] = self.options['id']
        else:
            msg = ('openstreetmap directive needs uniqueue id for map data')
            return [document.reporter.warning(msg, line=self.lineno)]

        return [node]


def visit_openstreetmap_node(self, node):
    self.body.append("<div id='openstreetmap' style='color:red'>OpenStreetMap directive</div>")


def depart_openstreetmap_node(self, node):
    pass


def setup(app):
    app.add_node(openstreetmap,
                 html=(visit_openstreetmap_node, depart_openstreetmap_node))
    app.add_directive('openstreetmap', OpenStreetMapDirective)
# -*- coding: utf-8 -*-
"""
    sphinxcontrib.openstreetmap
    ===========================

    Embed OpenStreetMap on your documentation.

    :copyright: Copyright 2015 HAYASHI Kentaro <[email protected]>
    :license: BSD, see LICENSE for details.
"""

from docutils import nodes
from docutils.parsers.rst import directives
from sphinx.util.compat import Directive


class openstreetmap(nodes.General, nodes.Element):
    pass


class OpenStreetMapDirective(Directive):
    """Directive for embedding OpenStreetMap"""

    has_content = False
    option_spec = {
        'id': directives.unchanged,
        'label': directives.unchanged,
    }

    def run(self):
        node = openstreetmap()
        if 'id' in self.options:
            node['id'] = self.options['id']
        else:
            msg = ('openstreetmap directive needs uniqueue id for map data')
            return [document.reporter.warning(msg, line=self.lineno)]

        return [node]


def visit_openstreetmap_node(self, node):
    self.body.append("<div id='openstreetmap' style='color:red'>OpenStreetMap directive</div>")


def depart_openstreetmap_node(self, node):
    pass


def setup(app):
    app.add_node(openstreetmap,
                 html=(visit_openstreetmap_node, depart_openstreetmap_node))
    app.add_directive('openstreetmap', OpenStreetMapDirective)
Remove marker from option spec
Remove marker from option spec
Python
bsd-2-clause
kenhys/sphinxcontrib-openstreetmap,kenhys/sphinxcontrib-openstreetmap
8ce1def3020570c8a3e370261fc9c7f027202bdf
owapi/util.py
owapi/util.py
""" Useful utilities. """ import json from kyokai import Request from kyokai.context import HTTPRequestContext def jsonify(func): """ JSON-ify the response from a function. """ async def res(ctx: HTTPRequestContext): result = await func(ctx) assert isinstance(ctx.request, Request) if isinstance(result, tuple): new_result = {**{"_request": {"route": ctx.request.path, "api_ver": 1}}, **result[0]} if len(result) == 1: return json.dumps(new_result), 200, {"Content-Type": "application/json"} elif len(result) == 2: return json.dumps(new_result[0]), result[1], {"Content-Type": "application/json"} else: return json.dumps(new_result), result[1], {**{"Content-Type": "application/json"}, **result[2]} else: new_result = {**{"_request": {"route": ctx.request.path, "api_ver": 1}}, **result} return json.dumps(new_result), 200, {"Content-Type": "application/json"} return res
""" Useful utilities. """ import json import aioredis from kyokai import Request from kyokai.context import HTTPRequestContext async def with_cache(ctx: HTTPRequestContext, func, *args, expires=300): """ Run a coroutine with cache. Stores the result in redis. """ assert isinstance(ctx.redis, aioredis.Redis) built = func.__name__ + repr(args) # Check for the key. # Uses a simple func name + repr(args) as the key to use. got = await ctx.redis.get(built) if got: return got.decode() # Call the function. result = await func(ctx, *args) # Store the result as cached. await ctx.redis.set(built, result, expire=expires) return result def jsonify(func): """ JSON-ify the response from a function. """ async def res(ctx: HTTPRequestContext): result = await func(ctx) assert isinstance(ctx.request, Request) if isinstance(result, tuple): new_result = {**{"_request": {"route": ctx.request.path, "api_ver": 1}}, **result[0]} if len(result) == 1: return json.dumps(new_result), 200, {"Content-Type": "application/json"} elif len(result) == 2: return json.dumps(new_result[0]), result[1], {"Content-Type": "application/json"} else: return json.dumps(new_result), result[1], {**{"Content-Type": "application/json"}, **result[2]} else: new_result = {**{"_request": {"route": ctx.request.path, "api_ver": 1}}, **result} return json.dumps(new_result), 200, {"Content-Type": "application/json"} return res
Add with_cache function for storing cached data
Add with_cache function for storing cached data
Python
mit
azah/OWAPI,SunDwarf/OWAPI
106ea580471387a3645877f52018ff2880db34f3
live_studio/config/forms.py
live_studio/config/forms.py
from django import forms

from .models import Config


class ConfigForm(forms.ModelForm):
    class Meta:
        model = Config
        exclude = ('created', 'user')


PAGES = (
    ('base',),
    ('distribution',),
    ('media_type',),
    ('architecture',),
    ('installer',),
    ('locale', 'keyboard_layout'),
)

WIZARD_FORMS = []
for fields in PAGES:
    meta = type('Meta', (), {
        'model': Config,
        'fields': fields,
    })
    WIZARD_FORMS.append(type('', (forms.ModelForm,), {'Meta': meta}))
from django import forms

from .models import Config


class ConfigForm(forms.ModelForm):
    class Meta:
        model = Config
        exclude = ('created', 'user')


PAGES = (
    ('base',),
    ('distribution',),
    ('media_type',),
    ('architecture',),
    ('installer',),
    ('locale', 'keyboard_layout'),
)

WIZARD_FORMS = []
for fields in PAGES:
    meta = type('Meta', (), {
        'model': Config,
        'fields': fields,
        'widgets': {
            'base': forms.RadioSelect(),
            'distribution': forms.RadioSelect(),
            'media_type': forms.RadioSelect(),
            'architecture': forms.RadioSelect(),
            'installer': forms.RadioSelect(),
        },
    })
    WIZARD_FORMS.append(type('', (forms.ModelForm,), {'Meta': meta}))
Use radio buttons for most of the interface.
Use radio buttons for most of the interface.

Signed-off-by: Chris Lamb <[email protected]>
Python
agpl-3.0
lamby/live-studio,lamby/live-studio,lamby/live-studio,debian-live/live-studio,debian-live/live-studio,debian-live/live-studio
6b01cdc18fce9277991fc5628f1d6c904ad47ee6
BuildAndRun.py
BuildAndRun.py
import os
import subprocess

name = "gobuildmaster"
current_hash = ""

if os.path.isfile('hash'):
    current_hash = open('hash').readlines()[0]
new_hash = os.popen('git rev-parse HEAD').readlines()[0]
open('hash','w').write(new_hash)

# Move the old version over
for line in os.popen('cp ' + name + ' old' + name).readlines():
    print line.strip()

# Rebuild
for line in os.popen('go build').readlines():
    print line.strip()

size_1 = os.path.getsize('./old' + name)
size_2 = os.path.getsize('./' + name)

running = len(os.popen('ps -ef | grep ' + name).readlines()) > 3

if size_1 != size_2 or new_hash != current_hash or not running:
    if not running:
        for line in os.popen('cat out.txt | mail -s "Crash Report ' + name + '" [email protected]').readlines():
            pass
    for line in os.popen('echo "" > out.txt').readlines():
        pass
    for line in os.popen('killall ' + name).readlines():
        pass
    subprocess.Popen(['./' + name, "--quiet=false"])
import os
import subprocess

name = "gobuildmaster"
current_hash = ""

if os.path.isfile('hash'):
    current_hash = open('hash').readlines()[0]
new_hash = os.popen('git rev-parse HEAD').readlines()[0]
open('hash','w').write(new_hash)

# Move the old version over
for line in os.popen('cp ' + name + ' old' + name).readlines():
    print line.strip()

# Rebuild
for line in os.popen('go build').readlines():
    print line.strip()

size_1 = os.path.getsize('./old' + name)
size_2 = os.path.getsize('./' + name)

lines = os.popen('ps -ef | grep ' + name).readlines()
running = False
for line in lines:
    if "./" + name in line:
        running = True

if size_1 != size_2 or new_hash != current_hash or not running:
    if not running:
        for line in os.popen('cat out.txt | mail -s "Crash Report ' + name + '" [email protected]').readlines():
            pass
    for line in os.popen('echo "" > out.txt').readlines():
        pass
    for line in os.popen('killall ' + name).readlines():
        pass
    subprocess.Popen(['./' + name, "--quiet=false"])
Fix running detection for master
Fix running detection for master
Python
apache-2.0
brotherlogic/gobuildmaster,brotherlogic/gobuildmaster,brotherlogic/gobuildmaster
ea453e4c050771ee96bd99f15e4f42449f28c7f2
tests/TestAssignmentRegex.py
tests/TestAssignmentRegex.py
import unittest

import src
import resources.Constants as const


class TestAssignments(unittest.TestCase):

    string_file = ''
    int_file = ''

    @classmethod
    def setUpClass(cls):
        cls.string_file = src.main("../resources/BasicStringAssignment.txt")
        cls.int_file = src.main("../resources/BasicIntegerAssignment.txt")

    def testString(self):
        test_string = '"test123ID"'
        exp_result = [("test123ID", const.STRING)]
        result = src.lexer(test_string)
        self.assertEqual(result, exp_result)

    def testVariableAssignmentString(self):
        exp_result = [('string', const.ID), ('=', const.STMT), ("words", const.STRING)]
        result = src.lexer(self.string_file)
        self.assertEqual(result, exp_result)

    def testDoubleQuoteString(self):
        test_string = '""""'
        exp_result = [('""', const.STRING)]
        result = src.lexer(test_string)
        self.assertEqual(result, exp_result)
import unittest

import src
import resources.Constants as const


class TestAssignments(unittest.TestCase):

    string_file = ''
    int_file = ''

    @classmethod
    def setUpClass(cls):
        cls.string_file = src.main("./resources/BasicStringAssignment.txt")
        cls.int_file = src.main("./resources/BasicIntegerAssignment.txt")

    def testString(self):
        test_string = '"test123ID"'
        exp_result = [("test123ID", const.STRING)]
        result = src.lexer(test_string)
        self.assertEqual(result, exp_result)

    def testVariableAssignmentString(self):
        exp_result = [('string', const.ID), ('=', const.STMT), ("words", const.STRING)]
        result = src.lexer(self.string_file)
        self.assertEqual(result, exp_result)

    def testDoubleQuoteString(self):
        test_string = '""""'
        exp_result = [('""', const.STRING)]
        result = src.lexer(test_string)
        self.assertEqual(result, exp_result)
Test changing file path for nosetests
Test changing file path for nosetests
Python
bsd-3-clause
sky-uk/bslint
3ca46f1407d8984ca5cbd1eb0581765386533d71
observatory/rcos/tests/test_rcos.py
observatory/rcos/tests/test_rcos.py
import pytest
from django.core.urlresolvers import reverse


@pytest.mark.django_db
def test_homepage(client):
    for url in (
            "/donor",
            "/students",
            "/courses",
            "/talks",
            "/programming-competition",
            "/achievements",
            "/urp-application",
            "/links-and-contacts",
            "/talk-sign-up",
            "/irc",
            "/faq",
            "/calendar",
            "/howtojoin",
            "/past-projects",
            ):
        #Load Site
        response = client.get(url)

        #Check for normal processing
        assert response.status_code in [200, 301]
import pytest
from django.core.urlresolvers import reverse


@pytest.mark.django_db
def test_homepage(client):
    for url in (
            "/",
            "/donor",
            "/students",
            "/courses",
            "/talks",
            "/programming-competition",
            "/achievements",
            "/urp-application",
            "/links-and-contacts",
            "/talk-sign-up",
            "/irc",
            "/faq",
            "/calendar",
            "/howtojoin",
            "/past-projects",
            ):
        #Load Site
        response = client.get(url)

        #Check for normal processing
        assert response.status_code in [200, 301]
Add / to rcos tests
rcos: Add / to rcos tests
Python
isc
rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory
5e3c6d6ab892a87ca27c05c01b39646bd339b3f2
tests/test_event.py
tests/test_event.py
import unittest

from event import Event


class EventTest(unittest.TestCase):
    def test_a_listener_is_notified_when_event_is_raised(self):
        called = False

        def listener():
            nonlocal called
            called = True

        event = Event()
        event.connect(listener)
        event.fire()
        self.assertTrue(called)

    def test_a_listener_is_passed_correct_parameters(self):
        params = ()

        def listener(*args, **kwargs):
            nonlocal params
            params = (args, kwargs)

        event = Event()
        event.connect(listener)
        event.fire(5, shape="square")
        self.assertEquals(((5, ), {"shape": "square"}), params)
import unittest

from event import Event


class Mock:
    def __init__(self):
        self.called = False
        self.params = ()

    def __call__(self, *args, **kwargs):
        self.called = True
        self.params = (args, kwargs)


class EventTest(unittest.TestCase):
    def test_a_listener_is_notified_when_event_is_raised(self):
        listener = Mock()

        event = Event()
        event.connect(listener)
        event.fire()
        self.assertTrue(listener.called)

    def test_a_listener_is_passed_correct_parameters(self):
        listener = Mock()

        event = Event()
        event.connect(listener)
        event.fire(5, shape="square")
        self.assertEquals(((5, ), {"shape": "square"}), listener.params)
Refactor a lightweight Mock class.
Refactor a lightweight Mock class.
Python
mit
bsmukasa/stock_alerter
291923f4ad1fc0041284a73d6edad43e6047fafc
workspace/commands/status.py
workspace/commands/status.py
from __future__ import absolute_import

import os
import logging

from workspace.commands import AbstractCommand
from workspace.commands.helpers import ProductPager
from workspace.scm import stat_repo, repos, product_name, all_branches, is_repo

log = logging.getLogger(__name__)


class Status(AbstractCommand):
    """ Show status on current product or all products in workspace """
    alias = 'st'

    def run(self):
        try:
            scm_repos = repos()
            in_repo = is_repo(os.getcwd())
            optional = len(scm_repos) == 1
            pager = ProductPager(optional=optional)

            for repo in scm_repos:
                stat_path = os.getcwd() if in_repo else repo
                output = stat_repo(stat_path, True)
                nothing_to_commit = 'nothing to commit' in output and 'Your branch is ahead of' not in output

                branches = all_branches(repo)
                child_branches = [b for b in branches if '@' in b]

                if len(child_branches) > 1:
                    if nothing_to_commit:
                        output = '# Branches: %s' % ' '.join(branches)
                        nothing_to_commit = False
                    elif len(branches) > 1:
                        output = '# Branches: %s\n#\n%s' % (' '.join(branches), output)

                if output and not nothing_to_commit:
                    pager.write(product_name(repo), output)
        finally:
            pager.close_and_wait()
from __future__ import absolute_import

import os
import logging

from workspace.commands import AbstractCommand
from workspace.commands.helpers import ProductPager
from workspace.scm import stat_repo, repos, product_name, all_branches, is_repo

log = logging.getLogger(__name__)


class Status(AbstractCommand):
    """ Show status on current product or all products in workspace """
    alias = 'st'

    def run(self):
        try:
            scm_repos = repos()
            in_repo = is_repo(os.getcwd())
            optional = len(scm_repos) == 1
            pager = ProductPager(optional=optional)

            for repo in scm_repos:
                stat_path = os.getcwd() if in_repo else repo
                output = stat_repo(stat_path, True)
                nothing_to_commit = 'nothing to commit' in output and 'Your branch is ahead of' not in output

                branches = all_branches(repo)
                child_branches = [b for b in branches if '@' in b]

                if len(child_branches) > 1 or len(scm_repos) == 1:
                    if nothing_to_commit:
                        output = '# Branches: %s' % ' '.join(branches)
                        nothing_to_commit = False
                    elif len(branches) > 1:
                        output = '# Branches: %s\n#\n%s' % (' '.join(branches), output)

                if output and not nothing_to_commit:
                    pager.write(product_name(repo), output)
        finally:
            pager.close_and_wait()
Fix bug to display all branches when there is only 1 repo
Fix bug to display all branches when there is only 1 repo
Python
mit
maxzheng/workspace-tools
d0ad6a4310e37a3d6dc853d2930640053f89ae05
tests/benchmarks/__init__.py
tests/benchmarks/__init__.py
"""Benchmarks for graphql Benchmarks are disabled (only executed as tests) by default in setup.cfg. You can enable them with --benchmark-enable if your want to execute them. E.g. in order to execute all the benchmarks with tox using Python 3.7:: tox -e py37 -- -k benchmarks --benchmark-enable """
Add docstring explaining how to run benchmarks
Add docstring explaining how to run benchmarks
Python
mit
graphql-python/graphql-core
b18bdf11141cf47319eed9ba2b861ebc287cf5ff
pyqs/utils.py
pyqs/utils.py
import base64
import json
import pickle


def decode_message(message):
    message_body = message.get_body()
    json_body = json.loads(message_body)
    if 'task' in message_body:
        return json_body
    else:
        # Fallback to processing celery messages
        return decode_celery_message(json_body)


def decode_celery_message(json_task):
    message = base64.decodestring(json_task['body'])
    return pickle.loads(message)


def function_to_import_path(function):
    return "{}.{}".format(function.__module__, function.func_name)
import base64
import json
import pickle


def decode_message(message):
    message_body = message.get_body()
    json_body = json.loads(message_body)
    if 'task' in message_body:
        return json_body
    else:
        # Fallback to processing celery messages
        return decode_celery_message(json_body)


def decode_celery_message(json_task):
    message = base64.decodestring(json_task['body'])
    try:
        return json.loads(message)
    except ValueError:
        pass
    return pickle.loads(message)


def function_to_import_path(function):
    return "{}.{}".format(function.__module__, function.func_name)
Add fallback for loading json encoded celery messages
Add fallback for loading json encoded celery messages
Python
mit
spulec/PyQS
a163d7970edbd4a483ddf7d8e20a7c0ab682e0c7
package/setup.py
package/setup.py
#!/usr/bin/env python

import os
import sys

import {{ project.repo_name }}

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    sys.exit()

readme = open('README.rst', 'rt').read()
history = open('HISTORY.rst', 'rt').read()

setup(
    name='{{ project.repo_name }}',
    version={{ project.repo_name }}.__version__,
    description='{{ project.project_short_description }}',
    long_description=readme + '\n\n' + history,
    author='{{ project.full_name }}',
    author_email='{{ project.email }}',
    url='https://github.com/{{ project.github_username }}/{{ project.repo_name }}',
    packages=[
        '{{ project.repo_name }}',
    ],
    include_package_data=True,
    install_requires=[
    ],
    license="BSD",
    zip_safe=False,
    keywords='{{ project.repo_name }}',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
    ],
    test_suite='tests',
)
#!/usr/bin/env python

import os
import sys

import {{ project.repo_name }}

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    sys.exit()

readme = open('README.rst', 'rt').read()
history = open('HISTORY.rst', 'rt').read()

setup(
    name='{{ project.repo_name }}',
    version={{ project.repo_name }}.__version__,
    description='{{ project.project_short_description }}',
    long_description=readme + '\n\n' + history,
    author='{{ project.full_name }}',
    author_email='{{ project.email }}',
    url='https://github.com/{{ project.github_username }}/{{ project.repo_name }}',
    packages=[
        '{{ project.repo_name }}',
    ],
    package_dir={'{{ project.repo_name }}': '{{ project.repo_name }}'},
    include_package_data=True,
    install_requires=[
    ],
    license="BSD",
    zip_safe=False,
    keywords='{{ project.repo_name }}',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
    ],
    test_suite='tests',
)
Add package_dir which uses project.repo_url.
Add package_dir which uses project.repo_url.
Python
bsd-2-clause
rockymeza/cookiecutter-djangoapp,aeroaks/cookiecutter-pyqt4,rockymeza/cookiecutter-djangoapp
77b87f5657583a5418d57f712b52bbcd6e9421aa
puzzle.py
puzzle.py
#!/usr/bin/python3


class Puzzle:
    def get_all_exits(self, graph):
        exits = []
        for key, value in graph.items():
            for item in value:
                if 'Exit' in item:
                    exits += item
        return exits

    def find_all_paths(self, graph, start, end, path=None):
        if path is None:
            path = []
        path = path + [start]
        if start == end:
            return [path]
        if start not in graph:
            return []
        paths = []
        for node in graph[start]:
            if node not in path:
                newpaths = self.find_all_paths(graph, node, end, path)
                for newpath in newpaths:
                    paths.append(newpath)
        return paths

    def solve(self, graph=None):
        unique_paths = []
        for exit in self.get_all_exits(graph):
            for start, connected_nodes in graph.items():
                unique_paths += self.find_all_paths(graph, start, exit)
        return unique_paths
#!/usr/bin/python3


class Puzzle:
    def get_all_exits(self, graph):
        exits = []
        for root_node, connected_nodes in graph.items():
            for node in connected_nodes:
                if 'Exit' in node:
                    exits += node
        return exits

    def find_all_paths(self, graph, start, end, path=None):
        if path is None:
            path = []
        path = path + [start]
        if start == end:
            return [path]
        if start not in graph:
            return []
        paths = []
        for node in graph[start]:
            if node not in path:
                newpaths = self.find_all_paths(graph, node, end, path)
                for newpath in newpaths:
                    paths.append(newpath)
        return paths

    def solve(self, graph=None):
        unique_paths = []
        for exit in self.get_all_exits(graph):
            for start, connected_nodes in graph.items():
                unique_paths += self.find_all_paths(graph, start, exit)
        return unique_paths
Rename vars in get_all_exits to make it more clear
Rename vars in get_all_exits to make it more clear
Python
mit
aaronshaver/graph-unique-paths
0418027b186f146ff75170ecf5c8e63c3dab3cc1
treeherder/client/setup.py
treeherder/client/setup.py
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from setuptools import setup

version = '1.1'

setup(name='treeherder-client',
      version=version,
      description="Python library to submit data to treeherder-service",
      long_description="""\
""",
      classifiers=[],  # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
      keywords='',
      author='Mozilla Automation and Testing Team',
      author_email='[email protected]',
      url='https://github.com/mozilla/treeherder-client',
      license='MPL',
      packages=['thclient'],
      zip_safe=False,
      install_requires=['oauth2'],
      test_suite='thclient.tests',
      tests_require=["mock"],
      )
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from setuptools import setup

version = '1.1'

setup(name='treeherder-client',
      version=version,
      description="Python library to submit data to treeherder-service",
      long_description="""\
""",
      classifiers=[
          'Environment :: Console',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
          'Natural Language :: English',
          'Operating System :: OS Independent',
          'Programming Language :: Python',
          'Topic :: Software Development :: Libraries :: Python Modules',
      ],
      keywords='',
      author='Mozilla Automation and Testing Team',
      author_email='[email protected]',
      url='https://github.com/mozilla/treeherder-client',
      license='MPL',
      packages=['thclient'],
      zip_safe=False,
      install_requires=['oauth2'],
      test_suite='thclient.tests',
      tests_require=["mock"],
      )
Add various classifications for pypi
Add various classifications for pypi
Python
mpl-2.0
rail/treeherder,glenn124f/treeherder,parkouss/treeherder,tojon/treeherder,adusca/treeherder,akhileshpillai/treeherder,glenn124f/treeherder,adusca/treeherder,avih/treeherder,wlach/treeherder,moijes12/treeherder,akhileshpillai/treeherder,avih/treeherder,vaishalitekale/treeherder,vaishalitekale/treeherder,rail/treeherder,moijes12/treeherder,edmorley/treeherder,tojon/treeherder,deathping1994/treeherder,jgraham/treeherder,tojonmz/treeherder,moijes12/treeherder,jgraham/treeherder,moijes12/treeherder,wlach/treeherder,wlach/treeherder,kapy2010/treeherder,wlach/treeherder,parkouss/treeherder,deathping1994/treeherder,adusca/treeherder,tojonmz/treeherder,vaishalitekale/treeherder,edmorley/treeherder,jgraham/treeherder,deathping1994/treeherder,glenn124f/treeherder,KWierso/treeherder,wlach/treeherder,kapy2010/treeherder,deathping1994/treeherder,parkouss/treeherder,gbrmachado/treeherder,parkouss/treeherder,glenn124f/treeherder,akhileshpillai/treeherder,sylvestre/treeherder,adusca/treeherder,avih/treeherder,avih/treeherder,KWierso/treeherder,moijes12/treeherder,sylvestre/treeherder,akhileshpillai/treeherder,gbrmachado/treeherder,vaishalitekale/treeherder,tojonmz/treeherder,parkouss/treeherder,rail/treeherder,sylvestre/treeherder,jgraham/treeherder,rail/treeherder,jgraham/treeherder,avih/treeherder,KWierso/treeherder,gbrmachado/treeherder,moijes12/treeherder,gbrmachado/treeherder,tojon/treeherder,glenn124f/treeherder,deathping1994/treeherder,sylvestre/treeherder,edmorley/treeherder,tojonmz/treeherder,gbrmachado/treeherder,vaishalitekale/treeherder,tojon/treeherder,tojonmz/treeherder,gbrmachado/treeherder,vaishalitekale/treeherder,glenn124f/treeherder,kapy2010/treeherder,kapy2010/treeherder,tojonmz/treeherder,sylvestre/treeherder,avih/treeherder,edmorley/treeherder,kapy2010/treeherder,KWierso/treeherder,akhileshpillai/treeherder,rail/treeherder,parkouss/treeherder,deathping1994/treeherder,jgraham/treeherder,akhileshpillai/treeherder,wlach/treeherder,rail/treeherder,adusca/treeherder,adusca/treeherder,sylvestre/treeherder
f7e2bcf941e2a15a3bc28ebf3f15244df6f0d758
posts/versatileimagefield.py
posts/versatileimagefield.py
from django.conf import settings
from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.registry import versatileimagefield_registry

from PIL import Image, ImageDraw, ImageFont
from io import BytesIO


class Watermark(FilteredImage):

    def process_image(self, image, image_format, save_kwargs={}):
        """
        Returns a BytesIO instance of `image` with inverted colors
        """
        if image.mode != 'RGBA':
            image = image.convert('RGBA')
        txt = Image.new('RGBA', image.size, (255,255,255,0))

        fontsize = int(image.size[1] * 0.1)

        # get a font
        fnt = ImageFont.truetype(
            '/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf',
            fontsize,
        )
        # get a drawing context
        d = ImageDraw.Draw(txt)

        # draw text, half opacity
        d.text(
            (10, image.size[1] - 10 - fontsize),
            settings.WATERMARK_TEXT,
            font=fnt,
            fill=(255,255,255,30)
        )

        out = Image.alpha_composite(image, txt)
        out = out.convert('RGB')

        imagefile = BytesIO()
        out.save(
            imagefile,
            **save_kwargs
        )
        return imagefile


versatileimagefield_registry.register_filter('watermark', Watermark)
import os.path

from django.conf import settings
from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.registry import versatileimagefield_registry

from PIL import Image, ImageDraw, ImageFont
from io import BytesIO


class Watermark(FilteredImage):

    def process_image(self, image, image_format, save_kwargs={}):
        """
        Returns a BytesIO instance of `image` with inverted colors
        """
        if image.mode != 'RGBA':
            image = image.convert('RGBA')
        txt = Image.new('RGBA', image.size, (255,255,255,0))

        height = image.size[1]
        fontsize = int(image.size[1] * 0.1)

        # get a font
        fnt = ImageFont.truetype(
            os.path.join(
                os.path.dirname(os.path.dirname(__file__)),
                'font',
                'conthrax-sb.ttf'
            ),
            fontsize,
        )
        # get a drawing context
        d = ImageDraw.Draw(txt)

        # draw text, half opacity
        d.text(
            (10 + fontsize * .2, height - 10 - fontsize - fontsize * .2),
            settings.WATERMARK_TEXT,
            font=fnt,
            fill=(255,255,255,30)
        )

        out = Image.alpha_composite(image, txt)
        out = out.convert('RGB')

        imagefile = BytesIO()
        out.save(
            imagefile,
            **save_kwargs
        )
        return imagefile


versatileimagefield_registry.register_filter('watermark', Watermark)
Use custom font for watermark
Use custom font for watermark

Signed-off-by: Michal Čihař <[email protected]>
Python
agpl-3.0
nijel/photoblog,nijel/photoblog
01e62119750d0737e396358dbf45727dcbb5732f
tests/__init__.py
tests/__init__.py
import sys
import unittest


def main():
    if sys.version_info[0] >= 3:
        from unittest.main import main
        main(module=None)
    else:
        unittest.main()


if __name__ == '__main__':
    main()
from unittest.main import main

if __name__ == '__main__':
    main(module=None, verbosity=2)
Drop Python 2 support in tests
Drop Python 2 support in tests
Python
bsd-3-clause
retext-project/pymarkups,mitya57/pymarkups
7241801672c9ff40526b558366ba8869ba86cf36
project/circleci_settings.py
project/circleci_settings.py
# -*- coding: utf-8 -*-

DEBUG = True

LOCAL_DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'circle_test',
        'USER': 'circleci',
        'PASSWORD': '',
        'HOST': 'localhost',
        'PORT': '5432',
    }
}

LOCALLY_INSTALLED_APPS = [
]

ENABLE_EMAILS = False

LOCALLY_ALLOWED_HOSTS = [
]

ADMINS = []
# -*- coding: utf-8 -*-

DEBUG = True

LOCAL_DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'circle_test',
        'USER': 'circleci',
        'PASSWORD': '',
        'HOST': 'localhost',
        'PORT': '5432',
    }
}

LOCALLY_INSTALLED_APPS = [
]

ENABLE_EMAILS = False

LOCALLY_ALLOWED_HOSTS = [
]

ADMINS = []

SECRET_KEY = 'CHANGE ME'
Add missing secret key to circle ci settings
Add missing secret key to circle ci settings
Python
mit
magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3
2b74c8714b659ccf5faa615e9b5c4c4559f8d9c8
artbot_website/views.py
artbot_website/views.py
from django.shortcuts import render
from datetime import date, datetime, timedelta

from .models import Event


def index(request):
    if date.today().isoweekday() in [5,6,7]:
        weekend_start = date.today()
    else:
        weekend_start = date.today() + timedelta((5 - date.today().isoweekday()) % 7 )

    events = Event.objects.filter(start__lte = weekend_start, end__gte = weekend_start).order_by('-start')

    return render(request, 'index.html', {'events': events})
from django.shortcuts import render
from datetime import date, datetime, timedelta

from .models import Event


def index(request):
    if date.today().isoweekday() in [5,6,7]:
        weekend_start = date.today()
    else:
        weekend_start = date.today() + timedelta((5 - date.today().isoweekday()) % 7 )

    events = Event.objects.filter(start__lte = weekend_start, end__gte = weekend_start, published = True).order_by('-start')

    return render(request, 'index.html', {'events': events})
Index now only displays published articles.
Index now only displays published articles.
Python
mit
coreymcdermott/artbot,coreymcdermott/artbot
285eeb1c7565f8fa9fb6ba38ed843601f81cdf4e
tmc/models/document_topic.py
tmc/models/document_topic.py
# -*- coding: utf-8 -*-

from odoo import api, fields, models


class DocumentTopic(models.Model):
    _name = 'tmc.document_topic'
    _description = 'document_topic'
    _inherit = 'tmc.category'

    first_parent_id = fields.Many2one(
        comodel_name='tmc.document_topic',
        compute='_compute_first_parent',
        store=True
    )

    document_ids = fields.Many2many(
        comodel_name='tmc.document',
        relation='document_main_topic_rel',
        column1='main_topic_ids'
    )

    parent_id = fields.Many2one(
        comodel_name='tmc.document_topic',
        string='Main Topic'
    )

    child_ids = fields.One2many(
        comodel_name='tmc.document_topic',
        inverse_name='parent_id'
    )

    important = fields.Boolean()

    @api.multi
    @api.depends('parent_id', 'parent_id.parent_id')
    def _compute_first_parent(self):
        for document_topic in self:
            first_parent_id = False
            parent = document_topic.parent_id
            while parent:
                first_parent_id = parent.id
                parent = parent.parent_id
            document_topic.first_parent_id = first_parent_id
# -*- coding: utf-8 -*-

from odoo import api, fields, models


class DocumentTopic(models.Model):
    _name = 'tmc.document_topic'
    _description = 'document_topic'
    _inherit = 'tmc.category'
    _order = 'name'

    first_parent_id = fields.Many2one(
        comodel_name='tmc.document_topic',
        compute='_compute_first_parent',
        store=True
    )

    document_ids = fields.Many2many(
        comodel_name='tmc.document',
        relation='document_main_topic_rel',
        column1='main_topic_ids'
    )

    parent_id = fields.Many2one(
        comodel_name='tmc.document_topic',
        string='Main Topic'
    )

    child_ids = fields.One2many(
        comodel_name='tmc.document_topic',
        inverse_name='parent_id'
    )

    important = fields.Boolean()

    @api.multi
    @api.depends('parent_id', 'parent_id.parent_id')
    def _compute_first_parent(self):
        for document_topic in self:
            first_parent_id = False
            parent = document_topic.parent_id
            while parent:
                first_parent_id = parent.id
                parent = parent.parent_id
            document_topic.first_parent_id = first_parent_id
Order document topics by name
[IMP] Order document topics by name
Python
agpl-3.0
tmcrosario/odoo-tmc
ee9f1058107f675f7f12f822ead3feb78ec10d9b
wagtail/utils/urlpatterns.py
wagtail/utils/urlpatterns.py
from __future__ import absolute_import, unicode_literals

from functools import update_wrapper


def decorate_urlpatterns(urlpatterns, decorator):
    for pattern in urlpatterns:
        if hasattr(pattern, 'url_patterns'):
            decorate_urlpatterns(pattern.url_patterns, decorator)

        if hasattr(pattern, '_callback'):
            pattern._callback = update_wrapper(decorator(pattern.callback), pattern.callback)

    return urlpatterns
from __future__ import absolute_import, unicode_literals

from functools import update_wrapper

from django import VERSION as DJANGO_VERSION


def decorate_urlpatterns(urlpatterns, decorator):
    """Decorate all the views in the passed urlpatterns list with the given decorator"""
    for pattern in urlpatterns:
        if hasattr(pattern, 'url_patterns'):
            # this is an included RegexURLResolver; recursively decorate the views
            # contained in it
            decorate_urlpatterns(pattern.url_patterns, decorator)

        if DJANGO_VERSION < (1, 10):
            # Prior to Django 1.10, RegexURLPattern accepted both strings and callables as
            # the callback parameter; `callback` is a property that consistently returns it as
            # a callable.
            #
            # * if RegexURLPattern was given a string, _callback will be None, and will be
            #   populated on the first call to the `callback` property
            # * if RegexURLPattern was given a callable, _callback will be set to that callable,
            #   and the `callback` property will return it
            #
            # In either case, we wrap the result of `callback` and write it back to `_callback`,
            # so that future calls to `callback` will return our wrapped version.
            if hasattr(pattern, '_callback'):
                pattern._callback = update_wrapper(decorator(pattern.callback), pattern.callback)
        else:
            # In Django 1.10 and above, RegexURLPattern only accepts a callable as the callback
            # parameter; this is directly accessible as the `callback` attribute.
            if getattr(pattern, 'callback', None):
                pattern.callback = update_wrapper(decorator(pattern.callback), pattern.callback)

    return urlpatterns
Test for RegexURLPattern.callback on Django 1.10
Test for RegexURLPattern.callback on Django 1.10

Thanks Paul J Stevens for the initial patch, Tim Graham for review
and Matt Westcott for tweak of initial patch
Python
bsd-3-clause
nealtodd/wagtail,torchbox/wagtail,nutztherookie/wagtail,nealtodd/wagtail,kurtw/wagtail,mixxorz/wagtail,rsalmaso/wagtail,kurtw/wagtail,jnns/wagtail,kurtw/wagtail,nutztherookie/wagtail,wagtail/wagtail,Toshakins/wagtail,mixxorz/wagtail,gasman/wagtail,iansprice/wagtail,rsalmaso/wagtail,thenewguy/wagtail,kaedroho/wagtail,thenewguy/wagtail,mixxorz/wagtail,gasman/wagtail,kurtrwall/wagtail,nealtodd/wagtail,chrxr/wagtail,mikedingjan/wagtail,mikedingjan/wagtail,timorieber/wagtail,rsalmaso/wagtail,nilnvoid/wagtail,FlipperPA/wagtail,takeflight/wagtail,nilnvoid/wagtail,wagtail/wagtail,rsalmaso/wagtail,chrxr/wagtail,torchbox/wagtail,Toshakins/wagtail,wagtail/wagtail,FlipperPA/wagtail,Toshakins/wagtail,takeflight/wagtail,iansprice/wagtail,chrxr/wagtail,jnns/wagtail,kurtrwall/wagtail,gasman/wagtail,takeflight/wagtail,mixxorz/wagtail,Toshakins/wagtail,wagtail/wagtail,mikedingjan/wagtail,thenewguy/wagtail,nimasmi/wagtail,iansprice/wagtail,thenewguy/wagtail,jnns/wagtail,kurtw/wagtail,nilnvoid/wagtail,zerolab/wagtail,thenewguy/wagtail,nimasmi/wagtail,nutztherookie/wagtail,kurtrwall/wagtail,mikedingjan/wagtail,timorieber/wagtail,kaedroho/wagtail,mixxorz/wagtail,kaedroho/wagtail,zerolab/wagtail,chrxr/wagtail,timorieber/wagtail,nimasmi/wagtail,torchbox/wagtail,kaedroho/wagtail,iansprice/wagtail,nutztherookie/wagtail,jnns/wagtail,FlipperPA/wagtail,rsalmaso/wagtail,kaedroho/wagtail,wagtail/wagtail,takeflight/wagtail,gasman/wagtail,timorieber/wagtail,zerolab/wagtail,kurtrwall/wagtail,nealtodd/wagtail,zerolab/wagtail,FlipperPA/wagtail,nilnvoid/wagtail,gasman/wagtail,torchbox/wagtail,zerolab/wagtail,nimasmi/wagtail
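Aside: the fix above hinges on functools.update_wrapper, which copies a view's metadata onto its wrapper so introspection keeps working after decoration. A minimal stand-alone sketch of that mechanism — the decorator and view names here are hypothetical, not Wagtail code:

from functools import update_wrapper

def require_login(view):
    # Hypothetical decorator standing in for whatever `decorator` is passed
    # to decorate_urlpatterns().
    def wrapped(request, *args, **kwargs):
        # ... an authentication check would go here ...
        return view(request, *args, **kwargs)
    # Copy __name__, __doc__, __module__, etc. from the original view so
    # tools that introspect the callback still see the real view.
    return update_wrapper(wrapped, view)

def my_view(request):
    """Render something."""
    return 'response'

decorated = require_login(my_view)
assert decorated.__name__ == 'my_view'  # metadata preserved by update_wrapper
assert decorated.__doc__ == 'Render something.'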
558a25b6f3b77d6a1b087819dc40f1aa7584e7fb
sky/tools/release_packages.py
sky/tools/release_packages.py
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# See https://github.com/domokit/sky_engine/wiki/Release-process

import os
import subprocess
import sys


def main():
    engine_root = os.path.abspath('.')
    if not os.path.exists(os.path.join(engine_root, 'sky')):
        print "Cannot find //sky. Is %s the Flutter engine repository?" % engine_root
        return 1

    pub_path = os.path.join(engine_root, 'third_party/dart-sdk/dart-sdk/bin/pub')

    if args.publish:
        subprocess.check_call([pub_path, 'publish', '--force'],
            cwd=os.path.join(engine_root, 'sky/packages/sky'))
        subprocess.check_call([pub_path, 'publish', '--force'],
            cwd=os.path.join(engine_root, 'skysprites'))


if __name__ == '__main__':
    sys.exit(main())
#!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# See https://github.com/domokit/sky_engine/wiki/Release-process

import os
import subprocess
import sys


def main():
    engine_root = os.path.abspath('.')
    if not os.path.exists(os.path.join(engine_root, 'sky')):
        print "Cannot find //sky. Is %s the Flutter engine repository?" % engine_root
        return 1

    pub_path = os.path.join(engine_root, 'third_party/dart-sdk/dart-sdk/bin/pub')

    if args.publish:
        subprocess.check_call([pub_path, 'publish', '--force'],
            cwd=os.path.join(engine_root, 'sky/packages/sky'))
        subprocess.check_call([pub_path, 'publish', '--force'],
            cwd=os.path.join(engine_root, 'sky/packages/flx'))
        subprocess.check_call([pub_path, 'publish', '--force'],
            cwd=os.path.join(engine_root, 'skysprites'))


if __name__ == '__main__':
    sys.exit(main())
Add FLX to the release train
Add FLX to the release train
Python
bsd-3-clause
krisgiesing/sky_engine,devoncarew/engine,abarth/sky_engine,tvolkert/engine,Hixie/sky_engine,devoncarew/engine,mpcomplete/flutter_engine,mpcomplete/engine,devoncarew/sky_engine,chinmaygarde/sky_engine,jamesr/sky_engine,flutter/engine,chinmaygarde/sky_engine,mpcomplete/engine,lyceel/engine,devoncarew/engine,Hixie/sky_engine,tvolkert/engine,krisgiesing/sky_engine,mikejurka/engine,mpcomplete/engine,mpcomplete/flutter_engine,jamesr/flutter_engine,jamesr/sky_engine,jamesr/sky_engine,flutter/engine,cdotstout/sky_engine,aam/engine,lyceel/engine,devoncarew/engine,mpcomplete/flutter_engine,Hixie/sky_engine,rmacnak-google/engine,jason-simmons/flutter_engine,jamesr/sky_engine,abarth/sky_engine,tvolkert/engine,jason-simmons/flutter_engine,lyceel/engine,lyceel/engine,aam/engine,jason-simmons/flutter_engine,jason-simmons/sky_engine,jason-simmons/sky_engine,mikejurka/engine,jason-simmons/flutter_engine,jamesr/flutter_engine,mikejurka/engine,aam/engine,devoncarew/sky_engine,chinmaygarde/sky_engine,mpcomplete/engine,jason-simmons/sky_engine,chinmaygarde/flutter_engine,mpcomplete/engine,chinmaygarde/sky_engine,jason-simmons/flutter_engine,jamesr/flutter_engine,jamesr/sky_engine,tvolkert/engine,krisgiesing/sky_engine,flutter/engine,jason-simmons/sky_engine,tvolkert/engine,abarth/sky_engine,flutter/engine,krisgiesing/sky_engine,aam/engine,krisgiesing/sky_engine,aam/engine,Hixie/sky_engine,mikejurka/engine,krisgiesing/sky_engine,krisgiesing/sky_engine,cdotstout/sky_engine,rmacnak-google/engine,devoncarew/sky_engine,Hixie/sky_engine,devoncarew/sky_engine,rmacnak-google/engine,mikejurka/engine,chinmaygarde/flutter_engine,mpcomplete/flutter_engine,lyceel/engine,cdotstout/sky_engine,abarth/sky_engine,abarth/sky_engine,aam/engine,jason-simmons/flutter_engine,mikejurka/engine,flutter/engine,Hixie/sky_engine,aam/engine,devoncarew/sky_engine,rmacnak-google/engine,mikejurka/engine,jamesr/flutter_engine,jamesr/flutter_engine,devoncarew/sky_engine,jason-simmons/flutter_engine,devoncarew/engine,mpcomplete/engine,flutter/engine,jamesr/sky_engine,cdotstout/sky_engine,mikejurka/engine,flutter/engine,mpcomplete/flutter_engine,devoncarew/engine,aam/engine,devoncarew/sky_engine,jason-simmons/sky_engine,jason-simmons/sky_engine,tvolkert/engine,rmacnak-google/engine,cdotstout/sky_engine,jason-simmons/flutter_engine,chinmaygarde/sky_engine,lyceel/engine,flutter/engine,rmacnak-google/engine,cdotstout/sky_engine,Hixie/sky_engine,mpcomplete/flutter_engine,jamesr/flutter_engine,cdotstout/sky_engine,jason-simmons/sky_engine,jamesr/flutter_engine,abarth/sky_engine,jamesr/flutter_engine,devoncarew/engine,tvolkert/engine,chinmaygarde/sky_engine,lyceel/engine,mikejurka/engine,chinmaygarde/flutter_engine,mpcomplete/engine,rmacnak-google/engine,chinmaygarde/flutter_engine,abarth/sky_engine,Hixie/sky_engine,chinmaygarde/flutter_engine,chinmaygarde/flutter_engine,jamesr/flutter_engine,jamesr/sky_engine,mpcomplete/engine,chinmaygarde/flutter_engine,chinmaygarde/sky_engine
ef81c3eb8b54baeac5ef22843de736345c4d5523
snapboard/middleware/cache.py
snapboard/middleware/cache.py
from django.conf import settings
from django.core.cache import cache
from django.template import Template
from django.template.context import RequestContext

from snapboard.utils import get_response_cache_key, get_prefix_cache_key


class CachedTemplateMiddleware(object):
    def process_view(self, request, view_func, view_args, view_kwargs):
        # TODO: In DEV don't try to grab media out of the cache.
        if settings.DEBUG and "." in request.path:
            return

        response = None
        if request.method == "GET":
            prefix_key = get_prefix_cache_key(request)
            prefix = cache.get(prefix_key, "0")
            response_key = get_response_cache_key(prefix, request)
            response = cache.get(response_key)

        if response is None:
            response = view_func(request, *view_args, **view_kwargs)

        if response['content-type'].startswith('text/html'):
            t = Template(response.content)
            response.content = t.render(RequestContext(request))

        return response
from django.conf import settings
from django.core.cache import cache
from django.template import Template
from django.template.context import RequestContext

from snapboard.utils import get_response_cache_key, get_prefix_cache_key


class CachedTemplateMiddleware(object):
    def process_view(self, request, view_func, view_args, view_kwargs):
        # TODO: In DEV don't try to grab media out of the cache.
        if settings.DEBUG and "." in request.path:
            return

        # TODO: I can imagine a problem with this where a user writes
        # {% in his post %} ... which would then be rendered on the second pass.
        response = None
        if request.method == "GET":
            prefix_key = get_prefix_cache_key(request)
            prefix = cache.get(prefix_key, "0")
            response_key = get_response_cache_key(prefix, request)
            response = cache.get(response_key)

        if response is None:
            response = view_func(request, *view_args, **view_kwargs)

        if response['content-type'].startswith('text/html'):
            t = Template(response.content)
            response.content = t.render(RequestContext(request))

        return response
Comment about a possible attack vector.
Comment about a possible attack vector.
Python
bsd-3-clause
johnboxall/snapboard,johnboxall/snapboard
d2c2208b39c5715deebf8d24d5fa9096a945bdcd
script.py
script.py
import ast

import click

from parsing.parser import FileVisitor


@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
def cli(code, printed):
    """
    Parses a file.

    codegrapher [file_name]
    """
    parsed_code = ast.parse(code.read(), filename='code.py')
    visitor = FileVisitor()
    visitor.visit(parsed_code)

    if printed:
        click.echo('Classes in file:')
        for class_object in visitor.classes:
            click.echo('=' * 80)
            click.echo(class_object.name)
            click.echo(class_object.pprint())
            click.echo('')
import ast

import click

from parsing.parser import FileVisitor


@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
def cli(code, printed, remove_builtins):
    """
    Parses a file.

    codegrapher [file_name]
    """
    parsed_code = ast.parse(code.read(), filename='code.py')
    visitor = FileVisitor()
    visitor.visit(parsed_code)

    if printed:
        click.echo('Classes in file:')
        for class_object in visitor.classes:
            if remove_builtins:
                class_object.remove_builtins()
            click.echo('=' * 80)
            click.echo(class_object.name)
            click.echo(class_object.pprint())
            click.echo('')
Add builtin removal as an option to cli
Add builtin removal as an option to cli
Python
mit
LaurEars/codegrapher
1e8ecd09ce6dc44c4955f8bb2f81aa65232ad9a0
multi_schema/management/commands/loaddata.py
multi_schema/management/commands/loaddata.py
from django.core.management.commands import loaddata
from django.core.management.base import CommandError
from django.db import DatabaseError

from optparse import make_option

from ...models import Schema, template_schema


class Command(loaddata.Command):
    option_list = loaddata.Command.option_list + (
        make_option('--schema', action='store', dest='schema',
            help='Specify which schema to load schema-aware models to',
            default='__template__',
        ),
    )

    def handle(self, *app_labels, **options):
        schema_name = options.get('schema')
        if schema_name == '__template__':
            # Hmm, we don't want to accidentally write data to this, so
            # we should raise an exception if we are going to be
            # writing any schema-aware objects.
            schema = None
        else:
            try:
                schema = Schema.objects.get(schema=options.get('schema'))
            except Schema.DoesNotExist:
                raise CommandError('No Schema found named "%s"' % schema_name)
            schema.activate()

        super(Command, self).handle(*app_labels, **options)

        if schema:
            schema.deactivate()
from django.core.management.commands import loaddata
from django.core.management.base import CommandError
from django.db import DatabaseError

from optparse import make_option

from ...models import Schema, template_schema


class Command(loaddata.Command):
    option_list = loaddata.Command.option_list + (
        make_option('--schema', action='store', dest='schema',
            help='Specify which schema to load schema-aware models to',
            default='__template__',
        ),
    )

    def handle(self, *app_labels, **options):
        schema_name = options.get('schema')
        if schema_name == '__template__':
            # Hmm, we don't want to accidentally write data to this, so
            # we should raise an exception if we are going to be
            # writing any schema-aware objects.
            schema = None
        else:
            try:
                schema = Schema.objects.get(schema=options.get('schema'))
            except Schema.DoesNotExist:
                raise CommandError('No Schema found named "%s"' % schema_name)
            schema.activate()

        super(Command, self).handle(*app_labels, **options)

        if schema:
            schema.deactivate()

        for schema in Schema.objects.all():
            schema.create_schema()
Fix indenting. Create any schemas that were just loaded.
Fix indenting. Create any schemas that were just loaded.
Python
bsd-3-clause
luzfcb/django-boardinghouse,luzfcb/django-boardinghouse,luzfcb/django-boardinghouse
c5d22fd143f952ce5e0c86b9e8bce4a06fe47063
bigsi/storage/__init__.py
bigsi/storage/__init__.py
from bigsi.storage.berkeleydb import BerkeleyDBStorage
from bigsi.storage.redis import RedisStorage
from bigsi.storage.rocksdb import RocksDBStorage


def get_storage(config):
    return {
        "rocksdb": RocksDBStorage,
        "berkeleydb": BerkeleyDBStorage,
        "redis": RedisStorage,
    }[config["storage-engine"]](config["storage-config"])
from bigsi.storage.redis import RedisStorage

try:
    from bigsi.storage.berkeleydb import BerkeleyDBStorage
except ModuleNotFoundError:
    pass
try:
    from bigsi.storage.rocksdb import RocksDBStorage
except ModuleNotFoundError:
    pass


def get_storage(config):
    return {
        "rocksdb": RocksDBStorage,
        "berkeleydb": BerkeleyDBStorage,
        "redis": RedisStorage,
    }[config["storage-engine"]](config["storage-config"])
Allow import without optional requirements
Allow import without optional requirements
Python
mit
Phelimb/cbg,Phelimb/cbg,Phelimb/cbg,Phelimb/cbg
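Aside: the guarded imports above are the usual optional-dependency pattern — the module imports cleanly without the extra backends, and a missing backend only surfaces when it is actually used. A generic sketch of the same idea with a slightly more explicit failure mode (module and class names are hypothetical):

try:
    import fancy_backend  # hypothetical optional dependency
except ModuleNotFoundError:
    fancy_backend = None

def get_backend(name):
    if name == 'fancy':
        if fancy_backend is None:
            # Fail only when the optional backend is actually requested.
            raise RuntimeError("the 'fancy' backend needs the optional "
                               "'fancy_backend' package installed")
        return fancy_backend.Backend()
    raise KeyError(name)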
33505f9b4dfeead0b01ee1b8cf3f8f228476e866
openpassword/crypt_utils.py
openpassword/crypt_utils.py
from Crypto.Cipher import AES


def decrypt(data, key_iv):
    key = key_iv[0:16]
    iv = key_iv[16:]
    print(data)
    cipher = AES.new(key, AES.MODE_CBC, iv)
    return cipher.decrypt(data)


def encrypt(data, key_iv):
    key = key_iv[0:16]
    iv = key_iv[16:]
    cipher = AES.new(key, AES.MODE_CBC, iv)
    return cipher.encrypt(data)
from Crypto.Cipher import AES


def decrypt(data, key_iv):
    key = key_iv[0:16]
    iv = key_iv[16:]
    cipher = AES.new(key, AES.MODE_CBC, iv)
    return cipher.decrypt(data)


def encrypt(data, key_iv):
    key = key_iv[0:16]
    iv = key_iv[16:]
    cipher = AES.new(key, AES.MODE_CBC, iv)
    return cipher.encrypt(data)
Remove print statement from crypto utils...
Remove print statement from crypto utils...
Python
mit
openpassword/blimey,openpassword/blimey
f2fc7f1015fc24fdbb69069ac74a21437e94657b
xmantissa/plugins/sineoff.py
xmantissa/plugins/sineoff.py
from axiom import iaxiom, userbase

from xmantissa import website, offering, provisioning

from sine import sipserver, sinetheme

sineproxy = provisioning.BenefactorFactory(
    name = u'sineproxy',
    description = u'Sine SIP Proxy',
    benefactorClass = sipserver.SineBenefactor)

plugin = offering.Offering(
    name = u"Sine",

    description = u"""
    The Sine SIP proxy and registrar.
    """,

    siteRequirements = (
        (userbase.IRealm, userbase.LoginSystem),
        (None, website.WebSite),
        (None, sipserver.SIPServer)),

    appPowerups = (sipserver.SinePublicPage, ),

    benefactorFactories = (sineproxy,),
    loginInterfaces=(),
    themes = (sinetheme.XHTMLDirectoryTheme('base'),)
    )
from axiom import iaxiom, userbase

from xmantissa import website, offering, provisioning

from sine import sipserver, sinetheme

sineproxy = provisioning.BenefactorFactory(
    name = u'sineproxy',
    description = u'Sine SIP Proxy',
    benefactorClass = sipserver.SineBenefactor)

plugin = offering.Offering(
    name = u"Sine",

    description = u"""
    The Sine SIP proxy and registrar.
    """,

    siteRequirements = (
        (userbase.IRealm, userbase.LoginSystem),
        (None, website.WebSite),
        (None, sipserver.SIPServer)),

    appPowerups = (sipserver.SinePublicPage, ),

    benefactorFactories = (sineproxy,),
    themes = (sinetheme.XHTMLDirectoryTheme('base'),)
    )
Revert 5505 - introduced numerous regressions into the test suite
Revert 5505 - introduced numerous regressions into the test suite
Python
mit
habnabit/divmod-sine,twisted/sine
b2c8acb79ea267f9777f1f370b588a1d93b28d86
src/blockdiag_sphinxhelper.py
src/blockdiag_sphinxhelper.py
# -*- coding: utf-8 -*-
#  Copyright 2011 Takeshi KOMIYA
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.

from blockdiag import command, parser, builder, drawer
from blockdiag.utils import collections
from blockdiag.utils.fontmap import FontMap
from blockdiag.utils.rst.directives import blockdiag, BlockdiagDirective

(command, parser, builder, drawer, collections, FontMap,
 blockdiag, BlockdiagDirective)
# -*- coding: utf-8 -*-
#  Copyright 2011 Takeshi KOMIYA
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.

import blockdiag.parser
import blockdiag.builder
import blockdiag.drawer
core = blockdiag

import blockdiag.utils.bootstrap
import blockdiag.utils.collections
import blockdiag.utils.fontmap
utils = blockdiag.utils

from blockdiag.utils.rst import nodes
from blockdiag.utils.rst import directives

# FIXME: obsoleted interface (keep for compatibility)
from blockdiag import command, parser, builder, drawer
from blockdiag.utils import collections
from blockdiag.utils.fontmap import FontMap
from blockdiag.utils.rst.directives import blockdiag, BlockdiagDirective

(command, parser, builder, drawer, collections, FontMap,
 blockdiag, BlockdiagDirective)
Update interface for sphinxcontrib module
Update interface for sphinxcontrib module
Python
apache-2.0
aboyett/blockdiag,aboyett/blockdiag
8bfd49c7aef03f6d2ad541f466e9661b6acc5262
staticassets/compilers/sass.py
staticassets/compilers/sass.py
from .base import CommandCompiler


class SassCompiler(CommandCompiler):
    content_type = 'text/css'
    options = {'compass': True}
    command = 'sass'
    params = ['--trace']

    def compile(self, asset):
        if self.compass:
            self.params.append('--compass')

        if '.scss' in asset.attributes.extensions:
            self.params.append('--scss')

        return super(SassCompiler, self).compile(asset)
from .base import CommandCompiler


class SassCompiler(CommandCompiler):
    content_type = 'text/css'
    options = {'compass': True, 'scss': False}
    command = 'sass'
    params = ['--trace']

    def get_args(self):
        args = super(SassCompiler, self).get_args()

        if self.compass:
            args.append('--compass')

        if self.scss:
            args.append('--scss')

        return args
Fix args being appended continuously to SassCompiler
Fix args being appended continuously to SassCompiler
Python
mit
davidelias/django-staticassets,davidelias/django-staticassets,davidelias/django-staticassets
ed23b1a44263de0e0a3b34ead22cd149116c063a
src/ggrc/models/exceptions.py
src/ggrc/models/exceptions.py
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]

import re
from sqlalchemy.exc import IntegrityError


def translate_message(exception):
    """
    Translates db exceptions to something a user can understand.
    """
    message = exception.message
    if isinstance(exception, IntegrityError):
        # TODO: Handle not null, foreign key errors, uniqueness errors with compound keys
        duplicate_entry_pattern = re.compile(r'\(1062, u?"Duplicate entry (\'.*\') for key \'([^\']*)\'')
        matches = duplicate_entry_pattern.search(message)
        if matches:
            return u'The value ' + \
                matches.group(1) + \
                u' is already used for a ' + \
                ('title; title' if matches.group(2).startswith('uq_t_') else 'code; code') + \
                u' values must be unique.'
        else:
            return message
    else:
        return message


class ValidationError(Exception):
    pass
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]

import re
from sqlalchemy.exc import IntegrityError


def field_lookup(field_string):
    """Attempts to find relevant error field for uniqueness constraint
    error, given SQL error message; broken off from translate_message logic
    """
    output_format = "{0}; {0}"
    bad_field = 'code'  # assumes this field as a default
    if field_string.startswith('uq_t_'):
        bad_field = 'title'
    elif field_string.endswith('email'):
        bad_field = 'email'
    return output_format.format(bad_field)


def translate_message(exception):
    """
    Translates db exceptions to something a user can understand.
    """
    message = exception.message
    if isinstance(exception, IntegrityError):
        # TODO: Handle not null, foreign key errors, uniqueness errors with compound keys
        duplicate_entry_pattern = re.compile(r'\(1062, u?"Duplicate entry (\'.*\') for key \'([^\']*)\'')
        matches = duplicate_entry_pattern.search(message)
        if matches:
            return u'The value ' + \
                matches.group(1) + \
                u' is already used for another ' + \
                field_lookup(matches.group(2)) + \
                u' values must be unique.'
        else:
            return message
    else:
        return message


class ValidationError(Exception):
    pass
Make uniqueness error message recognize email duplicates.
Make uniqueness error message recognize email duplicates.
Python
apache-2.0
j0gurt/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,vladan-m/ggrc-core,vladan-m/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,uskudnik/ggrc-core,josthkko/ggrc-core,hasanalom/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,andrei-karalionak/ggrc-core,hyperNURb/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,andrei-karalionak/ggrc-core,hyperNURb/ggrc-core,AleksNeStu/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core,hasanalom/ggrc-core,hasanalom/ggrc-core,hasanalom/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,vladan-m/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,jmakov/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,selahssea/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core
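Aside: the regex in translate_message pulls the duplicate value and the key name out of MySQL's error-1062 text, and field_lookup then maps the key name to a user-facing field. Its behaviour can be checked in isolation; the sample error string below is invented for illustration:

import re

duplicate_entry_pattern = re.compile(
    r'\(1062, u?"Duplicate entry (\'.*\') for key \'([^\']*)\'')

# Invented sample of a MySQL duplicate-key error message:
sample = '(1062, u"Duplicate entry \'[email protected]\' for key \'uq_people_email\'")'
matches = duplicate_entry_pattern.search(sample)
assert matches.group(1) == "'[email protected]'"
assert matches.group(2) == 'uq_people_email'
# field_lookup('uq_people_email') would then yield 'email; email'.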
f522a464e3f58a9f2ed235b48382c9db15f66029
eva/layers/residual_block.py
eva/layers/residual_block.py
from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from keras.engine.topology import merge

from eva.layers.masked_convolution2d import MaskedConvolution2D


def ResidualBlock(model, filters):
    # 2h -> h
    block = Convolution2D(filters//2, 1, 1)(model)
    block = PReLU()(block)

    # h 3x3 -> h
    block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block)
    block = PReLU()(block)

    # h -> 2h
    block = Convolution2D(filters, 1, 1)(block)

    return PReLU()(Merge(mode='sum')([model, block]))


def ResidualBlockList(model, filters, length):
    for _ in range(length):
        model = ResidualBlock(model, filters)

    return model
from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from keras.engine.topology import merge

from eva.layers.masked_convolution2d import MaskedConvolution2D


def ResidualBlock(model, filters):
    # 2h -> h
    block = Convolution2D(filters//2, 1, 1)(model)
    block = PReLU()(block)

    # h 3x3 -> h
    block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block)
    block = PReLU()(block)

    # h -> 2h
    block = Convolution2D(filters, 1, 1)(block)

    return PReLU()(merge([model, block], mode='sum'))


def ResidualBlockList(model, filters, length):
    for _ in range(length):
        model = ResidualBlock(model, filters)

    return model
Use the functional merge; just for formatting
Use the functional merge; just for formatting
Python
apache-2.0
israelg99/eva
8a7a8c3589b1e3bd3a4d8b0dc832178be26117d3
mozaik_membership/wizards/base_partner_merge_automatic_wizard.py
mozaik_membership/wizards/base_partner_merge_automatic_wizard.py
# Copyright 2022 ACSONE SA/NV
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).

from odoo import models


class BasePartnerMergeAutomaticWizard(models.TransientModel):

    _inherit = "base.partner.merge.automatic.wizard"

    def _merge(self, partner_ids, dst_partner=None, extra_checks=True):
        partners = self.env["res.partner"].browse(partner_ids).exists()
        # remove dst_partner from partners to merge
        if dst_partner and dst_partner in partners:
            src_partners = partners - dst_partner
        else:
            ordered_partners = self._get_ordered_partner(partners.ids)
            src_partners = ordered_partners[:-1]
        src_partners.mapped("membership_line_ids")._close(force=True)
        return super(BasePartnerMergeAutomaticWizard, self)._merge(
            partner_ids, dst_partner, extra_checks
        )
# Copyright 2022 ACSONE SA/NV
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).

from odoo import models


class BasePartnerMergeAutomaticWizard(models.TransientModel):

    _inherit = "base.partner.merge.automatic.wizard"

    def _merge(self, partner_ids, dst_partner=None, extra_checks=True):
        partners = self.env["res.partner"].browse(partner_ids).exists()
        # remove dst_partner from partners to merge
        if dst_partner and dst_partner in partners:
            src_partners = partners - dst_partner
        else:
            ordered_partners = self._get_ordered_partner(partners.ids)
            dst_partner = ordered_partners[-1]
            src_partners = ordered_partners[:-1]
        # since we close the membership we need to keep an instance for the security
        for p in src_partners:
            p.force_int_instance_id = p.int_instance_id
        dst_force_int_instance_id = dst_partner.force_int_instance_id
        src_partners.mapped("membership_line_ids")._close(force=True)
        res = super(BasePartnerMergeAutomaticWizard, self)._merge(
            partner_ids, dst_partner, extra_checks
        )
        # do not modify the force_int_instance_id since it should be empty if
        # there is a membership_line_id
        dst_partner.force_int_instance_id = dst_force_int_instance_id
        return res
Fix the security for the merge after closing memberships
Fix the security for the merge after closing memberships
Python
agpl-3.0
mozaik-association/mozaik,mozaik-association/mozaik
16fe23b18f69e475858a975f3a2e3f743f4b4c57
zipline/__init__.py
zipline/__init__.py
""" Zipline """ # This is *not* a place to dump arbitrary classes/modules for convenience, # it is a place to expose the public interfaces. __version__ = "0.5.11.dev" from . import data from . import finance from . import gens from . import utils from . algorithm import TradingAlgorithm __all__ = [ 'data', 'finance', 'gens', 'utils', 'TradingAlgorithm' ]
#
# Copyright 2013 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Zipline
"""

# This is *not* a place to dump arbitrary classes/modules for convenience,
# it is a place to expose the public interfaces.

__version__ = "0.5.11.dev"

from . import data
from . import finance
from . import gens
from . import utils
from . algorithm import TradingAlgorithm

__all__ = [
    'data',
    'finance',
    'gens',
    'utils',
    'TradingAlgorithm'
]
Add license to module init file.
REL: Add license to module init file.
Python
apache-2.0
iamkingmaker/zipline,ChinaQuants/zipline,chrjxj/zipline,michaeljohnbennett/zipline,ronalcc/zipline,nborggren/zipline,sketchytechky/zipline,joequant/zipline,grundgruen/zipline,zhoulingjun/zipline,alphaBenj/zipline,CDSFinance/zipline,alphaBenj/zipline,umuzungu/zipline,dhruvparamhans/zipline,cmorgan/zipline,otmaneJai/Zipline,magne-max/zipline-ja,enigmampc/catalyst,Scapogo/zipline,euri10/zipline,stkubr/zipline,wilsonkichoi/zipline,zhoulingjun/zipline,keir-rex/zipline,humdings/zipline,jordancheah/zipline,keir-rex/zipline,euri10/zipline,iamkingmaker/zipline,humdings/zipline,kmather73/zipline,mattcaldwell/zipline,ronalcc/zipline,DVegaCapital/zipline,jimgoo/zipline-fork,davidastephens/zipline,MonoCloud/zipline,AlirezaShahabi/zipline,michaeljohnbennett/zipline,jimgoo/zipline-fork,quantopian/zipline,bartosh/zipline,wubr2000/zipline,aajtodd/zipline,dmitriz/zipline,CarterBain/AlephNull,bartosh/zipline,florentchandelier/zipline,semio/zipline,YuepengGuo/zipline,morrisonwudi/zipline,gwulfs/zipline,dhruvparamhans/zipline,chrjxj/zipline,dmitriz/zipline,dkushner/zipline,mattcaldwell/zipline,davidastephens/zipline,erikness/AlephOne,otmaneJai/Zipline,joequant/zipline,enigmampc/catalyst,DVegaCapital/zipline,YuepengGuo/zipline,sketchytechky/zipline,semio/zipline,gwulfs/zipline,CarterBain/AlephNull,florentchandelier/zipline,umuzungu/zipline,StratsOn/zipline,ChinaQuants/zipline,quantopian/zipline,grundgruen/zipline,nborggren/zipline,magne-max/zipline-ja,aajtodd/zipline,kmather73/zipline,CDSFinance/zipline,cmorgan/zipline,StratsOn/zipline,morrisonwudi/zipline,MonoCloud/zipline,jordancheah/zipline,dkushner/zipline,stkubr/zipline,wilsonkichoi/zipline,AlirezaShahabi/zipline,Scapogo/zipline,erikness/AlephOne,wubr2000/zipline
0e36a49d6a53f87cbe71fd5ec9dce524dd638122
fireplace/deck.py
fireplace/deck.py
import logging
import random
from .card import Card
from .enums import GameTag, Zone
from .utils import CardList


class Deck(CardList):
    MAX_CARDS = 30
    MAX_UNIQUE_CARDS = 2
    MAX_UNIQUE_LEGENDARIES = 1

    @classmethod
    def fromList(cls, cards, hero):
        return cls([Card(card) for card in cards], Card(hero))

    def __init__(self, cards, hero, name=None):
        super().__init__(cards)
        self.hero = hero
        if name is None:
            name = "Custom %s" % (hero)
        self.name = name
        for card in cards:
            # Don't use .zone directly as it would double-fill the deck
            card.tags[GameTag.ZONE] = Zone.DECK

    def __str__(self):
        return self.name

    def __repr__(self):
        return "<%s (%i cards)>" % (self.hero, len(self))

    def shuffle(self):
        logging.info("Shuffling %r..." % (self))
        random.shuffle(self)
import logging
import random
from .card import Card
from .enums import GameTag, Zone
from .utils import CardList


class Deck(CardList):
    MAX_CARDS = 30
    MAX_UNIQUE_CARDS = 2
    MAX_UNIQUE_LEGENDARIES = 1

    @classmethod
    def fromList(cls, cards, hero):
        return cls([Card(card) for card in cards], Card(hero))

    def __init__(self, cards, hero):
        super().__init__(cards)
        self.hero = hero
        for card in cards:
            # Don't use .zone directly as it would double-fill the deck
            card.tags[GameTag.ZONE] = Zone.DECK

    def __repr__(self):
        return "<Deck(hero=%r, count=%i)>" % (self.hero, len(self))

    def shuffle(self):
        logging.info("Shuffling %r..." % (self))
        random.shuffle(self)
Drop support for naming Deck objects
Drop support for naming Deck objects
Python
agpl-3.0
smallnamespace/fireplace,Meerkov/fireplace,amw2104/fireplace,Ragowit/fireplace,beheh/fireplace,butozerca/fireplace,Ragowit/fireplace,amw2104/fireplace,liujimj/fireplace,smallnamespace/fireplace,jleclanche/fireplace,oftc-ftw/fireplace,oftc-ftw/fireplace,butozerca/fireplace,NightKev/fireplace,Meerkov/fireplace,liujimj/fireplace
d6a6fc478d9aaea69ff6c1f5be3ebe0c1b34f180
fixlib/channel.py
fixlib/channel.py
import asyncore
import util

try:
    import simplejson as json
except ImportError:
    import json


class ChannelServer(asyncore.dispatcher):

    def __init__(self, sock, dest):
        asyncore.dispatcher.__init__(self, sock)
        self.dest = dest
        dest.register('close', self.closehook)

    def handle_accept(self):
        client = self.accept()
        SideChannel(client[0], self.dest)

    def closehook(self, hook, data):
        print 'HOOK-CLOSE'
        self.close()


class SideChannel(asyncore.dispatcher):

    def __init__(self, sock, dest):
        asyncore.dispatcher.__init__(self, sock)
        self.dest = dest
        self.buffer = None

    def handle_close(self):
        self.close()

    def handle_read(self):
        raw = self.recv(8192)
        if raw:
            msg = util.json_decode(json.loads(raw))
            self.dest.queue(msg)
            self.buffer = {'result': 'done'}

    def writable(self):
        return self.buffer

    def handle_write(self):
        self.send(json.dumps(self.buffer))
        self.close()
import asyncore
import util

try:
    import simplejson as json
except ImportError:
    import json


class ChannelServer(asyncore.dispatcher):

    def __init__(self, sock, dest):
        asyncore.dispatcher.__init__(self, sock)
        self.dest = dest
        dest.register('close', lambda x, y: self.close())

    def handle_accept(self):
        client = self.accept()
        SideChannel(client[0], self.dest)


class SideChannel(asyncore.dispatcher):

    def __init__(self, sock, dest):
        asyncore.dispatcher.__init__(self, sock)
        self.dest = dest
        self.buffer = None

    def handle_close(self):
        self.close()

    def handle_read(self):
        raw = self.recv(8192)
        if raw:
            msg = util.json_decode(json.loads(raw))
            self.dest.queue(msg)
            self.buffer = {'result': 'done'}

    def writable(self):
        return self.buffer

    def handle_write(self):
        self.send(json.dumps(self.buffer))
        self.close()
Use a lambda as a proxy.
Use a lambda as a proxy.
Python
bsd-3-clause
jvirtanen/fixlib
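Aside: the lambda in the new version is just a signature adapter — the 'close' hook calls back with (hook, data), while close() takes no arguments. The same trick in miniature, using a hypothetical event emitter:

class Emitter(object):
    # Hypothetical emitter that calls handlers as handler(hook, data).
    def __init__(self):
        self.handlers = []

    def register(self, name, handler):
        self.handlers.append((name, handler))

    def fire(self, name, data=None):
        for handler_name, handler in self.handlers:
            if handler_name == name:
                handler(name, data)

class Connection(object):
    def __init__(self, emitter):
        self.closed = False
        # close() wants no extra arguments, so the lambda discards hook/data.
        emitter.register('close', lambda hook, data: self.close())

    def close(self):
        self.closed = True

emitter = Emitter()
conn = Connection(emitter)
emitter.fire('close')
assert conn.closed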
82756e5314c2768bb3acf03cf542929d23b73f82
bot/logger/message_sender/synchronized.py
bot/logger/message_sender/synchronized.py
import threading

from bot.logger.message_sender import MessageSender, IntermediateMessageSender


class SynchronizedMessageSender(IntermediateMessageSender):
    """
    Thread-safe message sender.

    Wrap your `MessageSender` with this class and its :func:`send` function will be called in a synchronized way,
    only by one thread at the same time.
    """

    def __init__(self, sender: MessageSender):
        super().__init__(sender)
        self.lock = threading.Lock()

    def send(self, text):
        with self.lock:
            self.sender.send(text)
import threading

from bot.logger.message_sender import MessageSender, IntermediateMessageSender


class SynchronizedMessageSender(IntermediateMessageSender):
    """
    Thread-safe message sender.

    Wrap your `MessageSender` with this class and its :func:`send` function will be called in a synchronized way,
    only by one thread at the same time.
    """

    def __init__(self, sender: MessageSender):
        super().__init__(sender)
        # Using a reentrant lock to play safe in case the send function somehow invokes this send function again,
        # maybe because a send triggers another send on the same message sender.
        # Note that if this send throws an exception the lock is released when dealing with it from outside,
        # so this is not a problem.
        # But if the exception is handled inside this send call, the lock is still held.
        self.lock = threading.RLock()

    def send(self, text):
        with self.lock:
            self.sender.send(text)
Use reentrant lock on SynchronizedMessageSender
Use reentrant lock on SynchronizedMessageSender
Python
agpl-3.0
alvarogzp/telegram-bot,alvarogzp/telegram-bot
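Aside: the Lock-to-RLock change only matters for reentrancy. A plain Lock deadlocks if the thread holding it tries to acquire it again, while an RLock counts nested acquisitions by the same thread. A minimal stand-alone illustration:

import threading

rlock = threading.RLock()

def outer():
    with rlock:
        inner()  # same thread re-enters the lock

def inner():
    # Fine with RLock; with threading.Lock() this acquire would block forever.
    with rlock:
        print('re-entered safely')

outer()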
721703801654af88e8b5064d1bc65569ce1555cf
thumbnails/engines/__init__.py
thumbnails/engines/__init__.py
# -*- coding: utf-8 -*-


def get_current_engine():
    return None
# -*- coding: utf-8 -*-
from thumbnails.engines.pillow import PillowEngine


def get_current_engine():
    return PillowEngine()
Set pillow engine as default
Set pillow engine as default
Python
mit
python-thumbnails/python-thumbnails,relekang/python-thumbnails
4c1b96865f3e5e6660fc41f9170939a02f9b7735
fabfile.py
fabfile.py
from fabric.api import *
from fabric.contrib.console import confirm

cfg = dict(
    appengine_dir='appengine-web/src',
    goldquest_dir='src',
    appengine_token='',
)

def update():
    # update to latest code from repo
    local('git pull')

def test():
    local("nosetests -m 'Test|test_' -w %(goldquest_dir)s" % cfg)
    # jslint
    # pychecker
    # run jasmine tests

def compile():
    # Minimize javascript using google closure.
    local("java -jar ~/bin/compiler.jar --js %(appengine_dir)s/javascript/game.js --js_output_file %(appengine_dir)s/javascript/game.min.js" % cfg)

def deploy_appengine():
    local("appcfg.py --oauth2_refresh_token=%(appengine_token)s update %(appengine_dir)s" % cfg)

def prepare_deploy():
    test()
    compile()

def deploy():
    update()
    prepare_deploy()
    deploy_appengine()
    # tweet about release
from fabric.api import *
from fabric.contrib.console import confirm
import simplejson

cfg = dict(
    appengine_dir='appengine-web/src',
    goldquest_dir='src',
    oauth_cfg_path='/Users/olle/.appcfg_oauth2_tokens',
    appengine_refresh_token='',
)

def read_appcfg_oauth():
    fp = open(cfg['oauth_cfg_path'])
    oauth_cfg = simplejson.load(fp)
    cfg['appengine_refresh_token'] = oauth_cfg['refresh_token']

def update():
    # update to latest code from repo
    local('git pull')

def test():
    local("nosetests -m 'Test|test_' -w %(goldquest_dir)s" % cfg)
    # jslint
    # pychecker
    # run jasmine tests

def compile():
    # Minimize javascript using google closure.
    local("java -jar ~/bin/compiler.jar --js %(appengine_dir)s/javascript/game.js --js_output_file %(appengine_dir)s/javascript/game.min.js" % cfg)

def deploy_appengine():
    read_appcfg_oauth()
    local("appcfg.py --oauth2_refresh_token=%(appengine_refresh_token)s update %(appengine_dir)s" % cfg)

def prepare_deploy():
    test()
    compile()

def deploy():
    update()
    prepare_deploy()
    deploy_appengine()
    # tweet about release
Read appengine refresh_token from oauth file automatically.
NEW: Read appengine refresh_token from oauth file automatically.
Python
mit
ollej/GoldQuest,ollej/GoldQuest,ollej/GoldQuest,ollej/GoldQuest
670227590ceaf6eb52d56809f8bcc1b1f6ae6f7f
prettyplotlib/_eventplot.py
prettyplotlib/_eventplot.py
__author__ = 'jgosmann'

from matplotlib.cbook import iterable

from prettyplotlib.utils import remove_chartjunk, maybe_get_ax
from prettyplotlib.colors import set2


def eventplot(*args, **kwargs):
    ax, args, kwargs = maybe_get_ax(*args, **kwargs)

    show_ticks = kwargs.pop('show_ticks', False)

    if len(args) > 0:
        positions = args[0]
    else:
        positions = kwargs['positions']
    if any(iterable(p) for p in positions):
        size = len(positions)
    else:
        size = 1
    kwargs.setdefault('colors', [c + (1.0,) for c in set2[:size]])

    event_collections = ax.eventplot(*args, **kwargs)
    remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
    return event_collections
__author__ = 'jgosmann'

from matplotlib.cbook import iterable

from prettyplotlib.utils import remove_chartjunk, maybe_get_ax
from prettyplotlib.colors import set2


def eventplot(*args, **kwargs):
    ax, args, kwargs = maybe_get_ax(*args, **kwargs)

    show_ticks = kwargs.pop('show_ticks', False)
    alpha = kwargs.pop('alpha', 1.0)

    if len(args) > 0:
        positions = args[0]
    else:
        positions = kwargs['positions']
    if any(iterable(p) for p in positions):
        size = len(positions)
    else:
        size = 1
    kwargs.setdefault('colors', [c + (alpha,) for c in set2[:size]])

    event_collections = ax.eventplot(*args, **kwargs)
    remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
    return event_collections
Add alpha argument to eventplot().
Add alpha argument to eventplot().
Python
mit
olgabot/prettyplotlib,olgabot/prettyplotlib
c814fe264c93dfa09276474960aa83cdb26e7754
polyaxon/api/searches/serializers.py
polyaxon/api/searches/serializers.py
from rest_framework import serializers

from db.models.searches import Search


class SearchSerializer(serializers.ModelSerializer):
    class Meta:
        model = Search
        fields = ['id', 'name', 'query', 'meta']
from rest_framework import serializers
from rest_framework.exceptions import ValidationError

from api.utils.serializers.names import NamesMixin
from db.models.searches import Search


class SearchSerializer(serializers.ModelSerializer, NamesMixin):
    class Meta:
        model = Search
        fields = ['id', 'name', 'query', 'meta']

    def create(self, validated_data):
        validated_data = self.validated_name(validated_data,
                                             project=validated_data['project'],
                                             query=Search.all)
        try:
            return super().create(validated_data)
        except Exception as e:
            raise ValidationError(e)
Add graceful handling for creating search with similar names
Add graceful handling for creating search with similar names
Python
apache-2.0
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
e459a42af1c260986c7333047efd40294dbd23d3
akaudit/clidriver.py
akaudit/clidriver.py
#!/usr/bin/env python
# Copyright 2015 Chris Fordham
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
import argparse

import akaudit
from akaudit.audit import Auditer


def main(argv = sys.argv, log = sys.stderr):
    parser = argparse.ArgumentParser(description='Audit who has access to your homes.', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-l', '--log', default='info', help='log level')
    parser.add_argument('-i', '--interactive', help='interactive mode (prompts asking if to delete each key)', action="store_true")
    parser.add_argument('-v', '--version', action="version", version='%(prog)s ' + akaudit.__version__)
    args = parser.parse_args()

    auditer = Auditer()
    auditer.run_audit(args)


if __name__ == "__main__":
    main(sys.argv[1:])
#!/usr/bin/env python
# Copyright 2015 Chris Fordham
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys
import argparse

import akaudit
from akaudit.audit import Auditer


def main(argv = sys.argv, log = sys.stderr):
    parser = argparse.ArgumentParser(description=akaudit.__description__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-l', '--log', default='info', help='log level')
    parser.add_argument('-i', '--interactive', help='interactive mode (prompts asking if to delete each key)', action="store_true")
    parser.add_argument('-v', '--version', action="version", version='%(prog)s ' + akaudit.__version__)
    args = parser.parse_args()

    auditer = Auditer()
    auditer.run_audit(args)


if __name__ == "__main__":
    main(sys.argv[1:])
Use __description__ with parser instantiation.
Use __description__ with parser instantiation.
Python
apache-2.0
flaccid/akaudit
d90f249e0865dab0cc9a224f413ea90df8a648ed
srsly/util.py
srsly/util.py
from pathlib import Path
from typing import Union, Dict, Any, List, Tuple
from collections import OrderedDict

# fmt: off
FilePath = Union[str, Path]
# Superficial JSON input/output types
# https://github.com/python/typing/issues/182#issuecomment-186684288
JSONOutput = Union[str, int, float, bool, None, Dict[str, Any], List[Any]]
JSONOutputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any]]
# For input, we also accept tuples, ordered dicts etc.
JSONInput = Union[str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any], OrderedDict]
JSONInputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any], OrderedDict]
YAMLInput = JSONInput
YAMLOutput = JSONOutput
# fmt: on


def force_path(location, require_exists=True):
    if not isinstance(location, Path):
        location = Path(location)
    if require_exists and not location.exists():
        raise ValueError(f"Can't read file: {location}")
    return location


def force_string(location):
    if isinstance(location, str):
        return location
    return str(location)
from pathlib import Path
from typing import Union, Dict, Any, List, Tuple
from collections import OrderedDict

# fmt: off
FilePath = Union[str, Path]
# Superficial JSON input/output types
# https://github.com/python/typing/issues/182#issuecomment-186684288
JSONOutput = Union[str, int, float, bool, None, Dict[str, Any], List[Any]]
JSONOutputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any]]
# For input, we also accept tuples, ordered dicts etc.
JSONInput = Union[str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any, ...], OrderedDict]
JSONInputBin = Union[bytes, str, int, float, bool, None, Dict[str, Any], List[Any], Tuple[Any, ...], OrderedDict]
YAMLInput = JSONInput
YAMLOutput = JSONOutput
# fmt: on


def force_path(location, require_exists=True):
    if not isinstance(location, Path):
        location = Path(location)
    if require_exists and not location.exists():
        raise ValueError(f"Can't read file: {location}")
    return location


def force_string(location):
    if isinstance(location, str):
        return location
    return str(location)
Fix typing for JSONInput and JSONInputBin.
Fix typing for JSONInput and JSONInputBin.
Python
mit
explosion/srsly,explosion/srsly,explosion/srsly,explosion/srsly
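Aside: the one-character fix changes the meaning of the annotation — Tuple[Any] is a tuple of exactly one element, while Tuple[Any, ...] is a homogeneous tuple of any length. A small illustration aimed at a static checker such as mypy:

from typing import Any, Tuple

one_element: Tuple[Any]      # exactly one item, e.g. (1,)
any_length: Tuple[Any, ...]  # any number of items, e.g. (), (1, 'a', 3.0)

any_length = (1, 'a', 3.0)   # accepted by a static checker
one_element = (1, 2)         # mypy would reject this: wrong tuple length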
20e096ac5261cb7fd4197f6cdeb8b171753c82a7
landlab/values/tests/conftest.py
landlab/values/tests/conftest.py
import pytest

from landlab import NetworkModelGrid, RasterModelGrid


@pytest.fixture
def four_by_four_raster():
    mg = RasterModelGrid((4, 4))
    return mg


@pytest.fixture
def simple_network():
    y_of_node = (0, 1, 2, 2)
    x_of_node = (0, 0, -1, 1)
    nodes_at_link = ((1, 0), (2, 1), (3, 1))
    mg = NetworkModelGrid((y_of_node, x_of_node), nodes_at_link)
    return mg
import pytest

from landlab import NetworkModelGrid, RasterModelGrid
from landlab.values.synthetic import _STATUS


@pytest.fixture
def four_by_four_raster():
    mg = RasterModelGrid((4, 4))
    return mg


@pytest.fixture
def simple_network():
    y_of_node = (0, 1, 2, 2)
    x_of_node = (0, 0, -1, 1)
    nodes_at_link = ((1, 0), (2, 1), (3, 1))
    mg = NetworkModelGrid((y_of_node, x_of_node), nodes_at_link)
    return mg


def pytest_generate_tests(metafunc):
    if "at" in metafunc.fixturenames:
        metafunc.parametrize("at", ("node", "link", "patch", "corner", "face", "cell"))
    if "node_bc" in metafunc.fixturenames:
        metafunc.parametrize("node_bc", list(_STATUS["node"].keys()))
    if "link_bc" in metafunc.fixturenames:
        metafunc.parametrize("link_bc", list(_STATUS["link"].keys()))
Add parametrized fixture for at, node_bc, link_bc.
Add parametrized fixture for at, node_bc, link_bc.
Python
mit
landlab/landlab,cmshobe/landlab,landlab/landlab,cmshobe/landlab,amandersillinois/landlab,landlab/landlab,amandersillinois/landlab,cmshobe/landlab
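Aside: pytest_generate_tests parametrizes any test in the package that requests the at, node_bc, or link_bc fixtures by name. A hypothetical test using the at fixture would therefore run once per grid element:

# Hypothetical test module picked up alongside the conftest.py above.
def test_at_is_a_grid_element(four_by_four_raster, at):
    # pytest runs this six times: node, link, patch, corner, face, cell.
    assert at in ('node', 'link', 'patch', 'corner', 'face', 'cell')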
bcde8104bd77f18d7061f7f4d4831ad49644a913
common/management/commands/build_index.py
common/management/commands/build_index.py
from django.core.management import BaseCommand
from django.db.models import get_app, get_models
from django.conf import settings

from common.utilities.search_utils import index_instance


class Command(BaseCommand):
    def add_arguments(self, parser):
        parser.add_argument(
            '--test',
            action='store_true',
            dest='test',
            default=False,
            help='Provide this if you want to create a test index')

    def handle(self, *args, **options):
        # optimize this to index in bulk
        apps_lists = settings.LOCAL_APPS
        for app_name in apps_lists:
            app = get_app(app_name)
            for model in get_models(app):
                all_instances = model.objects.all()[0:3] \
                    if options.get('test') else model.objects.all()
                [index_instance(obj) for obj in all_instances]
                message = "Indexed {} {}".format(
                    all_instances.count(),
                    model._meta.verbose_name_plural.capitalize())
                self.stdout.write(message)
        self.stdout.write("Finished indexing")
from django.core.management import BaseCommand
from django.db.models import get_app, get_models
from django.conf import settings

from common.utilities.search_utils import index_instance


class Command(BaseCommand):
    def add_arguments(self, parser):
        parser.add_argument(
            '--test',
            action='store_true',
            dest='test',
            default=False,
            help='Provide this if you want to create a test index')

    def handle(self, *args, **options):
        # optimize this to index in bulk
        apps_lists = settings.LOCAL_APPS
        for app_name in apps_lists:
            app = get_app(app_name)
            for model in get_models(app):
                if model.__name__.lower() != 'testmodel':
                    all_instances = model.objects.all()[0:3] \
                        if options.get('test') else model.objects.all()
                    [index_instance(obj) for obj in all_instances]
                    message = "Indexed {} {}".format(
                        all_instances.count(),
                        model._meta.verbose_name_plural.capitalize())
                    self.stdout.write(message)
                else:
                    # relation "common_testmodel" does not exist
                    # Will be fixed
                    pass
        self.stdout.write("Finished indexing")
Check the model being indexed
Check the model being indexed
Python
mit
urandu/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,urandu/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,urandu/mfl_api,urandu/mfl_api
ccb1759a205a4cdc8f5eb2c28adcf49503221135
ecpy/tasks/api.py
ecpy/tasks/api.py
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015 by Ecpy Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Tasks package public interface.

"""
from __future__ import (division, unicode_literals, print_function,
                        absolute_import)

import enaml

from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask
from .task_interface import (InterfaceableTaskMixin, TaskInterface,
                             InterfaceableInterfaceMixin, IInterface)
from .manager.declarations import (Tasks, Task, Interfaces, Interface,
                                   TaskConfig)
from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter,
                              MetadataTaskFilter)
from .manager.configs.base_configs import BaseTaskConfig

with enaml.imports():
    from .manager.configs.base_config_views import BaseConfigView
    from .base_views import BaseTaskView

__all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask',
           'BaseTaskView', 'InterfaceableTaskMixin', 'TaskInterface',
           'InterfaceableInterfaceMixin', 'IInterface',
           'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig',
           'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter',
           'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView']
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015 by Ecpy Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Tasks package public interface.

"""
from __future__ import (division, unicode_literals, print_function,
                        absolute_import)

import enaml

from .base_tasks import BaseTask, SimpleTask, ComplexTask, RootTask
from .task_interface import (InterfaceableTaskMixin, TaskInterface,
                             InterfaceableInterfaceMixin, IInterface)
from .manager.declarations import (Tasks, Task, Interfaces, Interface,
                                   TaskConfig)
from .manager.filters import (TaskFilter, SubclassTaskFilter, GroupTaskFilter,
                              MetadataTaskFilter)
from .manager.configs.base_configs import BaseTaskConfig
from .manager.utils.building import build_task_from_config

with enaml.imports():
    from .manager.configs.base_config_views import BaseConfigView
    from .base_views import BaseTaskView

__all__ = ['BaseTask', 'SimpleTask', 'ComplexTask', 'RootTask',
           'BaseTaskView', 'InterfaceableTaskMixin', 'TaskInterface',
           'InterfaceableInterfaceMixin', 'IInterface',
           'Tasks', 'Task', 'Interfaces', 'Interface', 'TaskConfig',
           'TaskFilter', 'SubclassTaskFilter', 'GroupTaskFilter',
           'MetadataTaskFilter', 'BaseTaskConfig', 'BaseConfigView',
           'build_task_from_config']
Add tasks/build_from_config to the public API.
Add tasks/build_from_config to the public API.
Python
bsd-3-clause
Ecpy/ecpy,Ecpy/ecpy
e8bc2048f5b89b5540219b24921e596f11b34466
crypto_enigma/_version.py
crypto_enigma/_version.py
#!/usr/bin/env python # encoding: utf8 from __future__ import (absolute_import, print_function, division, unicode_literals) # See - http://www.python.org/dev/peps/pep-0440/ # See - http://semver.org __author__ = 'Roy Levien' __copyright__ = '(c) 2014-2015 Roy Levien' __release__ = '0.2.1' # N(.N)* __pre_release__ = 'b2' # aN | bN | cN | __suffix__ = ''#'.dev5' # .devN | | .postN __version__ = __release__ + __pre_release__ + __suffix__
#!/usr/bin/env python # encoding: utf8 from __future__ import (absolute_import, print_function, division, unicode_literals) # See - http://www.python.org/dev/peps/pep-0440/ # See - http://semver.org __author__ = 'Roy Levien' __copyright__ = '(c) 2014-2015 Roy Levien' __release__ = '0.2.1' # N(.N)* __pre_release__ = 'b3' # aN | bN | cN | __suffix__ = '.dev1' # .devN | | .postN __version__ = __release__ + __pre_release__ + __suffix__
Update test version following release
Update test version following release
Python
bsd-3-clause
orome/crypto-enigma-py
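The version record above assembles a PEP 440 string from three parts; a quick standalone illustration using the values from the new contents:
release = '0.2.1'    # N(.N)*
pre_release = 'b3'   # aN | bN | cN
suffix = '.dev1'     # .devN | .postN
version = release + pre_release + suffix
print(version)  # -> 0.2.1b3.dev1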
d8573d7d2d1825253dab6998fc70dd829399c406
src/config.py
src/config.py
# -*- coding: utf-8 -*- #DEBUG = True SECRET_KEY = "change secret key in production" SESSION_COOKIE_HTTPONLY = True SESSION_COOKIE_SECURE = True #PERMANENT_SESSION_LIFETIME = timedelta(minutes=10)
# -*- coding: utf-8 -*- import os #DEBUG = True SECRET_KEY = "change secret key in production" SESSION_COOKIE_HTTPONLY = True SESSION_COOKIE_SECURE = True #PERMANENT_SESSION_LIFETIME = timedelta(minutes=10) UNITTEST_USERNAME = os.environ.get('USERNAME', '') UNITTEST_PASSWORD = os.environ.get('PASSWORD', '')
Add unittest account and password
Add unittest account and password
Python
mit
JohnSounder/AP-API,kuastw/AP-API,JohnSounder/AP-API,kuastw/AP-API
7e15896c14cbbab36862c8000b0c25c6a48fedcd
cref/structure/__init__.py
cref/structure/__init__.py
# import porter_paleale def write_pdb(aa_sequence, fragment_angles, gap_length, filepath): """ Generate pdb file with results :param aa_sequence: Amino acid sequence :param fragment_angles: Backbone torsion angles :param gap_length: Length of the gap at the sequence start and end :param filepath: Path to the file to save the pdb """ pass
from peptide import PeptideBuilder import Bio.PDB def write_pdb(aa_sequence, fragment_angles, gap_length, filepath): """ Generate pdb file with results :param aa_sequence: Amino acid sequence :param fragment_angles: Backbone torsion angles :param gap_length: Length of the gap at the sequence start and end :param filepath: Path to the file to save the pdb """ phi, psi = zip(*fragment_angles) structure = PeptideBuilder.make_structure(aa_sequence, phi, psi) out = Bio.PDB.PDBIO() out.set_structure(structure) out.save(filepath)
Write pdb result to disk
Write pdb result to disk
Python
mit
mchelem/cref2,mchelem/cref2,mchelem/cref2
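The write_pdb change above depends on zip(*pairs) to split (phi, psi) tuples into two parallel sequences; a minimal self-contained illustration with made-up angles:
fragment_angles = [(-60.0, -45.0), (-135.0, 135.0)]  # hypothetical torsion pairs
phi, psi = zip(*fragment_angles)
print(phi)  # (-60.0, -135.0)
print(psi)  # (-45.0, 135.0)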
cfe18afa4954980380dc02338d434dc968bb898a
test/test_random_scheduler.py
test/test_random_scheduler.py
import json import random from mock import patch from pybossa.model.task import Task from pybossa.model.project import Project from pybossa.model.user import User from pybossa.model.task_run import TaskRun from pybossa.model.category import Category import pybossa import sys import os sys.path.append(os.path.abspath("../pybossa/test")) from helper import sched from default import Test, db, with_context class TestSched(sched.Helper): def setUp(self): super(TestSched, self).setUp() self.endpoints = ['project', 'task', 'taskrun'] @with_context def test_get_random_task(self): self._test_get_random_task() def _test_get_random_task(self, user=None): task = pybossa.sched.get_random_task(project_id=1) assert task is not None, task tasks = db.session.query(Task).all() for t in tasks: db.session.delete(t) db.session.commit() task = pybossa.sched.get_random_task(project_id=1) assert task is None, task
import json import random from mock import patch from pybossa.model.task import Task from pybossa.model.project import Project from pybossa.model.user import User from pybossa.model.task_run import TaskRun from pybossa.model.category import Category import pybossa import sys import os sys.path.append(os.path.abspath("./pybossa/test")) from helper import sched from default import Test, db, with_context class TestSched(sched.Helper): def setUp(self): super(TestSched, self).setUp() self.endpoints = ['project', 'task', 'taskrun'] @with_context def test_get_random_task(self): self._test_get_random_task() def _test_get_random_task(self, user=None): task = pybossa.sched.get_random_task(project_id=1) assert task is not None, task tasks = db.session.query(Task).all() for t in tasks: db.session.delete(t) db.session.commit() task = pybossa.sched.get_random_task(project_id=1) assert task is None, task
Fix path to pybossa tests
Fix path to pybossa tests
Python
agpl-3.0
PyBossa/random-scheduler
810a43c859264e3d5e1af8b43888bf89c06bee1d
ipybind/stream.py
ipybind/stream.py
# -*- coding: utf-8 -*- import contextlib import sys try: import fcntl except ImportError: fcntl = None from ipybind.common import is_kernel from ipybind.ext.wurlitzer import Wurlitzer _fwd = None class Forwarder(Wurlitzer): def __init__(self, handler=None): self._data_handler = handler if handler is not None else lambda x: x super().__init__(stdout=sys.stdout, stderr=sys.stderr) def _handle_data(self, data, stream): data = self._data_handler(self._decode(data)) if data and stream: stream.write(data) def _handle_stdout(self, data): self._handle_data(data, self._stdout) def _handle_stderr(self, data): self._handle_data(data, self._stderr) @contextlib.contextmanager def suppress(): if fcntl: with Forwarder(handler=lambda _: None): yield else: yield @contextlib.contextmanager def forward(handler=None): global _fwd if _fwd is None and is_kernel() and fcntl: with Forwarder(handler=handler): yield else: yield def start_forwarding(handler=None): global _fwd if fcntl: if _fwd is None: _fwd = Forwarder(handler=handler) _fwd.__enter__() def stop_forwarding(handler=None): global _fwd if fcntl: if _fwd is not None: _fwd.__exit__(None, None, None) _fwd = None
# -*- coding: utf-8 -*- import contextlib import sys try: import fcntl except ImportError: fcntl = None from ipybind.common import is_kernel from ipybind.ext.wurlitzer import Wurlitzer _fwd = None class Forwarder(Wurlitzer): def __init__(self, handler=None): self._data_handler = handler if handler is not None else lambda x: x super().__init__(stdout=sys.stdout, stderr=sys.stderr) def _handle_data(self, data, stream): data = self._data_handler(self._decode(data)) if data and stream: stream.write(data) def _handle_stdout(self, data): self._handle_data(data, self._stdout) def _handle_stderr(self, data): self._handle_data(data, self._stderr) @contextlib.contextmanager def forward(handler=None): global _fwd if _fwd is None and is_kernel() and fcntl: with Forwarder(handler=handler): yield else: yield def start_forwarding(handler=None): global _fwd if fcntl: if _fwd is None: _fwd = Forwarder(handler=handler) _fwd.__enter__() def stop_forwarding(handler=None): global _fwd if fcntl: if _fwd is not None: _fwd.__exit__(None, None, None) _fwd = None
Remove suppress() as it's no longer required
Remove suppress() as it's no longer required
Python
mit
aldanor/ipybind,aldanor/ipybind,aldanor/ipybind
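A hedged usage sketch for the forward() context manager retained by the ipybind commit above; the handler sees each decoded chunk of captured output and may transform or drop it (forwarding only actually happens inside an IPython kernel when fcntl is available):
from ipybind.stream import forward

with forward(handler=lambda text: text.upper()):
    # C-level stdout/stderr produced here would be upper-cased before display.
    pass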
db19dfa17261c3d04de0202b2809ba8abb70326b
tests/unit/test_moxstubout.py
tests/unit/test_moxstubout.py
# Copyright 2014 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslotest import base from oslotest import moxstubout class TestMoxStubout(base.BaseTestCase): def _stubable(self): pass def test_basic_stubout(self): f = self.useFixture(moxstubout.MoxStubout()) before = TestMoxStubout._stubable f.mox.StubOutWithMock(TestMoxStubout, '_stubable') after = TestMoxStubout._stubable self.assertNotEqual(before, after) f.cleanUp() after2 = TestMoxStubout._stubable self.assertEqual(before, after2)
# Copyright 2014 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslotest import base from oslotest import moxstubout class TestMoxStubout(base.BaseTestCase): def _stubable(self): pass def test_basic_stubout(self): f = self.useFixture(moxstubout.MoxStubout()) before = TestMoxStubout._stubable f.mox.StubOutWithMock(TestMoxStubout, '_stubable') after = TestMoxStubout._stubable self.assertNotEqual(before, after) f.cleanUp() after2 = TestMoxStubout._stubable self.assertEqual(before, after2) f._clear_cleanups()
Fix build break with Fixtures 1.3
Fix build break with Fixtures 1.3 Our explicit call to cleanUp messes things up in latest fixture, so we need to call _clear_cleanups to stop the test from breaking Change-Id: I8ce2309a94736b47fb347f37ab4027857e19c8a8
Python
apache-2.0
openstack/oslotest,openstack/oslotest
5ac84c4e9d8d68b7e89ebf344d2c93a5f7ef4c4c
notebooks/galapagos_to_pandas.py
notebooks/galapagos_to_pandas.py
# coding: utf-8 def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits', out_filename=None): """Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file""" from astropy.io import fits import pandas as pd import re import tempfile if out_filename is None: out_filename = re.sub('.fits$', '', in_filename)+'.h5' data = fits.getdata(in_filename, 1) with tempfile.NamedTemporaryFile() as tmp: with pd.get_store(tmp.name, mode='w') as tmpstore: for n in data.names: d = data[n] if len(d.shape) == 1: new_cols = pd.DataFrame(d, columns=[n]) else: new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in 'RUGIZYJHK']) tmpstore[n] = new_cols with pd.get_store(out_filename, mode='w', complib='blosc', complevel=5) as store: # Use format='table' on next line to save as a pytables table store.put('data', pd.concat([tmpstore[n] for n in data.names], axis=1)) return pd.HDFStore(out_filename)
# coding: utf-8 def galapagos_to_pandas(in_filename='/home/ppzsb1/quickdata/GAMA_9_all_combined_gama_only_bd6.fits', out_filename=None, bands='RUGIZYJHK'): """Convert a GALAPAGOS multi-band catalogue to a pandas-compatible HDF5 file""" from astropy.io import fits import pandas as pd import re import tempfile if out_filename is None: out_filename = re.sub('.fits$', '', in_filename)+'.h5' data = fits.getdata(in_filename, 1) with tempfile.NamedTemporaryFile() as tmp: with pd.get_store(tmp.name, mode='w') as tmpstore: for n in data.names: d = data[n] if len(d.shape) == 1: new_cols = pd.DataFrame(d, columns=[n]) else: new_cols = pd.DataFrame(d, columns=['{}_{}'.format(n,b) for b in bands]) tmpstore[n] = new_cols with pd.get_store(out_filename, mode='w', complib='blosc', complevel=5) as store: # Use format='table' on next line to save as a pytables table store.put('data', pd.concat([tmpstore[n] for n in data.names], axis=1)) return pd.HDFStore(out_filename)
Allow specification of GALAPAGOS bands
Allow specification of GALAPAGOS bands
Python
mit
MegaMorph/megamorph-analysis
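A hypothetical call showing the new bands parameter of galapagos_to_pandas; the catalogue path is a placeholder, and the string length must match the number of bands in the multi-band FITS columns:
store = galapagos_to_pandas('gama_catalogue.fits', bands='UGRIZ')
df = store['data']  # the function returns an open pandas HDFStore
store.close()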
3136f7e37b339252d4c1f5642974e180070c452d
kirppu/signals.py
kirppu/signals.py
# -*- coding: utf-8 -*- from django.db.models.signals import pre_save, pre_delete from django.dispatch import receiver @receiver(pre_save) def save_handler(sender, instance, using, **kwargs): # noinspection PyProtectedMember if instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default": raise ValueError("Saving objects in non-default database should not happen") @receiver(pre_delete) def delete_handler(sender, instance, using, **kwargs): # noinspection PyProtectedMember if instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default": raise ValueError("Deleting objects from non-default database should not happen")
# -*- coding: utf-8 -*- from django.db.models.signals import pre_migrate, post_migrate from django.dispatch import receiver ENABLE_CHECK = True @receiver(pre_migrate) def pre_migrate_handler(*args, **kwargs): global ENABLE_CHECK ENABLE_CHECK = False @receiver(post_migrate) def post_migrate_handler(*args, **kwargs): global ENABLE_CHECK ENABLE_CHECK = True def save_handler(sender, instance, using, **kwargs): # noinspection PyProtectedMember if ENABLE_CHECK and instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default": raise ValueError("Saving objects in non-default database should not happen") def delete_handler(sender, instance, using, **kwargs): # noinspection PyProtectedMember if ENABLE_CHECK and instance._meta.app_label in ("kirppu", "kirppuauth") and using != "default": raise ValueError("Deleting objects from non-default database should not happen")
Allow migrations to be run on extra databases.
Allow migrations to be run on extra databases. - Remove duplicate registration of save and delete signals. Already registered in apps.
Python
mit
jlaunonen/kirppu,jlaunonen/kirppu,jlaunonen/kirppu,jlaunonen/kirppu
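A hedged sketch of the AppConfig wiring the kirppu commit message alludes to ("already registered in apps"); the class name and module layout here are assumptions, not taken from the record:
from django.apps import AppConfig
from django.db.models.signals import pre_save, pre_delete

class KirppuConfig(AppConfig):
    name = 'kirppu'

    def ready(self):
        from . import signals
        # Connect the guarded handlers defined in signals.py above.
        pre_save.connect(signals.save_handler)
        pre_delete.connect(signals.delete_handler)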
89508d6ea3e89ce87f327a88571c892d4bfcbec5
setup.py
setup.py
import subprocess import sys from setuptools import Command, setup class RunTests(Command): user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests']) raise SystemExit(errno) with open('README.md') as readme: long_description = readme.read() setup( name='gis-metadata-parser', description='Parser for GIS metadata standards including FGDC and ISO-19115', long_description=long_description, long_description_content_type='text/markdown', keywords='arcgis,fgdc,iso,ISO-19115,ISO-19139,gis,metadata,parser,xml,gis_metadata,gis_metadata_parser', version='1.2.3', packages=[ 'gis_metadata', 'gis_metadata.tests' ], install_requires=[ 'frozendict>=1.2', 'parserutils>=1.1', 'six>=1.9.0' ], tests_require=['mock'], url='https://github.com/consbio/gis-metadata-parser', license='BSD', cmdclass={'test': RunTests} )
import subprocess import sys from setuptools import Command, setup class RunTests(Command): user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests']) raise SystemExit(errno) with open('README.md') as readme: long_description = readme.read() setup( name='gis-metadata-parser', description='Parser for GIS metadata standards including FGDC and ISO-19115', long_description=long_description, long_description_content_type='text/markdown', keywords='arcgis,fgdc,iso,ISO-19115,ISO-19139,gis,metadata,parser,xml,gis_metadata,gis_metadata_parser', version='1.2.4', packages=[ 'gis_metadata', 'gis_metadata.tests' ], install_requires=[ 'frozendict>=1.2', 'parserutils>=1.1', 'six>=1.9.0' ], tests_require=['mock'], url='https://github.com/consbio/gis-metadata-parser', license='BSD', cmdclass={'test': RunTests} )
Increment minor version after ArcGIS fix and improved tests and docs
Increment minor version after ArcGIS fix and improved tests and docs
Python
bsd-3-clause
consbio/gis-metadata-parser
1dd3333a433bac0ee2a155fd33987fa542e968a4
setup.py
setup.py
# -*- coding: utf-8 -*- from pypandoc import convert from setuptools import setup setup( name='mws', version='0.7', maintainer="James Hiew", maintainer_email="[email protected]", url="http://github.com/jameshiew/mws", description='Python library for interacting with the Amazon MWS API', long_description=convert("README.md", 'rst'), packages=['mws'], install_requires=[ 'requests' ], classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries :: Application Frameworks', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], platforms=['OS Independent'], license='Unlicense', include_package_data=True, zip_safe=False )
# -*- coding: utf-8 -*- short_description = 'Python library for interacting with the Amazon MWS API' try: from pypandoc import convert except (ImportError, OSError): # either pypandoc or pandoc isn't installed long_description = "See README.md" else: long_description = convert("README.md", 'rst') from setuptools import setup setup( name='mws', version='0.7', maintainer="James Hiew", maintainer_email="[email protected]", url="http://github.com/jameshiew/mws", description=short_description, long_description=long_description, packages=['mws'], install_requires=[ 'requests' ], classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries :: Application Frameworks', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], platforms=['OS Independent'], license='Unlicense', include_package_data=True, zip_safe=False )
Fix pip install errors when (py)pandoc is missing
Fix pip install errors when (py)pandoc is missing
Python
unlicense
Bobspadger/python-amazon-mws,GriceTurrble/python-amazon-mws,bpipat/mws,jameshiew/mws
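The mws fix above is an instance of a general optional-dependency pattern; a small sketch that falls back to a plain string whenever pypandoc or the pandoc binary is absent (the helper name is hypothetical):
def readme_as_rst(path='README.md'):
    try:
        from pypandoc import convert
    except (ImportError, OSError):  # pypandoc missing, or pandoc not on PATH
        return 'See ' + path
    return convert(path, 'rst')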
46f080487790cdbc430adc8b3b4f0ea7a1e4cdb6
setup.py
setup.py
import os import re from setuptools import ( find_packages, setup, ) version_re = re.compile(r"__version__\s*=\s*['\"](.*?)['\"]") def get_version(): base = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(base, 'curator/__init__.py')) as initf: for line in initf: m = version_re.match(line.strip()) if not m: continue return m.groups()[0] setup( name='redis-lua-curator', version=get_version(), description='Helper for working with lua scripts.', packages=find_packages(exclude=[ "*.tests", "*.tests.*", "tests.*", "tests", ]), author='Michael Hahn', author_email='[email protected]', url='https://github.com/mhahn/curator/', download_url='https://github.com/mhahn/curator/tarball/%s' % ( get_version(), ), setup_requires=[ 'nose>=1.0', 'coverage>=1.0', 'mock==1.0.1', 'unittest2==0.5.1', ], install_requires=[ 'redis==2.10.1', 'jinja2==2.7.2', ], keywords=['redis', 'lua'], )
import os import re from setuptools import ( find_packages, setup, ) version_re = re.compile(r"__version__\s*=\s*['\"](.*?)['\"]") def get_version(): base = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(base, 'curator/__init__.py')) as initf: for line in initf: m = version_re.match(line.strip()) if not m: continue return m.groups()[0] setup( name='redis-lua-curator', version=get_version(), description='Helper for working with lua scripts.', packages=find_packages(exclude=[ "*.tests", "*.tests.*", "tests.*", "tests", ]), author='Michael Hahn', author_email='[email protected]', url='https://github.com/mhahn/curator/', download_url='https://github.com/mhahn/curator/tarball/%s' % ( get_version(), ), setup_requires=[ 'nose>=1.0', 'coverage>=1.0', 'mock==1.0.1', 'unittest2==0.5.1', ], install_requires=[ 'redis >= 2.8.0, <= 2.10.1', 'jinja2==2.7.2', ], keywords=['redis', 'lua'], )
Support a range of redis client versions
Support a range of redis client versions
Python
mit
eventbrite/curator
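The curator change above replaces an exact pin with a range; candidate versions can be checked against such a specifier with the packaging library (an illustration, not part of the project):
from packaging.specifiers import SpecifierSet

spec = SpecifierSet('>=2.8.0,<=2.10.1')
print('2.8.0' in spec)   # True
print('2.10.1' in spec)  # True
print('2.10.2' in spec)  # False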
8be5530e1fca59aff42b404b64324b68235bfd87
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages import chagallpy setup( name='chagallpy', version=chagallpy.__version__, packages=find_packages(), license='MIT', description='CHArming GALLEry in PYthon', long_description=open('README.md').read(), author='Jan Pipek', author_email='jan DOT pipek AT gmail COM', url='https://github.com/janpipek/chagallpy', install_requires = [ 'wowp', 'pillow', "jinja2" ], entry_points = { 'console_scripts' : [ 'chagall = chagallpy:generate' ] }, include_package_data = True, package_data = { 'resources': ['*.*'], 'templates': ['*.html'] }, )
#!/usr/bin/env python from setuptools import setup, find_packages import chagallpy setup( name='chagallpy', version=chagallpy.__version__, packages=find_packages(), license='MIT', description='CHArming GALLEry in PYthon', long_description=open('README.md').read(), author='Jan Pipek', author_email='[email protected]', url='https://github.com/janpipek/chagallpy', install_requires = [ 'wowp', 'pillow', "jinja2" ], entry_points = { 'console_scripts' : [ 'chagall = chagallpy:generate' ] }, include_package_data = True, package_data = { 'resources': ['*.*'], 'templates': ['*.html'] }, )
Fix email address to be able to upload to pypi
Fix email address to be able to upload to pypi
Python
mit
janpipek/chagallpy,janpipek/chagallpy,janpipek/chagallpy
8c7b048ff02439573a0ad399e5e11ea6f9bfd3a0
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages dev_requires = [ 'Sphinx==1.2.2', ] install_requires = [ 'nodeconductor>=0.95.0', ] setup( name='oracle-paas', version='0.1.0', author='OpenNode Team', author_email='[email protected]', url='http://nodeconductor.com', description='Plugin for custom Oracle PaaS', long_description=open('README.rst').read(), package_dir={'': 'src'}, packages=find_packages('src', exclude=['*.tests', '*.tests.*', 'tests.*', 'tests']), install_requires=install_requires, zip_safe=False, extras_require={ 'dev': dev_requires, }, entry_points={ 'nodeconductor_extensions': ( 'oracle_paas = oracle_paas.extension:OracleExtension', ), }, include_package_data=True, classifiers=[ 'Framework :: Django', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: OS Independent', 'License :: Apache v2', ], )
#!/usr/bin/env python from setuptools import setup, find_packages dev_requires = [ 'Sphinx==1.2.2', ] install_requires = [ 'nodeconductor>=0.95.0', ] setup( name='nodeconductor-paas-oracle', version='0.1.0', author='OpenNode Team', author_email='[email protected]', url='http://nodeconductor.com', description='Plugin for custom Oracle PaaS', long_description=open('README.rst').read(), package_dir={'': 'src'}, packages=find_packages('src', exclude=['*.tests', '*.tests.*', 'tests.*', 'tests']), install_requires=install_requires, zip_safe=False, extras_require={ 'dev': dev_requires, }, entry_points={ 'nodeconductor_extensions': ( 'oracle_paas = oracle_paas.extension:OracleExtension', ), }, include_package_data=True, classifiers=[ 'Framework :: Django', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: OS Independent', 'License :: Apache v2', ], )
Rename package oracle-paas -> nodeconductor-paas-oracle
Rename package oracle-paas -> nodeconductor-paas-oracle
Python
mit
opennode/nodeconductor-paas-oracle
9bebb444525f57558114623c2d2b69013b26a703
setup.py
setup.py
#! /usr/bin/env python """Setup information of demandlib. """ from setuptools import setup, find_packages import os def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup(name='demandlib', version='0.1.7dev', author='oemof developer group', url='https://oemof.org/', license='GPL3', author_email='[email protected]', description='Demandlib of the open energy modelling framework', long_description=read('README.rst'), packages=find_packages(), install_requires=['numpy >= 1.7.0, < 1.17', 'pandas >= 0.18.0, < 0.25'], package_data={ 'demandlib': [os.path.join('bdew_data', '*.csv')], 'demandlib.examples': ['*.csv']}, )
#! /usr/bin/env python """Setup information of demandlib. """ from setuptools import setup, find_packages import os def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup(name='demandlib', version='0.1.7dev', author='oemof developer group', url='https://oemof.org/', license='GPL3', author_email='[email protected]', description='Demandlib of the open energy modelling framework', long_description=read('README.rst'), packages=find_packages(), install_requires=['numpy >= 1.7.0, < 1.17', 'pandas >= 0.18.0, < 1.2'], package_data={ 'demandlib': [os.path.join('bdew_data', '*.csv')], 'demandlib.examples': ['*.csv']}, )
Allow pandas 1.1 as dependency
Allow pandas 1.1 as dependency
Python
mit
oemof/demandlib
7044fa0005f5f056ee5d6bc4e421fb81454317f6
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages version_tuple = __import__('pymysql').VERSION if version_tuple[3] is not None: version = "%d.%d.%d_%s" % version_tuple else: version = "%d.%d.%d" % version_tuple[:3] setup( name="PyMySQL", version=version, url='https://github.com/PyMySQL/PyMySQL/', download_url = 'https://github.com/PyMySQL/PyMySQL/tarball/pymysql-%s' % version, author='yutaka.matsubara', author_email='[email protected]', maintainer='INADA Naoki', maintainer_email='[email protected]', description='Pure-Python MySQL Driver', license="MIT", packages=find_packages(), classifiers=[ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Database', ] )
#!/usr/bin/env python from setuptools import setup, find_packages version_tuple = __import__('pymysql').VERSION if version_tuple[3] is not None: version = "%d.%d.%d_%s" % version_tuple else: version = "%d.%d.%d" % version_tuple[:3] setup( name="PyMySQL", version=version, url='https://github.com/PyMySQL/PyMySQL/', author='yutaka.matsubara', author_email='[email protected]', maintainer='INADA Naoki', maintainer_email='[email protected]', description='Pure-Python MySQL Driver', license="MIT", packages=find_packages(), classifiers=[ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Database', ] )
Remove not working download URI
Remove not working download URI
Python
mit
pymysql/pymysql,PyMySQL/PyMySQL,MartinThoma/PyMySQL,methane/PyMySQL,wraziens/PyMySQL,wraziens/PyMySQL
270afd4d11ebc3888873cd6ffe89b988593c3e41
setup.py
setup.py
# Copyright 2021 DeepMind Technologies Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Setup configuration specifying XManager dependencies.""" from setuptools import find_namespace_packages from setuptools import setup setup( name='xmanager', version='1.0.0', description='A framework for managing experiments', author='DeepMind Technologies Limited', packages=find_namespace_packages(), include_package_data=True, package_data={'': ['*.sh', '*.sql']}, python_requires='>=3.7', install_requires=[ 'absl-py', 'async_generator', 'attrs', 'docker', 'immutabledict', 'google-api-core', 'google-api-python-client', 'google-cloud-aiplatform>=1.4.0', 'google-auth', 'google-cloud-storage', 'humanize', 'kubernetes', 'sqlalchemy==1.2', 'termcolor', ], entry_points={ 'console_scripts': ['xmanager = xmanager.cli.cli:Entrypoint',], }, )
# Copyright 2021 DeepMind Technologies Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Setup configuration specifying XManager dependencies.""" from setuptools import find_namespace_packages from setuptools import setup setup( name='xmanager', version='1.0.0', description='A framework for managing experiments', author='DeepMind Technologies Limited', packages=find_namespace_packages(), include_package_data=True, package_data={'': ['*.sh', '*.sql']}, python_requires='>=3.7', install_requires=[ 'absl-py', 'async_generator', 'attrs', 'docker', 'google-api-core', 'google-api-python-client', 'google-cloud-aiplatform>=1.4.0', 'google-auth', 'google-cloud-storage', 'humanize', 'immutabledict', 'kubernetes', 'sqlalchemy==1.2', 'termcolor', ], entry_points={ 'console_scripts': ['xmanager = xmanager.cli.cli:Entrypoint',], }, )
Maintain alphabetical order in `install_requires`
Maintain alphabetical order in `install_requires` PiperOrigin-RevId: 395092683 Change-Id: I87f23eafcb8a3cdafd36b8fd700f8a1f24f9fa6e GitOrigin-RevId: a0819922a706dec7b8c2a17181c56a6900288e67
Python
apache-2.0
deepmind/xmanager,deepmind/xmanager
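A tiny guard one could run in CI to keep install_requires alphabetized, as the xmanager commit above does by hand (hypothetical helper; real requirement strings would first need their version constraints stripped):
def is_alphabetized(requirements):
    names = [r.lower() for r in requirements]
    return names == sorted(names)

print(is_alphabetized(['absl-py', 'attrs', 'docker', 'humanize']))  # True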
add426252ad864860f1188b446d05ad6bcf11df2
setup.py
setup.py
from setuptools import setup, find_packages setup( name='lightstep', version='3.0.11', description='LightStep Python OpenTracing Implementation', long_description='', author='LightStep', license='', install_requires=['thrift==0.10.0', 'jsonpickle', 'six', 'basictracer>=2.2,<2.3', 'googleapis-common-protos==1.5.3', 'requests==2.19.1'], tests_require=['pytest', 'sphinx', 'sphinx-epytext'], classifiers=[ 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', ], keywords=[ 'opentracing', 'lightstep', 'traceguide', 'tracing', 'microservices', 'distributed' ], packages=find_packages(exclude=['docs*', 'tests*', 'sample*']), )
from setuptools import setup, find_packages setup( name='lightstep', version='3.0.11', description='LightStep Python OpenTracing Implementation', long_description='', author='LightStep', license='', install_requires=['thrift==0.10.0', 'jsonpickle', 'six', 'basictracer>=2.2,<2.3', 'googleapis-common-protos==1.5.3', 'requests>=2.19,<3.0'], tests_require=['pytest', 'sphinx', 'sphinx-epytext'], classifiers=[ 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', ], keywords=[ 'opentracing', 'lightstep', 'traceguide', 'tracing', 'microservices', 'distributed' ], packages=find_packages(exclude=['docs*', 'tests*', 'sample*']), )
Make requests dependency version more flexible
LS-5226: Make requests dependency version more flexible
Python
mit
lightstephq/lightstep-tracer-python
c08460faaccf75acb43f8bab6e3248666ff811c6
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-admin-extend', version='0.0.1', description=('Provides functionality for extending' 'ModelAdmin classes that have already' 'been registered by other apps'), author='Ioan Alexandru Cucu', author_email='[email protected]', url='https://github.com/kux/django-admin-extend', download_url='https://github.com/kux/django-admin-extend/tarball/0.1', install_requires=('Django>=1.3',), packages=find_packages(), include_package_data=True, )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-admin-extend', version='0.0.2', description=('Provides functionality for extending' 'ModelAdmin classes that have already' 'been registered by other apps'), author='Ioan Alexandru Cucu', author_email='[email protected]', url='https://github.com/kux/django-admin-extend', download_url='https://github.com/kux/django-admin-extend/archive/0.0.2.tar.gz', install_requires=('Django>=1.3',), packages=find_packages(), include_package_data=True, )
Fix download url and bump version
Fix download url and bump version
Python
mit
kux/django-admin-extend
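The download_url drift that the commit above corrects can be avoided by deriving the URL from the version; a hypothetical refactor of the same setup.py fields:
version = '0.0.2'
download_url = ('https://github.com/kux/django-admin-extend'
                '/archive/{}.tar.gz'.format(version))
print(download_url)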
a5baa5f333625244c1e0935745dadedb7df444c3
setup.py
setup.py
#!/usr/bin/env python import os from distutils.core import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='whack', version='0.3.0', description='Utility for installing binaries from source with a single command', long_description=read("README"), author='Michael Williamson', url='http://github.com/mwilliamson/whack', scripts=["scripts/whack"], packages=['whack'], install_requires=['blah>=0.1.10,<0.2', 'requests', "catchy==0.1.0"], )
#!/usr/bin/env python import os from distutils.core import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='whack', version='0.3.0', description='Utility for installing binaries from source with a single command', long_description=read("README"), author='Michael Williamson', url='http://github.com/mwilliamson/whack', scripts=["scripts/whack"], packages=['whack'], install_requires=['blah>=0.1.10,<0.2', 'requests>=1,<2', "catchy>=0.1.0,<0.2"], )
Update install_requires to be more accurate
Update install_requires to be more accurate
Python
bsd-2-clause
mwilliamson/whack
036bbfb768de845b3495b99d212fffbf98ba5571
setup.py
setup.py
import os try: from setuptools import setup, Extension except ImportError: # Use distutils.core as a fallback. # We won't be able to build the Wheel file on Windows. from distutils.core import setup, Extension extensions = [] if os.name == 'nt': ext = Extension( 'asyncio._overlapped', ['overlapped.c'], libraries=['ws2_32'], ) extensions.append(ext) setup( name="asyncio", version="0.1.1", description="reference implementation of PEP 3156", long_description=open("README").read(), url="http://www.python.org/dev/peps/pep-3156/", classifiers=[ "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", ], packages=["asyncio"], ext_modules=extensions, )
import os try: from setuptools import setup, Extension except ImportError: # Use distutils.core as a fallback. # We won't be able to build the Wheel file on Windows. from distutils.core import setup, Extension extensions = [] if os.name == 'nt': ext = Extension( 'asyncio._overlapped', ['overlapped.c'], libraries=['ws2_32'], ) extensions.append(ext) setup( name="asyncio", version="0.2.1", description="reference implementation of PEP 3156", long_description=open("README").read(), url="http://www.python.org/dev/peps/pep-3156/", classifiers=[ "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", ], packages=["asyncio"], ext_modules=extensions, )
Set version to 0.2.1. Ready for PyPI.
Set version to 0.2.1. Ready for PyPI.
Python
apache-2.0
jashandeep-sohi/asyncio,gsb-eng/asyncio,gsb-eng/asyncio,ajdavis/asyncio,ajdavis/asyncio,fallen/asyncio,1st1/asyncio,Martiusweb/asyncio,Martiusweb/asyncio,fallen/asyncio,jashandeep-sohi/asyncio,manipopopo/asyncio,jashandeep-sohi/asyncio,gvanrossum/asyncio,ajdavis/asyncio,haypo/trollius,haypo/trollius,Martiusweb/asyncio,fallen/asyncio,manipopopo/asyncio,haypo/trollius,gvanrossum/asyncio,vxgmichel/asyncio,vxgmichel/asyncio,gsb-eng/asyncio,vxgmichel/asyncio,1st1/asyncio,manipopopo/asyncio,1st1/asyncio,gvanrossum/asyncio
dc460d02b489a5cbca34f5525fa9f0ac0a67cb61
setup.py
setup.py
from distribute_setup import use_setuptools use_setuptools() from setuptools import setup setup( name = "open511", version = "0.1", url='', license = "", packages = [ 'open511', ], install_requires = [ 'lxml', 'webob', 'python-dateutil>=1.5,<2.0', 'requests', 'pytz', 'django-appconf==0.5', ], entry_points = { 'console_scripts': [ 'mtl_kml_to_open511 = open511.scripts.mtl_kml_to_open511:main', 'scrape_mtq_to_open511 = open511.scripts.scrape_mtq_to_open511:main', ] }, )
from distribute_setup import use_setuptools use_setuptools() from setuptools import setup setup( name = "open511", version = "0.1", url='', license = "", packages = [ 'open511', ], install_requires = [ 'lxml>=2.3', 'WebOb>=1.2,<2', 'python-dateutil>=1.5,<2.0', 'requests>=1.2,<2', 'pytz>=2013b', 'django-appconf==0.5', 'cssselect==0.8', ], entry_points = { 'console_scripts': [ 'mtl_kml_to_open511 = open511.scripts.mtl_kml_to_open511:main', 'scrape_mtq_to_open511 = open511.scripts.scrape_mtq_to_open511:main', ] }, )
Make dependency versions more explicit
Make dependency versions more explicit
Python
mit
Open511/open511-server,Open511/open511-server,Open511/open511-server
e848239bedb6dca579e24a23bc04a7ce4f2d1a80
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup,find_packages METADATA = dict( name='django-allauth', version='0.0.1', author='Raymond Penners', author_email='[email protected]', description='Integrated set of Django applications addressing authentication, registration, account management as well as 3rd party (social) account authentication.', long_description=open('README.rst').read(), url='http://github.com/pennersr/django-allauth', keywords='django auth account social openid twitter facebook oauth registration', install_requires=['django', 'oauth2', 'python-openid', 'django-email-confirmation', 'django-uni-form'], include_package_data=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Software Development :: Libraries :: Python Modules', 'Environment :: Web Environment', 'Topic :: Internet', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], packages=find_packages(), package_data={'allauth': ['templates/allauth/*.html'], } ) if __name__ == '__main__': setup(**METADATA)
#!/usr/bin/env python from setuptools import setup,find_packages METADATA = dict( name='django-allauth', version='0.0.1', author='Raymond Penners', author_email='[email protected]', description='Integrated set of Django applications addressing authentication, registration, account management as well as 3rd party (social) account authentication.', long_description=open('README.rst').read(), url='http://github.com/pennersr/django-allauth', keywords='django auth account social openid twitter facebook oauth registration', install_requires=['django', 'oauth2', 'python-openid', 'django-email-confirmation', 'django-uni-form'], include_package_data=True, classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Topic :: Software Development :: Libraries :: Python Modules', 'Environment :: Web Environment', 'Topic :: Internet', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], packages=find_packages(), package_data={'allauth': ['templates/allauth/*.html'], } ) if __name__ == '__main__': setup(**METADATA)
Change development status to beta
Change development status to beta
Python
mit
pztrick/django-allauth,jwhitlock/django-allauth,grue/django-allauth,hanasoo/django-allauth,sih4sing5hong5/django-allauth,tigeraniya/django-allauth,kingofsystem/django-allauth,JoshLabs/django-allauth,rsalmaso/django-allauth,MickaelBergem/django-allauth,wli/django-allauth,knowsis/django-allauth,rawjam/django-allauth,pennersr/django-allauth,petersanchez/django-allauth,sih4sing5hong5/django-allauth,jscott1989/django-allauth,carltongibson/django-allauth,erueloi/django-allauth,jwhitlock/django-allauth,moreati/django-allauth,janusnic/django-allauth,SakuradaJun/django-allauth,tigeraniya/django-allauth,kingofsystem/django-allauth,italomaia/django-allauth,github-account-because-they-want-it/django-allauth,MickaelBergem/django-allauth,wli/django-allauth,jscott1989/django-allauth,carltongibson/django-allauth,janusnic/django-allauth,pranjalpatil/django-allauth,bitcity/django-allauth,patricio-astudillo/django-allauth,avsd/django-allauth,spool/django-allauth,JshWright/django-allauth,pztrick/django-allauth,patricio-astudillo/django-allauth,bitcity/django-allauth,neo/django-allauth,ankitjain87/django-allauth,zhangziang/django-allauth,ldgarcia/django-allauth,bopo/django-allauth,cudadog/django-allauth,aexeagmbh/django-allauth,rsalmaso/django-allauth,igorgai/django-allauth,concentricsky/django-allauth,joshowen/django-allauth,zhangziang/django-allauth,aexeagmbh/django-allauth,concentricsky/django-allauth,tigeraniya/django-allauth,cudadog/django-allauth,agriffis/django-allauth,bopo/django-allauth,ankitjain87/django-allauth,alacritythief/django-allauth,ZachLiuGIS/django-allauth,dincamihai/django-allauth,avsd/django-allauth,beswarm/django-allauth,willharris/django-allauth,nimbis/django-allauth,joebos/django-allauth,yarbelk/django-allauth,nangia/django-allauth,wli/django-allauth,bjorand/django-allauth,kingofsystem/django-allauth,lmorchard/django-allauth,pennersr/django-allauth,HackerEarth/django-allauth,bopo/django-allauth,ankitjain87/django-allauth,alacritythief/django-allauth,joebos/django-allauth,igorgai/django-allauth,repetilo-andrey/django-allauth,wayward710/django-allauth,jscott1989/django-allauth,ZachLiuGIS/django-allauth,dincamihai/django-allauth,knowsis/django-allauth,erueloi/django-allauth,fabiocerqueira/django-allauth,janusnic/django-allauth,fuzzpedal/django-allauth,MickaelBergem/django-allauth,yarbelk/django-allauth,jscott1989/django-allauth,patricio-astudillo/django-allauth,agriffis/django-allauth,petersanchez/django-allauth,nimbis/django-allauth,rulz/django-allauth,ashwoods/django-allauth,github-account-because-they-want-it/django-allauth,rawjam/django-allauth,JoshLabs/django-allauth,carltongibson/django-allauth,bjorand/django-allauth,manran/django-allauth,knowsis/django-allauth,repetilo-andrey/django-allauth,wli/django-allauth,davidrenne/django-allauth,cudadog/django-allauth,ashwoods/django-allauth,SakuradaJun/django-allauth,ashwoods/django-allauth,carltongibson/django-allauth,pranjalpatil/django-allauth,wayward710/django-allauth,pankeshang/django-allauth,willharris/django-allauth,avsd/django-allauth,hanasoo/django-allauth,socialsweethearts/django-allauth,joshowen/django-allauth,armicron/django-allauth,igorgai/django-allauth,payamsm/django-allauth,lmorchard/django-allauth,7WebPages/django-allauth,socialsweethearts/django-allauth,janusnic/django-allauth,pankeshang/django-allauth,joshowen/django-allauth,spool/django-allauth,neo/django-allauth,payamsm/django-allauth,sih4sing5hong5/django-allauth,fabiocerqueira/django-allauth,AltSchool/django-allauth,JshWright/django-allauth,rsalmaso/django-allauth,fuzzpedal/django-allauth,vuchau/django-allauth,BadgerMaps/django-allauth,github-account-because-they-want-it/django-allauth,dincamihai/django-allauth,manran/django-allauth,owais/django-allauth,80vs90/django-allauth,italomaia/django-allauth,hanasoo/django-allauth,rulz/django-allauth,vuchau/django-allauth,beswarm/django-allauth,yarbelk/django-allauth,alacritythief/django-allauth,owais/django-allauth,moreati/django-allauth,nangia/django-allauth,lukeburden/django-allauth,pranjalpatil/django-allauth,ldgarcia/django-allauth,agriffis/django-allauth,davidrenne/django-allauth,AltSchool/django-allauth,lmorchard/django-allauth,zhangziang/django-allauth,7WebPages/django-allauth,vuchau/django-allauth,davidrenne/django-allauth,pennersr/django-allauth,spool/django-allauth,SakuradaJun/django-allauth,petersanchez/django-allauth,concentricsky/django-allauth,rulz/django-allauth,kingofsystem/django-allauth,socialsweethearts/django-allauth,julen/django-allauth,ldgarcia/django-allauth,ZachLiuGIS/django-allauth,owais/django-allauth,AltSchool/django-allauth,pztrick/django-allauth,BadgerMaps/django-allauth,bjorand/django-allauth,jwhitlock/django-allauth,bittner/django-allauth,erueloi/django-allauth,80vs90/django-allauth,JshWright/django-allauth,manran/django-allauth,lukeburden/django-allauth,aexeagmbh/django-allauth,grue/django-allauth,repetilo-andrey/django-allauth,80vs90/django-allauth,italomaia/django-allauth,nimbis/django-allauth,beswarm/django-allauth,wayward710/django-allauth,bitcity/django-allauth,moreati/django-allauth,fuzzpedal/django-allauth,joebos/django-allauth,bittner/django-allauth,erueloi/django-allauth,payamsm/django-allauth,julen/django-allauth,nangia/django-allauth,lukeburden/django-allauth,bittner/django-allauth,ankitjain87/django-allauth,sachingupta006/django-allauth
6aa81b7f97b39e32f6c5148a26366cf72c46d1e9
setup.py
setup.py
from setuptools import setup, find_packages setup( name="vumi_twilio_api", version="0.0.1a", url="https://github.com/praekelt/vumi-twilio-api", license="BSD", description="Provides a REST API to Vumi that emulates the Twilio API", long_description=open("README.rst", "r").read(), author="Praekelt Foundation", author_email="[email protected]", packages=find_packages(), scripts=[], install_requires=[], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Networking', ], )
from setuptools import setup, find_packages setup( name="vxtwinio", version="0.0.1a", url="https://github.com/praekelt/vumi-twilio-api", license="BSD", description="Provides a REST API to Vumi that emulates the Twilio API", long_description=open("README.rst", "r").read(), author="Praekelt Foundation", author_email="[email protected]", packages=find_packages(), scripts=[], install_requires=[], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Networking', ], )
Change package name to vxtwinio
Change package name to vxtwinio
Python
bsd-3-clause
praekelt/vumi-twilio-api
00e2abb375c25bd8507c575e9b5b2567aa029061
setup.py
setup.py
from setuptools import setup with open("README.rst") as readme_file: readme = readme_file.read() setup( name='pytest-describe', version='1.0.0', description='Describe-style plugin for pytest', long_description=readme, long_description_content_type='text/x-rst', url='https://github.com/ropez/pytest-describe', author='Robin Pedersen', author_email='[email protected]', license='MIT license', install_requires=[ 'pytest>=2.6.0', ], entry_points={ 'pytest11': [ 'pytest-describe = pytest_describe.plugin' ], }, packages=['pytest_describe'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], )
from setuptools import setup with open("README.rst") as readme_file: readme = readme_file.read() setup( name='pytest-describe', version='1.0.0', description='Describe-style plugin for pytest', long_description=readme, long_description_content_type='text/x-rst', url='https://github.com/pytest-dev/pytest-describe', author='Robin Pedersen', author_email='[email protected]', license='MIT license', install_requires=[ 'pytest>=2.6.0', ], entry_points={ 'pytest11': [ 'pytest-describe = pytest_describe.plugin' ], }, packages=['pytest_describe'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX', 'Operating System :: Microsoft :: Windows', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', ], )
Update URL since repository has moved to pytest-dev
Update URL since repository has moved to pytest-dev
Python
mit
ropez/pytest-describe
b0a2ef5f0acdcd987045737d1b7cc953b09fae28
setup.py
setup.py
# -*- coding: utf-8 -*- from setuptools import setup with open('README.rst') as rdm: README = rdm.read() setup( name='stagpy', use_scm_version=True, description='Tool for StagYY output files processing', long_description=README, url='https://github.com/StagPython/StagPy', author='Martina Ulvrova, Adrien Morison, StΓ©phane Labrosse', author_email='[email protected]', license='GPLv2', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], packages = ['stagpy'], entry_points = { 'console_scripts': ['stagpy = stagpy.__main__:main'] }, setup_requires=['setuptools_scm'], install_requires = [ 'numpy>=1.12', 'scipy>=0.19', 'f90nml>=0.21', 'pandas>=0.20', 'matplotlib>=2.0', 'seaborn>=0.7.1', 'argcomplete>=1.8', 'setuptools_scm>=1.15', ], )
# -*- coding: utf-8 -*- from setuptools import setup with open('README.rst') as rdm: README = rdm.read() setup( name='stagpy', use_scm_version=True, description='Tool for StagYY output files processing', long_description=README, url='https://github.com/StagPython/StagPy', author='Martina Ulvrova, Adrien Morison, StΓ©phane Labrosse', author_email='[email protected]', license='GPLv2', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)', 'Programming Language :: Python :: 3 :: Only', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], packages = ['stagpy'], entry_points = { 'console_scripts': ['stagpy = stagpy.__main__:main'] }, setup_requires=['setuptools_scm'], install_requires = [ 'numpy>=1.12', 'scipy>=0.17', 'f90nml>=0.21', 'pandas>=0.20', 'matplotlib>=2.0', 'seaborn>=0.7.1', 'argcomplete>=1.8', 'setuptools_scm>=1.15', ], )
Decrease scipy version to 0.17 (for RTD)
Decrease scipy version to 0.17 (for RTD)
Python
apache-2.0
StagPython/StagPy
2c5dd9086681422b21d0bfac0906db5ccdf22b0c
setup.py
setup.py
from setuptools import setup, find_packages setup( name = "biofloat", version = "0.3.0", packages = find_packages(), requires = ['Python (>=2.7)'], install_requires = [ 'beautifulsoup4>=4.4', 'coverage>=4', 'jupyter>=1.0.0', 'matplotlib', 'numpy>=1.10', 'pandas>=0.17', 'Pydap', 'requests>=2.8', 'seawater>=3.3', 'simpletable>=0.2', 'xray>=0.6' ], scripts = ['scripts/load_biofloat_cache.py'], # metadata for upload to PyPI author = "Mike McCann", author_email = "[email protected]", description = "Software for working with data from Bio-Argo floats", license = "MIT", keywords = "Oceanography Argo Bio-Argo drifting buoys floats", url = "https://github.com/biofloat/biofloat", )
from setuptools import setup, find_packages setup( name = "biofloat", version = "0.3.0", packages = find_packages(), requires = ['Python (>=2.7)'], install_requires = [ 'beautifulsoup4>=4.4', 'coverage>=4', 'jupyter>=1.0.0', 'matplotlib', 'numpy>=1.10', 'pandas>=0.17', 'Pydap', 'requests>=2.8', 'seawater>=3.3', 'simpletable>=0.2', 'xray>=0.6' ], scripts = ['scripts/load_biofloat_cache.py', 'scripts/woa_calibration.py'], # metadata for upload to PyPI author = "Mike McCann", author_email = "[email protected]", description = "Software for working with data from Bio-Argo floats", license = "MIT", keywords = "Oceanography Argo Bio-Argo drifting buoys floats", url = "https://github.com/biofloat/biofloat", )
Add 'scripts/woa_calibration.py' to scripts list
Add 'scripts/woa_calibration.py' to scripts list
Python
mit
biofloat/biofloat,biofloat/biofloat,MBARIMike/biofloat,MBARIMike/biofloat
5b24a22fa148f4ae4e8d4403824ad7881b27e644
setup.py
setup.py
""" trafficserver_exporter ---------------------- An Apache Traffic Server metrics exporter for Prometheus. Uses the stats_over_http plugin to translate JSON data into Prometheus format. """ from setuptools import setup setup( name='trafficserver_exporter', version='0.0.2', author='Greg Dallavalle', description='Traffic Server metrics exporter for Prometheus', long_description=__doc__, license='Apache Software License 2.0', keywords='prometheus monitoring trafficserver', test_suite='tests', packages=['trafficserver_exporter'], entry_points={ 'console_scripts': [ 'trafficserver_exporter=trafficserver_exporter.__main__:main' ], }, install_requires=[ 'prometheus_client>=0.0.11', 'requests>=2.0.0' ], classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'Topic :: System :: Monitoring', 'Topic :: System :: Networking :: Monitoring', 'License :: OSI Approved :: Apache Software License', ], )
""" trafficserver_exporter ---------------------- An Apache Traffic Server metrics exporter for Prometheus. Uses the stats_over_http plugin to translate JSON data into Prometheus format. """ from setuptools import setup setup( name='trafficserver_exporter', version='0.0.3', author='Greg Dallavalle', description='Traffic Server metrics exporter for Prometheus', long_description=__doc__, license='Apache Software License 2.0', keywords='prometheus monitoring trafficserver', test_suite='tests', packages=['trafficserver_exporter'], entry_points={ 'console_scripts': [ 'trafficserver_exporter=trafficserver_exporter.__main__:main' ], }, install_requires=[ 'prometheus_client>=0.0.11', 'requests>=2.0.0' ], classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Intended Audience :: Information Technology', 'Intended Audience :: System Administrators', 'Topic :: System :: Monitoring', 'Topic :: System :: Networking :: Monitoring', 'License :: OSI Approved :: Apache Software License', ], )
Add bumpversion, sync package version
Add bumpversion, sync package version
Python
apache-2.0
gdvalle/trafficserver_exporter
fc6042cf57752ca139c52889ec5e00c02b618d0d
setup.py
setup.py
from distutils.core import setup, Command class PyTest(Command): user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): import sys import subprocess errno = subprocess.call([sys.executable, 'runtests.py']) raise SystemExit(errno) with open('README.rst') as file: long_description = file.read() setup( name='webpay', packages=['webpay'], version='0.1.0', author='webpay', author_email='[email protected]', url='https://github.com/webpay/webpay-python', description='WebPay Python bindings', cmdclass={'test': PyTest}, long_description=long_description, classifiers=[ 'Development Status :: 4 - Beta', 'Operating System :: OS Independent', 'License :: OSI Approved :: MIT License', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules' ], requires=[ 'requests (== 2.0.1)' ] )
from distutils.core import setup, Command class PyTest(Command): user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): import sys import subprocess errno = subprocess.call([sys.executable, 'runtests.py']) raise SystemExit(errno) with open('README.rst') as file: long_description = file.read() setup( name='webpay', packages=['webpay', 'webpay.api', 'webpay.model'], version='0.1.0', author='webpay', author_email='[email protected]', url='https://github.com/webpay/webpay-python', description='WebPay Python bindings', cmdclass={'test': PyTest}, long_description=long_description, classifiers=[ 'Development Status :: 4 - Beta', 'Operating System :: OS Independent', 'License :: OSI Approved :: MIT License', 'Intended Audience :: Developers', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: Implementation :: CPython', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules' ], requires=[ 'requests (== 2.0.1)' ] )
Add api and model to packages
Add api and model to packages
Python
mit
yamaneko1212/webpay-python
8b25ce43c2d99876080d84674485ebad07ca4bc0
setup.py
setup.py
#!/usr/bin/env python2 import os from setuptools import setup, find_packages from plugins import __version__ repo_directory = os.path.dirname(__file__) try: long_description = open(os.path.join(repo_directory, 'README.rst')).read() except: long_description = None setup( name='gds-nagios-plugins', version=__version__, packages=find_packages(exclude=['test*']), author='Tom Booth', author_email='[email protected]', maintainer='Government Digital Service', url='https://github.com/alphagov/nagios-plugins', description='nagios-plugins: a set of useful nagios plugins', long_description=long_description, license='MIT', keywords='', setup_requires=['setuptools-pep8'], install_requires=[ "nagioscheck==0.1.6" ], tests_require=[ "nose==1.3.1", "freezegun==0.1.11" ], test_suite='nose.collector', entry_points={ 'console_scripts': [ 'check_apt_security_updates=' 'plugins.command.check_apt_security_updates:main', 'check_reboot_required=plugins.command.check_reboot_required:main', 'check_elasticsearch=plugins.command.check_elasticsearch:main' ] } )
#!/usr/bin/env python2 import os from setuptools import setup, find_packages from plugins import __version__ repo_directory = os.path.dirname(__file__) try: long_description = open(os.path.join(repo_directory, 'README.rst')).read() except: long_description = None setup( name='gds-nagios-plugins', version=__version__, packages=find_packages(exclude=['test*']), author='Tom Booth', author_email='[email protected]', maintainer='Government Digital Service', url='https://github.com/alphagov/nagios-plugins', description='nagios-plugins: a set of useful nagios plugins', long_description=long_description, license='MIT', keywords='', setup_requires=['setuptools-pep8'], install_requires=[ "nagioscheck==0.1.6" ], tests_require=[ "nose >=1.3, <1.4", "freezegun==0.1.11" ], test_suite='nose.collector', entry_points={ 'console_scripts': [ 'check_apt_security_updates=' 'plugins.command.check_apt_security_updates:main', 'check_reboot_required=plugins.command.check_reboot_required:main', 'check_elasticsearch=plugins.command.check_elasticsearch:main' ] } )
Allow looser version of nose
Allow looser version of nose TravisCI provides `nose` already installed: - http://docs.travis-ci.com/user/languages/python/#Pre-installed-packages However it's now at a later version and causes our tests to fail: pkg_resources.VersionConflict: (nose 1.3.4 (/home/travis/virtualenv/python2.7.8/lib/python2.7/site-packages), Requirement.parse('nose==1.3.1'))
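Since the quoted failure comes from pkg_resources, a quick sketch of how the relaxed specifier behaves against the versions involved (plain setuptools machinery, not part of the commit):

from pkg_resources import Requirement

req = Requirement.parse('nose >=1.3, <1.4')
print('1.3.1' in req)  # True  - the previously pinned version still matches
print('1.3.4' in req)  # True  - the version Travis pre-installs now satisfies it
print('1.4.0' in req)  # False - a new minor series is still excluded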
Python
mit
alphagov/nagios-plugins
e3d23b47b01deebb25652512be551a128db9c36e
alg_dijkstra_shortest_path.py
alg_dijkstra_shortest_path.py
from __future__ import absolute_import from __future__ import print_function from __future__ import division from ds_binary_heap_tuple import BinaryHeap def dijkstra(weighted_graph_d, start_vertex): inf = float('inf') shortest_path_d = { vertex: inf for vertex in weighted_graph_d } shortest_path_d[start_vertex] = 0 bh = BinaryHeap() # TODO: Continue Dijkstra's algorithm. def main(): weighted_graph_d = { 'u': {'v': 2, 'w': 5, 'x': 1}, 'v': {'u': 2, 'w': 3, 'x': 2}, 'w': {'u': 5, 'v': 3, 'x': 3, 'y': 1, 'z': 5}, 'x': {'u': 1, 'v': 2, 'w': 3, 'y': 1}, 'y': {'w': 1, 'x': 1, 'z': 1}, 'z': {'w': 5, 'y': 1} } if __name__ == '__main__': main()
from __future__ import absolute_import from __future__ import print_function from __future__ import division from ds_binary_heap_tuple import BinaryHeap def dijkstra(weighted_graph_d, start_vertex): shortest_path_d = { vertex: float('inf') for vertex in weighted_graph_d } shortest_path_d[start_vertex] = 0 bh = BinaryHeap() # TODO: Continue Dijkstra's algorithm. def main(): weighted_graph_d = { 'u': {'v': 2, 'w': 5, 'x': 1}, 'v': {'u': 2, 'w': 3, 'x': 2}, 'w': {'u': 5, 'v': 3, 'x': 3, 'y': 1, 'z': 5}, 'x': {'u': 1, 'v': 2, 'w': 3, 'y': 1}, 'y': {'w': 1, 'x': 1, 'z': 1}, 'z': {'w': 5, 'y': 1} } if __name__ == '__main__': main()
Move inf to shortest_path_d for clarity
Move inf to shortest_path_d for clarity
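The record leaves the algorithm itself as a TODO, and its ds_binary_heap_tuple helper isn't shown, so here is a minimal sketch of how the remainder typically looks, written against the standard-library heapq instead of the custom BinaryHeap:

import heapq

def dijkstra(weighted_graph_d, start_vertex):
    shortest_path_d = {vertex: float('inf') for vertex in weighted_graph_d}
    shortest_path_d[start_vertex] = 0
    heap = [(0, start_vertex)]
    while heap:
        dist, vertex = heapq.heappop(heap)
        if dist > shortest_path_d[vertex]:
            continue  # stale queue entry; a shorter path was already found
        for neighbor, weight in weighted_graph_d[vertex].items():
            candidate = dist + weight
            if candidate < shortest_path_d[neighbor]:
                shortest_path_d[neighbor] = candidate
                heapq.heappush(heap, (candidate, neighbor))
    return shortest_path_d

Running this on the record's weighted_graph_d from 'u' gives, for example, shortest_path_d['z'] == 3 (u -> x -> y -> z).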
Python
bsd-2-clause
bowen0701/algorithms_data_structures
a4cacaba81dda523fb6e24f8a4382a334cc549a8
textinator.py
textinator.py
from PIL import Image
from os import get_terminal_size

default_palette = list('░▒▓█')

print(get_terminal_size())


def scale(val, src, dst):
    """
    Scale the given value from the scale of src to the scale of dst.
    """
    return ((val - src[0]) / (src[1]-src[0])) * (dst[1]-dst[0]) + dst[0]


def value_to_char(value, palette=default_palette, value_range=(0, 256)):
    palette_range = (0, len(palette))
    mapped = int(scale(value, value_range, palette_range))
    return palette[mapped]


def convert_image(image_path):
    original = Image.open(image_path)
    width, height = original.size
    thumb = original.copy()
    thumb.thumbnail(get_terminal_size())
    bw = thumb.convert(mode="L")
    width, height = bw.size
    for y in range(height):
        line = ''
        for x in range(width):
            line += value_to_char(bw.getpixel((x, y)))
        print(line)
    bw.show()

if __name__ == '__main__':
    convert_image('doge.jpg')
import click
from PIL import Image


def scale(val, src, dst):
    """
    Scale the given value from the scale of src to the scale of dst.
    """
    return ((val - src[0]) / (src[1]-src[0])) * (dst[1]-dst[0]) + dst[0]


def value_to_char(value, palette, value_range=(0, 256)):
    palette_range = (0, len(palette))
    mapped = int(scale(value, value_range, palette_range))
    return palette[mapped]


@click.command()
@click.argument('image', type=click.File('rb'))
@click.argument('out', type=click.File('r'), default='-', required=False)
@click.option('-p', '--palette', default='█▓▒░ ',
              help="A custom palette for rendering images. Goes from dark to bright.")
@click.option('-w', '--width', type=click.INT,
              help="Width of output. If height is not given, the image will be proportionally scaled.")
@click.option('-h', '--height', type=click.INT,
              help="Height of output. If width is not given, the image will be proportionally scaled.")
def convert(image, palette, out, width, height):
    """
    Converts an input image to a text representation.
    Writes to stdout by default. Optionally takes another file as a second output.

    Supported filetypes: anything PIL supports. For JPEG etc., install the prerequisites.
    """
    original = Image.open(image)
    width, height = original.size
    thumb = original.copy()
    thumb.thumbnail((80, 80))  # thumbnail() expects a (width, height) tuple, not a bare int
    bw = thumb.convert(mode="L")
    width, height = bw.size
    for y in range(height):
        line = ''
        for x in range(width):
            pixel = bw.getpixel((x, y))
            line += value_to_char(pixel, palette)
        click.echo(line)
Add command-line interface with Click.
Add command-line interface with Click.
Python
mit
ijks/textinator
9f2141bad575e1718fe36a597e5af5b5c795da54
troposphere/openstack/heat.py
troposphere/openstack/heat.py
# Copyright (c) 2012-2013, Mark Peek <[email protected]>
# Copyright (c) 2014, Andy Botting <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.

from troposphere import AWSObject
from troposphere.validators import integer


# Due to the strange nature of the OpenStack compatibility layer, some values
# that should be integers fail to validate and need to be represented as
# strings. For this reason, we duplicate the AWS::AutoScaling::AutoScalingGroup
# and change these types.
class AutoScalingGroup(AWSObject):
    type = "AWS::AutoScaling::AutoScalingGroup"

    props = {
        'AvailabilityZones': (list, True),
        'Cooldown': (integer, False),
        'DesiredCapacity': (basestring, False),
        'HealthCheckGracePeriod': (int, False),
        'HealthCheckType': (basestring, False),
        'LaunchConfigurationName': (basestring, True),
        'LoadBalancerNames': (list, False),
        'MaxSize': (basestring, True),
        'MinSize': (basestring, True),
        'Tags': (list, False),
        'VPCZoneIdentifier': (list, False),
    }
# Copyright (c) 2012-2013, Mark Peek <[email protected]>
# Copyright (c) 2014, Andy Botting <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.

from troposphere import AWSObject
from troposphere.validators import integer


# Due to the strange nature of the OpenStack compatibility layer, some values
# that should be integers fail to validate and need to be represented as
# strings. For this reason, we duplicate the AWS::AutoScaling::AutoScalingGroup
# and change these types.
class AWSAutoScalingGroup(AWSObject):
    type = "AWS::AutoScaling::AutoScalingGroup"

    props = {
        'AvailabilityZones': (list, True),
        'Cooldown': (integer, False),
        'DesiredCapacity': (basestring, False),
        'HealthCheckGracePeriod': (int, False),
        'HealthCheckType': (basestring, False),
        'LaunchConfigurationName': (basestring, True),
        'LoadBalancerNames': (list, False),
        'MaxSize': (basestring, True),
        'MinSize': (basestring, True),
        'Tags': (list, False),
        'VPCZoneIdentifier': (list, False),
    }
Rename the OpenStack AWS resource to avoid name clash with native
Rename the OpenStack AWS resource to avoid name clash with native OpenStack ships a native ASG resource type with the same name. We rename this to AWSAutoScalingGroup to avoid the clash and make way for the native type to come.
Python
bsd-2-clause
alonsodomin/troposphere,horacio3/troposphere,amosshapira/troposphere,pas256/troposphere,mannytoledo/troposphere,johnctitus/troposphere,Yipit/troposphere,dmm92/troposphere,wangqiang8511/troposphere,jdc0589/troposphere,xxxVxxx/troposphere,ccortezb/troposphere,WeAreCloudar/troposphere,Hons/troposphere,LouTheBrew/troposphere,craigbruce/troposphere,ikben/troposphere,nicolaka/troposphere,7digital/troposphere,cryptickp/troposphere,pas256/troposphere,kid/troposphere,ptoraskar/troposphere,iblazevic/troposphere,mhahn/troposphere,cloudtools/troposphere,garnaat/troposphere,yxd-hde/troposphere,cloudtools/troposphere,ikben/troposphere,horacio3/troposphere,inetCatapult/troposphere,samcrang/troposphere,unravelin/troposphere,7digital/troposphere,jantman/troposphere,johnctitus/troposphere,alonsodomin/troposphere,micahhausler/troposphere,DualSpark/troposphere,dmm92/troposphere
1e68f5f1fd565a812ef3fdf10c4c40649e3ef398
foundation/organisation/search_indexes.py
foundation/organisation/search_indexes.py
from haystack import indexes from .models import Person, Project, WorkingGroup, NetworkGroup class PersonIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') url = indexes.CharField(model_attr='url') def get_model(self): return Person class ProjectIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') homepage_url = indexes.CharField(model_attr='homepage_url') mailinglist_url = indexes.CharField(model_attr='mailinglist_url') sourcecode_url = indexes.CharField(model_attr='sourcecode_url') def get_model(self): return Project class WorkingGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) incubation = indexes.BooleanField(model_attr='incubation') def get_model(self): return WorkingGroup class NetworkGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) mailinglist = indexes.CharField(model_attr='mailinglist') homepage = indexes.CharField(model_attr='homepage') twitter = indexes.CharField(model_attr='twitter') def get_model(self): return NetworkGroup
from haystack import indexes from .models import Person, Project, WorkingGroup, NetworkGroup class PersonIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') url = indexes.CharField(model_attr='url') def get_model(self): return Person class ProjectIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') homepage_url = indexes.CharField(model_attr='homepage_url') mailinglist_url = indexes.CharField(model_attr='mailinglist_url') sourcecode_url = indexes.CharField(model_attr='sourcecode_url') def get_model(self): return Project class WorkingGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) incubation = indexes.BooleanField(model_attr='incubation') def get_model(self): return WorkingGroup class NetworkGroupIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) twitter = indexes.CharField(model_attr='twitter') homepage_url = indexes.CharField(model_attr='homepage_url') mailinglist_url = indexes.CharField(model_attr='mailinglist_url') def get_model(self): return NetworkGroup
Fix references to old model fields
organisation: Fix references to old model fields
Python
mit
okfn/foundation,okfn/foundation,okfn/foundation,okfn/website,MjAbuz/foundation,okfn/website,okfn/foundation,okfn/website,okfn/website,MjAbuz/foundation,MjAbuz/foundation,MjAbuz/foundation
82239a844462e721c7034ec42cb4905662f4efb4
bin/mergeSegToCtm.py
bin/mergeSegToCtm.py
#!/usr/bin/python
# vim : set fileencoding=utf-8 :
#
# mergeSegToCtm.py
#
# Enhance the Bck file by adding extra fields with the diarisation
# information
#

import sys

with open(sys.argv[1], 'r', encoding='iso-8859-1') as seg:
    with open(sys.argv[2], 'r', encoding='iso-8859-1') as ctm:
        # For each frame, we will create an entry in a dictionary
        # It will help the lookup later on
        # We don't really care about memory issues here, should we?
        frames = {}
        for line in seg:
            values = line.split()
            start = int(values[2])
            duration = int(values[3])
            for i in range(start, start + duration):
                frames[i] = values[4], values[5], values[7]

        for line in ctm:
            values = line.split()
            # Use the same start format as in the .seg file
            start = int(float(values[2])*100)
            print(line.strip(), end="")
            if start in frames:
                print(" " + frames[start][0] + " " + frames[start][1] + " " + frames[start][2])
            else:
                print(" N/A N/A N/A")
#!/usr/bin/python
# vim : set fileencoding=utf-8 :
#
# mergeSegToCtm.py
#
# Enhance the CTM file by adding extra fields with the diarisation
# information
#
# First argument is the seg file
# Second argument is the ctm file
#

import sys

with open(sys.argv[1], 'r', encoding='iso-8859-1') as seg:
    with open(sys.argv[2], 'r', encoding='iso-8859-1') as ctm:
        # For each frame, we will create an entry in a dictionary
        # It will help the lookup later on
        # We don't really care about memory issues here, should we?
        frames = {}
        for line in seg:
            values = line.split()
            start = int(values[2])
            duration = int(values[3])
            for i in range(start, start + duration):
                frames[i] = values[4], values[5], values[7]

        for line in ctm:
            values = line.split()
            # Use the same start format as in the .seg file
            start = int(float(values[2])*100)
            print(line.strip(), end="")
            if start in frames:
                print(" " + frames[start][0] + " " + frames[start][1] + " " + frames[start][2])
            else:
                print(" N/A N/A N/A")
Fix typo in the script
Fix typo in the script
Python
mit
SG-LIUM/SGL-SpeechWeb-Demo,SG-LIUM/SGL-SpeechWeb-Demo,bsalimi/speech-recognition-api,SG-LIUM/SGL-SpeechWeb-Demo,bsalimi/speech-recognition-api,bsalimi/speech-recognition-api,bsalimi/speech-recognition-api
0ee59d04cb2cbe93a3f4f87a34725fbcd1a66fc0
core/Reader.py
core/Reader.py
# coding: utf8 from io import StringIO from collections import deque class StreamReader: def __init__(self, *args, stream_class=StringIO, **kwargs): self.streamClass = stream_class self.args = args self.kwargs = kwargs def read(self, parsing_pipeline): parsing_pipeline.reset() stream = self.streamClass(*self.args, **self.kwargs) min_position = parsing_pipeline.get_min_position() max_position = parsing_pipeline.get_max_position() length = max_position - min_position + 1 current_position = -min_position ar_index = list() element = deque(stream.read(length)) while True: result = parsing_pipeline.check(element, ref_position=-min_position) if result is not None and result[0]: ar_index.append((current_position, element[-min_position])) next_character = stream.read(1) current_position += 1 if next_character and result is not None: element.popleft() element.append(next_character) else: break stream.close() return ar_index
# coding: utf8 from io import StringIO from collections import deque class StreamReader: def __init__(self, *args, stream_class=StringIO, **kwargs): self.streamClass = stream_class self.args = args self.kwargs = kwargs def read(self, parsing_pipeline): parsing_pipeline.reset() min_position = parsing_pipeline.get_min_position() max_position = parsing_pipeline.get_max_position() length = max_position - min_position + 1 stream = self.streamClass(*self.args, **self.kwargs) current_position = -min_position ar_index = list() element = deque(stream.read(length)) if len(element) == length: while True: result = parsing_pipeline.check(element, ref_position=-min_position) if result is not None and result[0]: ar_index.append((current_position, element[-min_position])) next_character = stream.read(1) current_position += 1 if next_character and result is not None: element.popleft() element.append(next_character) else: break stream.close() return ar_index else: stream.close() raise ValueError("Not enough characters to parse : " + str(len(element)))
Add a check for the 'not enough characters' condition
Add a check for the 'not enough characters' condition
Python
mit
JCH222/matriochkas
e9188fdce548106cd8729c2b62a58ba387255f82
feder/virus_scan/signer.py
feder/virus_scan/signer.py
from django.core.signing import TimestampSigner class TokenSigner: signer = TimestampSigner() def unsign(self, value): return self.signer.unsign(value=value, max_age=60 * 60 * 24) def sign(self, value): return self.signer.sign(value)
from django.core.signing import TimestampSigner class TokenSigner: signer = TimestampSigner() def unsign(self, value): return self.signer.unsign(value=value, max_age=60 * 60 * 24 * 7) def sign(self, value): return self.signer.sign(value)
Increase validity period for token
virus_scan: Increase validity period for token
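For context, a small usage sketch of the signer above inside a configured Django project (the call sites aren't part of the record, so the payload is hypothetical):

from django.core.signing import SignatureExpired
from feder.virus_scan.signer import TokenSigner

signer = TokenSigner()
token = signer.sign('scan-42')  # hypothetical token payload
signer.unsign(token)            # returns 'scan-42' while the token is under 7 days old
# Once the token is older than 60 * 60 * 24 * 7 seconds, unsign() raises
# SignatureExpired instead of returning the value.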
Python
mit
watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder
6d32f609379febe2fdad690adc75a90e26b8d416
backend/backend/serializers.py
backend/backend/serializers.py
from rest_framework import serializers from .models import Animal class AnimalSerializer(serializers.ModelSerializer): class Meta: model = Animal fields = ('id', 'name', 'dob', 'gender', 'active', 'own', 'father', 'mother')
from rest_framework import serializers from .models import Animal class AnimalSerializer(serializers.ModelSerializer): class Meta: model = Animal fields = ('id', 'name', 'dob', 'gender', 'active', 'own', 'father', 'mother') def validate_father(self, father): if (father.gender != Animal.MALE): raise serializers.ValidationError('The father has to be male.') def validate_mother(self, mother): if (mother.gender != Animal.FEMALE): raise serializers.ValidationError('The mother has to be female.') def validate_dob(self, dob): father_id = self.context['request'].data['father'] if (father_id): father = Animal.objects.get(pk = father_id) if (father and father.dob > dob): raise serializers.ValidationError('Animal can\'t be older than it\'s father') mother_id = self.context['request'].data['mother'] if (mother_id): mother = Animal.objects.get(pk = mother_id) if (mother and mother.dob > dob): raise serializers.ValidationError('Animal can\'t be older than it\'s mother')
Add validator that the selected father is male and the mother is female. Validate that the animal is younger than its parents.
Add validator that the selected father is male and the mother is female. Validate that the animal is younger than its parents.
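One caveat worth noting: DRF's field-level validate_<field> hooks are expected to return the validated value, and the versions in this record return None, which would replace the submitted values after validation. A sketch of the same check with the return in place, reusing the record's Animal model and serializers import:

def validate_father(self, father):
    if father and father.gender != Animal.MALE:
        raise serializers.ValidationError('The father has to be male.')
    return father  # DRF stores whatever the validator returns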
Python
apache-2.0
mmlado/animal_pairing,mmlado/animal_pairing
f2cd1d531a1cefdc5da4b418c866be0d76aa349b
basil_common/str_support.py
basil_common/str_support.py
def as_int(value): try: return int(value) except ValueError: return None
def as_int(value): try: return int(value) except ValueError: return None def urljoin(*parts): url = parts[0] for p in parts[1:]: if url[-1] != '/': url += '/' url += p return url
Add url join which serves our needs
Add url join which serves our needs Existing functions in common libraries add extra slashes.
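A brief usage sketch of the helper above, showing the slash handling the message alludes to; note it only normalizes on the left side, so a part with its own leading slash would still yield a double slash:

urljoin('http://example.com', 'api', 'v1/')  # 'http://example.com/api/v1/'
urljoin('http://example.com/', 'api')        # 'http://example.com/api' (no '//')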
Python
apache-2.0
eve-basil/common
a40c617ea605bd667a9906f6c9400fc9562d7c0a
salt/daemons/flo/reactor.py
salt/daemons/flo/reactor.py
# -*- coding: utf-8 -*- ''' Start the reactor! ''' # Import salt libs import salt.utils.reactor # Import ioflo libs import ioflo.base.deeding @ioflo.base.deeding.deedify( 'SaltRaetReactorFork', ioinit={ 'opts': '.salt.opts', 'proc_mgr': '.salt.usr.proc_mgr'}) def reactor_fork(self): ''' Add a reactor object to the process manager ''' self.proc_mgr.add_process( salt.utils.reactor.Reactor, args=(self.opts.value,))
# -*- coding: utf-8 -*-
'''
Start the reactor!
'''
# Import salt libs
import salt.utils.reactor
import salt.utils.event

# Import ioflo libs
import ioflo.base.deeding


@ioflo.base.deeding.deedify(
        'SaltRaetReactorFork',
        ioinit={
            'opts': '.salt.opts',
            'proc_mgr': '.salt.usr.proc_mgr'})
def reactor_fork(self):
    '''
    Add a reactor object to the process manager
    '''
    self.proc_mgr.add_process(
            salt.utils.reactor.Reactor,
            args=(self.opts.value,))


@ioflo.base.deeding.deedify(
        'SaltRaetEventReturnFork',
        ioinit={
            'opts': '.salt.opts',
            'proc_mgr': '.salt.usr.proc_mgr'})
def event_return_fork(self):
    '''
    Add an event return object to the process manager
    '''
    self.proc_mgr.add_process(
            salt.utils.event.EventReturn,
            args=(self.opts.value,))
Add event return fork behavior
Add event return fork behavior
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
14e9bda5de10ef5a1c6dd96692d083f4e0f16025
python/ql/test/experimental/library-tests/frameworks/yaml/Decoding.py
python/ql/test/experimental/library-tests/frameworks/yaml/Decoding.py
import yaml from yaml import SafeLoader yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.load(payload, SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.safe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.unsafe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.safe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.unsafe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
import yaml # Unsafe: yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.load(payload, yaml.Loader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.unsafe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput # Safe yaml.load(payload, yaml.SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, Loader=yaml.SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.load(payload, yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.safe_load(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML # load_all variants yaml.load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.safe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML yaml.unsafe_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput yaml.full_load_all(payload) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
Refactor PyYAML tests a bit
Python: Refactor PyYAML tests a bit
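A side note on why the unsafe/safe split above matters: the unsafe loaders can construct arbitrary Python objects from YAML tags, which is what the decodeMayExecuteInput annotation flags. Since PyYAML 5.1, yaml.load without an explicit Loader also emits a warning, so the safe variants are the usual default:

import yaml

yaml.safe_load('a: 1')           # {'a': 1}
yaml.load('a: 1', yaml.Loader)   # same result here, but crafted tags such as
                                 # !!python/object/apply:... may execute code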
Python
mit
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
0997055c591d7bd4ad4334874292f8977ba778bf
cashew/exceptions.py
cashew/exceptions.py
class CashewException(Exception): pass class InternalCashewException(CashewException): pass class UserFeedback(CashewException): pass class InactivePlugin(UserFeedback): def __init__(self, plugin_instance_or_alias): if isinstance(plugin_instance_or_alias, basestring): self.message = plugin_instance_or_alias else: self.message = plugin_instance_or_alias.alias class NoPlugin(UserFeedback): pass
class CashewException(Exception): pass class InternalCashewException(CashewException): pass class UserFeedback(CashewException): pass class InactivePlugin(UserFeedback): def __init__(self, plugin_instance_or_alias): if isinstance(plugin_instance_or_alias, basestring): self.alias = plugin_instance_or_alias else: self.alias = plugin_instance_or_alias.alias def __str__(self): return "%s is inactive. Some additional software might need to be installed." % (self.alias) class NoPlugin(UserFeedback): pass
Improve error message when alias not available.
Improve error message when alias not available.
Python
mit
dexy/cashew
2d82280460c50d50f6be8d8c8405506b4706cd8a
securethenews/blog/tests.py
securethenews/blog/tests.py
from django.test import TestCase # Create your tests here.
import datetime from django.test import TestCase from wagtail.wagtailcore.models import Page from .models import BlogIndexPage, BlogPost class BlogTest(TestCase): def setUp(self): home_page = Page.objects.get(slug='home') blog_index_page = BlogIndexPage( title='Blog', slug='blog', show_in_menus=True ) home_page.add_child(instance=blog_index_page) blog_posts = [ BlogPost(title='First Blog Post', slug='first-blog-post', date=datetime.date.today(), byline='Author'), BlogPost(title='Second Blog Post', slug='second-blog-post', date=datetime.date.today(), byline='Author') ] for blog_post in blog_posts: blog_index_page.add_child(instance=blog_post) def test_ordering_of_same_day_blogs_on_index(self): """Verify that blog posts posted on the same day are ordered with the most recent at the top of the page.""" blog_index = BlogIndexPage.objects.first() self.assertEqual(blog_index.posts[0].title, 'Second Blog Post') self.assertEqual(blog_index.posts[1].title, 'First Blog Post')
Add unit test to verify that blog posts are ordered by most recent
Add unit test to verify that blog posts are ordered by most recent Verifies that blog posts are ordered most recent first even if they are posted on the same day.
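The BlogIndexPage.posts property this test exercises isn't included in the record; a minimal sketch of an ordering that would satisfy it, breaking same-day ties by descending primary key so the later-created post sorts first (reusing the record's Page and BlogPost models):

class BlogIndexPage(Page):
    @property
    def posts(self):
        # '-pk' orders posts published on the same date newest-created first
        return BlogPost.objects.live().descendant_of(self).order_by('-date', '-pk')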
Python
agpl-3.0
freedomofpress/securethenews,freedomofpress/securethenews,freedomofpress/securethenews,freedomofpress/securethenews
59e15749671009047ec62cae315a07719d583ac7
build/fbcode_builder_config.py
build/fbcode_builder_config.py
#!/usr/bin/env python from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals 'fbcode_builder steps to build & test Bistro' import specs.fbthrift as fbthrift import specs.folly as folly import specs.proxygen as proxygen from shell_quoting import ShellQuoted # Since Bistro doesn't presently have an "install" target, there is no # point in having its spec in the shared spec directory. def fbcode_builder_spec(builder): return { 'depends_on': [folly, proxygen, fbthrift], 'steps': [ builder.fb_github_project_workdir('bistro/bistro'), builder.step('Build bistro', [ # Future: should this share some code with `cmake_install()`? builder.run(ShellQuoted( 'PATH="$PATH:{p}/bin" ' 'TEMPLATES_PATH="{p}/include/thrift/templates" ' './cmake/run-cmake.sh Debug -DCMAKE_INSTALL_PREFIX={p}' ).format(p=builder.option('prefix'))), builder.workdir('cmake/Debug'), builder.parallel_make(), ]), builder.step('Run bistro tests', [ builder.run(ShellQuoted('ctest')), ]), ] } config = { 'github_project': 'facebook/bistro', 'fbcode_builder_spec': fbcode_builder_spec, }
#!/usr/bin/env python from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals 'fbcode_builder steps to build & test Bistro' import specs.fbthrift as fbthrift import specs.folly as folly import specs.proxygen as proxygen from shell_quoting import ShellQuoted # Since Bistro doesn't presently have an "install" target, there is no # point in having its spec in the shared spec directory. def fbcode_builder_spec(builder): return { 'depends_on': [folly, proxygen, fbthrift], 'steps': [ builder.fb_github_project_workdir('bistro/bistro'), builder.step('Build bistro', [ # Future: should this share some code with `cmake_install()`? builder.run(ShellQuoted( 'PATH="$PATH:"{p}/bin ' 'TEMPLATES_PATH={p}/include/thrift/templates ' './cmake/run-cmake.sh Debug -DCMAKE_INSTALL_PREFIX={p}' ).format(p=builder.option('prefix'))), builder.workdir('cmake/Debug'), builder.parallel_make(), ]), builder.step('Run bistro tests', [ builder.run(ShellQuoted('ctest')), ]), ] } config = { 'github_project': 'facebook/bistro', 'fbcode_builder_spec': fbcode_builder_spec, }
Fix overquoting of thrift paths
oss: Fix overquoting of thrift paths Summary: The extra quoting breaks the paths in Travis builds; this fixes the broken open-source builds. Reviewed By: snarkmaster Differential Revision: D5923131 fbshipit-source-id: 1ff3e864107b0074fc85e8a45a37455430cf4ba3
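To make the over-quoting concrete: ShellQuoted appears to shell-quote the substituted prefix already, so wrapping the substitution in a second pair of double quotes nests the quoting. A small illustration of the same effect with the standard-library shlex (the prefix is hypothetical):

import shlex

prefix = shlex.quote('/opt/my tools')   # -> '/opt/my tools' wrapped in single quotes
'PATH="$PATH:{}/bin"'.format(prefix)
# -> PATH="$PATH:'/opt/my tools'/bin"  (the single quotes end up literal in the path)
'PATH="$PATH:"{}/bin'.format(prefix)
# -> PATH="$PATH:"'/opt/my tools'/bin  (the shell concatenates the quoted pieces)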
Python
mit
facebook/bistro,facebook/bistro,facebook/bistro,facebook/bistro,facebook/bistro,facebook/bistro