Dataset columns (name, type, value range):

  commit        string, length 40 (fixed)
  old_file      string, length 4 to 264
  new_file      string, length 4 to 264
  old_contents  string, length 0 to 3.26k
  new_contents  string, length 1 to 4.43k
  subject       string, length 15 to 624
  message       string, length 15 to 4.7k
  lang          string, 3 classes
  license       string, 13 classes
  repos         string, length 5 to 91.5k
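Each record below corresponds to one commit, with the fields listed above given one per line. As a minimal sketch (assuming the rows are published as a Hugging Face dataset; the path "org/commit-dataset" is a placeholder, not the real repository ID), the records could be loaded and inspected with the `datasets` library like this:

    # Minimal sketch, assuming a Hugging Face dataset with the columns above.
    # "org/commit-dataset" is a placeholder path, not an actual repository ID.
    from datasets import load_dataset

    ds = load_dataset("org/commit-dataset", split="train")

    for row in ds.select(range(3)):
        # Each row is a dict keyed by the columns listed above.
        print(row["commit"], row["lang"], row["license"])
        print(row["subject"])
        print(row["new_file"])
        # old_contents / new_contents hold the file before and after the commit.
        print(len(row["old_contents"]), "->", len(row["new_contents"]), "chars")

In most rows below, subject and message are identical; they diverge when the commit message carries a prefix or an extended body, as in the streamer_binaries and crmngr records.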
b7c52258d39e5c0ee8fba2be87e8e671e0c583c3
xclib/postfix_io.py
xclib/postfix_io.py
# Only supports isuser request for Postfix virtual mailbox maps import sys import re import logging # Message formats described in `../doc/Protocol.md` class postfix_io: @classmethod def read_request(cls, infd, outfd): # "for line in sys.stdin:" would be more concise but adds unwanted buffering while True: line = infd.readline() if not line: break match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line) if match: yield ('isuser',) + match.group(1,2) else: logging.error('Illegal request format: ' + line) outfd.write('500 Illegal request format\n') outfd.flush() @classmethod def write_response(cls, flag, outfd): if flag == None: outfd.write('400 Trouble connecting to backend\n') elif flag: outfd.write('200 OK\n') else: outfd.write('500 No such user\n') outfd.flush()
# Only supports isuser request for Postfix virtual mailbox maps import sys import re import logging # Message formats described in `../doc/Protocol.md` class postfix_io: @classmethod def read_request(cls, infd, outfd): # "for line in sys.stdin:" would be more concise but adds unwanted buffering while True: line = infd.readline() if not line: break match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line) if match: yield ('isuser',) + match.group(1,2) elif line == 'quit': yield ('quit',) else: logging.error('Illegal request format: ' + line) outfd.write('500 Illegal request format\n') outfd.flush() @classmethod def write_response(cls, flag, outfd): if flag == None: outfd.write('400 Trouble connecting to backend\n') elif flag: outfd.write('200 OK\n') else: outfd.write('500 No such user\n') outfd.flush()
Add quit command to postfix
Add quit command to postfix
Python
mit
jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth
cc5e75078c707ee2b5622700a0ad2890969193c1
opencademy/model/openacademy_course.py
opencademy/model/openacademy_course.py
from openerp import fields, models ''' This module create model of Course ''' class Course(models.Model): ''' This class create model of Course ''' _name = 'openacademy.course' name = fields.Char(string='Title', required=True) # field reserved to identified name rec description = fields.Text(string='Description', required=False) responsible_id = fields.Many2one('res.users', ondelete='set null', string="Responsible", index=True) session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions") _sql_constraints = [ ('name_description_check', 'CHECK(name != description)', "The title of the course should not be the description"), ('name_unique', 'UNIQUE(name)', "The course title must be unique"), ]
from openerp import api, fields, models ''' This module create model of Course ''' class Course(models.Model): ''' This class create model of Course ''' _name = 'openacademy.course' name = fields.Char(string='Title', required=True) # field reserved to identified name rec description = fields.Text(string='Description', required=False) responsible_id = fields.Many2one('res.users', ondelete='set null', string="Responsible", index=True) session_ids = fields.One2many('openacademy.session', 'course_id', string="Sessions") _sql_constraints = [ ('name_description_check', 'CHECK(name != description)', "The title of the course should not be the description"), ('name_unique', 'UNIQUE(name)', "The course title must be unique"), ] @api.one # api.one default params: cr, uid, id, context def copy(self, default=None): print "estoy pasando por la funcion heredar de copy en cursos" #default['name'] = self.name + ' (copy) ' copied_count = self.search_count( [('name', '=like', u"Copy of {}%".format(self.name))]) if not copied_count: new_name = u"Copy of {}".format(self.name) else: new_name = u"Copy of {} ({})".format(self.name, copied_count) default['name'] = new_name return super(Course, self).copy(default)
Modify copy method into inherit
[REF] openacademy: Modify copy method into inherit
Python
apache-2.0
LihanHA/opencademy-project
a9f40d7549c43e3e7faf90c79f19a290761d2e08
src/tests/ggrc/__init__.py
src/tests/ggrc/__init__.py
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: david@reciprocitylabs.com # Maintained By: david@reciprocitylabs.com from flask.ext.testing import TestCase as BaseTestCase from ggrc import db from ggrc.app import app from ggrc.models import create_db, drop_db use_migrations = False class TestCase(BaseTestCase): def setUp(self): create_db(use_migrations, quiet=True) def tearDown(self): db.session.remove() drop_db(use_migrations, quiet=True) def create_app(self): app.testing = True app.debug = False return app
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: david@reciprocitylabs.com # Maintained By: david@reciprocitylabs.com from flask.ext.testing import TestCase as BaseTestCase from ggrc import db from ggrc.app import app from ggrc.models import create_db, drop_db from google.appengine.api import memcache from google.appengine.ext import testbed use_migrations = False class TestCase(BaseTestCase): def setUp(self): create_db(use_migrations, quiet=True) self.testbed = testbed.Testbed() self.testbed.activate() self.testbed.init_memcache_stub() def tearDown(self): db.session.remove() drop_db(use_migrations, quiet=True) self.testbed.deactivate() def create_app(self): app.testing = True app.debug = False return app
Allow Google AppEngine Memcache stub to be used for running unit tests including calls to caching layer
Allow Google AppEngine Memcache stub to be used for running unit tests including calls to caching layer
Python
apache-2.0
hyperNURb/ggrc-core,prasannav7/ggrc-core,vladan-m/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,uskudnik/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,andrei-karalionak/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,uskudnik/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,hasanalom/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,vladan-m/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,vladan-m/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,uskudnik/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,vladan-m/ggrc-core,selahssea/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,hasanalom/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,hasanalom/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core
3be9ef4c2ec4c2b10503633c55fd1634f4d5debb
comics/search/indexes.py
comics/search/indexes.py
from django.template.loader import get_template from django.template import Context from haystack import indexes from haystack import site from comics.core.models import Image class ImageIndex(indexes.SearchIndex): document = indexes.CharField(document=True, use_template=True) rendered = indexes.CharField(indexed=False) def prepare_rendered(self, obj): template = get_template('search/results.html') context = Context({'release': obj.get_first_release()}) return template.render(context) site.register(Image, ImageIndex)
from django.template.loader import get_template from django.template import Context from haystack import indexes from haystack import site from comics.core.models import Image class ImageIndex(indexes.SearchIndex): document = indexes.CharField(document=True, use_template=True) rendered = indexes.CharField(indexed=False) def get_updated_field(self): return 'fetched' def prepare_rendered(self, obj): template = get_template('search/results.html') context = Context({'release': obj.get_first_release()}) return template.render(context) site.register(Image, ImageIndex)
Add get_updated_field to search index
Add get_updated_field to search index
Python
agpl-3.0
jodal/comics,jodal/comics,klette/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,klette/comics,klette/comics,datagutten/comics
3fde2a6fe3064bab2cfc7adf040f0f4c91f89865
whack/hashes.py
whack/hashes.py
import os import hashlib class Hasher(object): def __init__(self): self._hash = hashlib.sha1() def update(self, arg): self._hash.update(_sha1(arg)) def update_with_dir(self, dir_path): for file_path in _all_files(dir_path): self.update(os.path.relpath(file_path, dir_path)) self.update(open(file_path).read()) def hexdigest(self): return self._hash.hexdigest() def _all_files(top): all_files = [] for root, dirs, files in os.walk(top): for name in files: all_files.append(os.path.join(root, name)) return all_files def _sha1(str): return hashlib.sha1(str).hexdigest()
import os import hashlib class Hasher(object): def __init__(self): self._hash = hashlib.sha1() def update(self, arg): self._hash.update(_sha1(arg)) def update_with_dir(self, dir_path): for file_path in _all_files(dir_path): self.update(os.path.relpath(file_path, dir_path)) self.update(open(file_path).read()) def hexdigest(self): return self._hash.hexdigest() def _all_files(top): all_files = [] for root, dirs, files in os.walk(top): for name in files: all_files.append(os.path.join(root, name)) return sorted(all_files) def _sha1(str): return hashlib.sha1(str).hexdigest()
Sort files before hashing to ensure consistency
Sort files before hashing to ensure consistency
Python
bsd-2-clause
mwilliamson/whack
2b7de99f1de941c66dd282efbdf423e95c104cc9
mysite/missions/management/commands/svn_precommit.py
mysite/missions/management/commands/svn_precommit.py
from django.core.management import BaseCommand, CommandError from mysite.missions import controllers import sys class Command(BaseCommand): args = '<repo_path> <txn_id>' help = 'SVN pre-commit hook for mission repositories' def handle(self, *args, **options): # This management command is called from the mission svn repositories # as the pre-commit hook. It receives the repository path and transaction # ID as arguments, and it receives a description of applicable lock # tokens on stdin. Its environment and current directory are undefined. if len(args) != 2: raise CommandError, 'Exactly two arguments are expected.' repo_path, txn_id = args try: controllers.SvnCommitMission.pre_commit_hook(repo_path, txn_id) except controllers.IncorrectPatch, e: sys.stderr.write(str(e) + '\n\n') raise CommandError, 'The commit failed to validate.'
from django.core.management import BaseCommand, CommandError from mysite.missions import controllers import sys class Command(BaseCommand): args = '<repo_path> <txn_id>' help = 'SVN pre-commit hook for mission repositories' def handle(self, *args, **options): # This management command is called from the mission svn repositories # as the pre-commit hook. It receives the repository path and transaction # ID as arguments, and it receives a description of applicable lock # tokens on stdin. Its environment and current directory are undefined. if len(args) != 2: raise CommandError, 'Exactly two arguments are expected.' repo_path, txn_id = args try: controllers.SvnCommitMission.pre_commit_hook(repo_path, txn_id) except controllers.IncorrectPatch, e: sys.stderr.write('\n ' + str(e) + '\n\n') raise CommandError, 'The commit failed to validate.'
Make the error message stand out more for the user when we reject an svn commit.
Make the error message stand out more for the user when we reject an svn commit.
Python
agpl-3.0
SnappleCap/oh-mainline,heeraj123/oh-mainline,Changaco/oh-mainline,mzdaniel/oh-mainline,sudheesh001/oh-mainline,nirmeshk/oh-mainline,vipul-sharma20/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,Changaco/oh-mainline,Changaco/oh-mainline,eeshangarg/oh-mainline,eeshangarg/oh-mainline,SnappleCap/oh-mainline,moijes12/oh-mainline,waseem18/oh-mainline,moijes12/oh-mainline,ojengwa/oh-mainline,nirmeshk/oh-mainline,vipul-sharma20/oh-mainline,heeraj123/oh-mainline,onceuponatimeforever/oh-mainline,ojengwa/oh-mainline,heeraj123/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,willingc/oh-mainline,ehashman/oh-mainline,jledbetter/openhatch,heeraj123/oh-mainline,nirmeshk/oh-mainline,waseem18/oh-mainline,waseem18/oh-mainline,Changaco/oh-mainline,ehashman/oh-mainline,willingc/oh-mainline,onceuponatimeforever/oh-mainline,SnappleCap/oh-mainline,moijes12/oh-mainline,campbe13/openhatch,moijes12/oh-mainline,ehashman/oh-mainline,campbe13/openhatch,onceuponatimeforever/oh-mainline,openhatch/oh-mainline,ehashman/oh-mainline,SnappleCap/oh-mainline,vipul-sharma20/oh-mainline,heeraj123/oh-mainline,waseem18/oh-mainline,willingc/oh-mainline,nirmeshk/oh-mainline,ojengwa/oh-mainline,Changaco/oh-mainline,willingc/oh-mainline,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,waseem18/oh-mainline,campbe13/openhatch,sudheesh001/oh-mainline,sudheesh001/oh-mainline,eeshangarg/oh-mainline,willingc/oh-mainline,jledbetter/openhatch,moijes12/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,campbe13/openhatch,openhatch/oh-mainline,ehashman/oh-mainline,jledbetter/openhatch,jledbetter/openhatch,SnappleCap/oh-mainline,sudheesh001/oh-mainline,onceuponatimeforever/oh-mainline,openhatch/oh-mainline,eeshangarg/oh-mainline,sudheesh001/oh-mainline,onceuponatimeforever/oh-mainline,vipul-sharma20/oh-mainline,eeshangarg/oh-mainline,campbe13/openhatch,nirmeshk/oh-mainline,openhatch/oh-mainline,ojengwa/oh-mainline,ojengwa/oh-mainline,jledbetter/openhatch
30db1d30bc79e121109206ca2bd3b264ca725283
bluebottle/utils/staticfiles_finders.py
bluebottle/utils/staticfiles_finders.py
from django.utils._os import safe_join import os from django.conf import settings from django.contrib.staticfiles.finders import FileSystemFinder from bluebottle.clients.models import Client class TenantStaticFilesFinder(FileSystemFinder): def find(self, path, all=False): """ Looks for files in the client static directories. static/assets/greatbarier/images/logo.jpg will translate to MULTITENANT_DIR/greatbarier/static/images/logo.jpg """ matches = [] tenants = Client.objects.all() tenant_dir = getattr(settings, 'MULTITENANT_DIR', None) if not tenant_dir: return matches for tenant in tenants: if "{0}/".format(tenant.client_name) in path: tenant_path = path.replace('{0}/'.format(tenant.client_name), '{0}/static/'.format(tenant.client_name)) print tenant_path local_path = safe_join(tenant_dir, tenant_path) print local_path if os.path.exists(local_path): return local_path return
from django.utils._os import safe_join import os from django.conf import settings from django.contrib.staticfiles.finders import FileSystemFinder from bluebottle.clients.models import Client class TenantStaticFilesFinder(FileSystemFinder): def find(self, path, all=False): """ Looks for files in the client static directories. static/assets/greatbarier/images/logo.jpg will translate to MULTITENANT_DIR/greatbarier/static/images/logo.jpg """ matches = [] tenants = Client.objects.all() tenant_dir = getattr(settings, 'MULTI_TENANT_DIR', None) if not tenant_dir: return matches for tenant in tenants: if "{0}/".format(tenant.client_name) in path: tenant_path = path.replace('{0}/'.format(tenant.client_name), '{0}/static/'.format(tenant.client_name)) print tenant_path local_path = safe_join(tenant_dir, tenant_path) print local_path if os.path.exists(local_path): return local_path return
CHange settings name for tenant dir
CHange settings name for tenant dir
Python
bsd-3-clause
jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
b0701205f0b96645d3643bab5188f349cd604603
binaries/streamer_binaries/__init__.py
binaries/streamer_binaries/__init__.py
import os __version__ = '0.5.0' # Module level variables. ffmpeg = '' """The path to the installed FFmpeg binary.""" ffprobe = '' """The path to the installed FFprobe binary.""" packager = '' """The path to the installed Shaka Packager binary.""" # Get the directory path where this __init__.py file resides. _dir_path = os.path.abspath(os.path.dirname(__file__)) # This will be executed at import time. for _file in os.listdir(_dir_path): if _file.startswith('ffmpeg'): ffmpeg = os.path.join(_dir_path, _file) elif _file.startswith('ffprobe'): ffprobe = os.path.join(_dir_path, _file) elif _file.startswith('packager'): packager = os.path.join(_dir_path, _file)
import os import platform __version__ = '0.5.0' # Get the directory path where this __init__.py file resides. _dir_path = os.path.abspath(os.path.dirname(__file__)) # Compute the part of the file name that indicates the OS. _os = { 'Linux': 'linux', 'Windows': 'win', 'Darwin': 'osx', }[platform.system()] # Compute the part of the file name that indicates the CPU architecture. _cpu = { 'x86_64': 'x64', # Linux/Mac report this key 'AMD64': 'x64', # Windows reports this key 'aarch64': 'arm64', }[platform.machine()] # Module level variables. ffmpeg = os.path.join(_dir_path, 'ffmpeg-{}-{}'.format(_os, _cpu)) """The path to the installed FFmpeg binary.""" ffprobe = os.path.join(_dir_path, 'ffprobe-{}-{}'.format(_os, _cpu)) """The path to the installed FFprobe binary.""" packager = os.path.join(_dir_path, 'packager-{}-{}'.format(_os, _cpu)) """The path to the installed Shaka Packager binary."""
Fix usage of local streamer_binaries module
build: Fix usage of local streamer_binaries module The old code would search the directory for the binary to use. This worked fine if the package were installed, but when adding the module path to PYTHONPATH, this technique would fail because the folder would have executables for all architetures. Now we will compute the exact filename we expect for each exectuable, allowing the module to be used locally without installation. This is useful for testing pre-release versions of the module. Change-Id: I35d3a1009b677ef9d29379147312abe3d0a7f8b2
Python
apache-2.0
shaka-project/shaka-streamer,shaka-project/shaka-streamer
d57670995709ae60e9cbed575b1ac9e63cba113a
src/env.py
src/env.py
class Environment: def __init__(self, par=None, bnd=None): if bnd: self.binds = bnd else: self.binds = {} self.parent = par if par: self.level = self.parent.level + 1 else: self.level = 0 def get(self, key): if key in self.binds: return self.binds[key] elif self.parent: return self.parent.get(key) else: return None def set(self, key, value): if key in self.binds: self.binds[key] = value elif self.parent: self.parent.set(key,value) else: self.binds[key] = value def __repr__( self): ret = "\n%s:\n" % self.level keys = self.binds.keys() for key in keys: ret = ret + " %5s: %s\n" % (key, self.binds[key]) return ret
class Environment: def __init__(self, par=None, bnd=None): if bnd: self.binds = bnd else: self.binds = {} self.parent = par if par: self.level = self.parent.level + 1 else: self.level = 0 def get(self, key): if key in self.binds: return self.binds[key] elif self.parent: return self.parent.get(key) else: raise ValueError("Invalid symbol " + key) def set(self, key, value): if key in self.binds: self.binds[key] = value elif self.parent: self.parent.set(key,value) else: self.binds[key] = value def __repr__( self): ret = "\n%s:\n" % self.level keys = self.binds.keys() for key in keys: ret = ret + " %5s: %s\n" % (key, self.binds[key]) return ret
Raise an error when a symbol cannot be found
Raise an error when a symbol cannot be found
Python
mit
readevalprintlove/lithp,fogus/lithp,fogus/lithp,readevalprintlove/lithp,magomsk/lithp,readevalprintlove/lithp,fogus/lithp,magomsk/lithp,magomsk/lithp
b8193a26f0a09ea1cba08046432a02a5e263eb29
examples/svg-path.py
examples/svg-path.py
# ----------------------------------------------------------------------------- # Copyright (c) 2009-2016 Nicolas P. Rougier. All rights reserved. # Distributed under the (new) BSD License. # ----------------------------------------------------------------------------- import re import numpy as np from glumpy import app, gl, data, svg, collections from glumpy.transforms import Position, OrthographicProjection, PanZoom, Viewport window = app.Window(800, 800, color=(1,1,1,1)) transform = PanZoom(OrthographicProjection(Position())) @window.event def on_draw(dt): window.clear() paths["antialias"] = -0.5 collections.Collection.draw(paths) paths["antialias"] = +1.0 collections.Collection.draw(paths) @window.event def on_init(): gl.glEnable(gl.GL_DEPTH_TEST) paths = collections.PathCollection("agg+", transform=transform) paths["miter_limit"] = 4.0 paths["linewidth"] = 50.0 paths["color"] = 0.0,0.0,0.0,0.5 path = svg.Path("""M 300,400 c 0,100 200,-100 200,0 c 0,100 -200,-100 -200,0 z""") vertices, closed = path.vertices[0] paths.append(vertices, closed=closed) window.attach(paths["transform"]) window.attach(paths["viewport"]) app.run()
# ----------------------------------------------------------------------------- # Copyright (c) 2009-2016 Nicolas P. Rougier. All rights reserved. # Distributed under the (new) BSD License. # ----------------------------------------------------------------------------- import re import numpy as np from glumpy import app, gl, data, svg, collections from glumpy.transforms import Position, OrthographicProjection, PanZoom, Viewport window = app.Window(800, 800, color=(1,1,1,1)) transform = PanZoom(OrthographicProjection(Position())) @window.event def on_draw(dt): window.clear() paths["antialias"] = -0.5 collections.Collection.draw(paths) paths["antialias"] = +1.0 collections.Collection.draw(paths) @window.event def on_init(): gl.glEnable(gl.GL_DEPTH_TEST) paths = collections.PathCollection("agg", transform=transform) # paths["miter_limit"] = 4.0 paths["linewidth"] = 50.0 paths["color"] = 0.0,0.0,0.0,0.5 path = svg.Path("""M 300,400 c 0,100 200,-100 200,0 c 0,100 -200,-100 -200,0 z""") vertices, closed = path.vertices[0] paths.append(vertices, closed=closed) window.attach(paths["transform"]) window.attach(paths["viewport"]) app.run()
Switch back the good agg version (instead of agg+)
Switch back the good agg version (instead of agg+)
Python
bsd-3-clause
glumpy/glumpy,glumpy/glumpy
d54544ecf6469eedce80d6d3180aa826c1fcc19a
cpgintegrate/__init__.py
cpgintegrate/__init__.py
import pandas import traceback import typing def process_files(file_iterator: typing.Iterator[typing.IO], processor: typing.Callable) -> pandas.DataFrame: def get_frames(): for file in file_iterator: df = processor(file) yield (df .assign(Source=getattr(file, 'name', None), SubjectID=getattr(file, 'cpgintegrate_subject_id', None), FileSubjectID=df.index if df.index.name else None)) return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID")
import pandas import typing def process_files(file_iterator: typing.Iterator[typing.IO], processor: typing.Callable) -> pandas.DataFrame: def get_frames(): for file in file_iterator: source = getattr(file, 'name', None) subject_id = getattr(file, 'cpgintegrate_subject_id', None) try: df = processor(file) except Exception as e: raise ProcessingException({"Source": source, 'SubjectID': subject_id}) from e yield (df .assign(Source=getattr(file, 'name', None), SubjectID=getattr(file, 'cpgintegrate_subject_id', None), FileSubjectID=df.index if df.index.name else None)) return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID") class ProcessingException(Exception): """cpgintegrate processing error"""
Add file source and subjectID to processing exceptions
Add file source and subjectID to processing exceptions
Python
agpl-3.0
PointyShinyBurning/cpgintegrate
4c5ce80d469619de6a324bd779c6884f6070106f
homepage/__init__.py
homepage/__init__.py
""" hello/__init__.py ------------------ Initializes Flask application and brings all components together. """ from flask import Flask from flask_sqlalchemy import SQLAlchemy from flask_misaka import Misaka # Create application object app = Flask(__name__, instance_relative_config=True, static_url_path='static', static_folder='staticfiles') # Load default configuration settings app.config.from_object('config.default') # Load non-VC configuration variables from instance folder app.config.from_pyfile('instance.cfg', silent=True) # Load settings specified by APP_CONFIG_FILE environment variable # (such as 'config.development' or 'config.production') # Variables defined here will override default configurations #app.config.from_envvar('APP_CONFIG_FILE', silent=True) # Disable Flask-SQLAlchemy event notification system. app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False # Explicitly add debugger middleware if app.debug: from werkzeug.debug import DebuggedApplication app.wsgi_app = DebuggedApplication(app.wsgi_app, True) # Create SQLAlchemy object (database) db = SQLAlchemy(app) # Use Misaka for markdown templates Misaka(app) # Import main views module (main pages) from homepage import views # Import admin views from homepage import admin
""" hello/__init__.py ------------------ Initializes Flask application and brings all components together. """ from flask import Flask from flask_sqlalchemy import SQLAlchemy from flask_misaka import Misaka # Create application object app = Flask(__name__, instance_relative_config=True, static_url_path='/static', static_folder='staticfiles') # Load default configuration settings app.config.from_object('config.default') # Load non-VC configuration variables from instance folder app.config.from_pyfile('instance.cfg', silent=True) # Load settings specified by APP_CONFIG_FILE environment variable # (such as 'config.development' or 'config.production') # Variables defined here will override default configurations #app.config.from_envvar('APP_CONFIG_FILE', silent=True) # Disable Flask-SQLAlchemy event notification system. app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False # Explicitly add debugger middleware if app.debug: from werkzeug.debug import DebuggedApplication app.wsgi_app = DebuggedApplication(app.wsgi_app, True) # Create SQLAlchemy object (database) db = SQLAlchemy(app) # Use Misaka for markdown templates Misaka(app) # Import main views module (main pages) from homepage import views # Import admin views from homepage import admin
Fix static url path error.
Fix static url path error.
Python
mit
kennyng/kennyng.org,kennyng/kennyng.org,kennyng/kennyng.org,kennyng/kennyng.org
0480c304488e4c8bb9908babafa13745b007ee0a
setup.py
setup.py
#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst # NOTE: most of the configuration, including the version number, # is defined in setup.cfg import sys from distutils.version import LooseVersion import setuptools from setuptools import setup if LooseVersion(setuptools.__version__) < '30.3': sys.stderr.write("ERROR: setuptools 30.3 or later is required by astropy-helpers\n") sys.exit(1) from astropy_helpers.version_helpers import generate_version_py # noqa version = generate_version_py() setup(version=version)
#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst # NOTE: most of the configuration, including the version number, # is defined in setup.cfg import os import sys from distutils.version import LooseVersion import setuptools from setuptools import setup if LooseVersion(setuptools.__version__) < '30.3': sys.stderr.write("ERROR: setuptools 30.3 or later is required by astropy-helpers\n") sys.exit(1) # Need to add current directory to be able to import astropy-helpers # despite PEP517/518 build isolation sys.path.append(os.path.abspath(".")) from astropy_helpers.version_helpers import generate_version_py # noqa version = generate_version_py() setup(version=version)
Fix pip installs of astropy-helpers
Fix pip installs of astropy-helpers
Python
bsd-3-clause
astropy/astropy-helpers,astropy/astropy-helpers
45348030b5dee0f83632671c26d0ef63287160bb
setup.py
setup.py
from os.path import join, dirname from setuptools import setup import django_ponydebugger with open(join(dirname(__file__), 'README.rst')) as f: readme = f.read() with open(join(dirname(__file__), 'LICENSE')) as f: license = f.read() setup( name='django-ponydebugger', version=django_ponydebugger.__version__, description='PonyDebugger support for Django', long_description=readme, packages=[ 'django_ponydebugger', 'django_ponydebugger.domains', ], package_data={ 'django_ponydebugger': ['django-icon.png'], }, install_requires=[ 'websocket-client', ], author='Matthew Eastman', author_email='matt@educreations.com', url='https://github.com/educreations/django-ponydebugger', license=license, classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Debuggers', ], )
import os from setuptools import setup import django_ponydebugger with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f: readme = f.read() setup( name='django-ponydebugger', version=django_ponydebugger.__version__, description='PonyDebugger support for Django', long_description=readme, packages=[ 'django_ponydebugger', 'django_ponydebugger.domains', ], package_data={ 'django_ponydebugger': ['django-icon.png'], }, install_requires=[ 'websocket-client', ], author='Matthew Eastman', author_email='matt@educreations.com', url='https://github.com/educreations/django-ponydebugger', license='MIT', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Debuggers', ], )
Use a short string for the license.
Use a short string for the license.
Python
mit
educreations/django-ponydebugger
f74bc34f40ac746d501e164ea4fd31eda4bf4b21
setup.py
setup.py
from setuptools import setup entry_points = { 'console_scripts': [ 'whatportis=whatportis.main:run', ] } requirements = open('requirements.txt').read() readme = open('README.rst').read() setup( name="whatportis", version="0.3", url='http://github.com/ncrocfer/whatportis', author='Nicolas Crocfer', author_email='ncrocfer@gmail.com', description="A command to search port names and numbers", long_description=readme, packages=['whatportis'], include_package_data=True, install_requires=requirements, entry_points=entry_points, classifiers=( 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Natural Language :: English', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4' ), )
from setuptools import setup entry_points = { 'console_scripts': [ 'whatportis=whatportis.main:run', ] } requirements = open('requirements.txt').read() readme = open('README.rst').read() setup( name="whatportis", version="0.4", url='http://github.com/ncrocfer/whatportis', author='Nicolas Crocfer', author_email='ncrocfer@gmail.com', description="A command to search port names and numbers", long_description=readme, packages=['whatportis'], include_package_data=True, install_requires=requirements, entry_points=entry_points, classifiers=( 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Natural Language :: English', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4' ), )
Update version (0.3 -> 0.4)
Update version (0.3 -> 0.4)
Python
mit
ncrocfer/whatportis
16812eadadecdb4449f796f453e891d1adecf95d
setup.py
setup.py
#!/usr/bin/python -tt # -*- coding: utf-8 -*- from setuptools import find_packages, setup import kitchen.release setup(name='kitchen', version=kitchen.release.__version__, description=kitchen.release.DESCRIPTION, author=kitchen.release.AUTHOR, author_email=kitchen.release.EMAIL, license=kitchen.release.LICENSE, url=kitchen.release.URL, download_url=kitchen.release.DOWNLOAD_URL, keywords='Useful Small Code Snippets', classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', 'Programming Language :: Python :: 2.3', 'Programming Language :: Python :: 2.4', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Text Processing :: General', ], packages=find_packages(), data_files = [], )
#!/usr/bin/python -tt # -*- coding: utf-8 -*- from setuptools import find_packages, setup import kitchen.release setup(name='kitchen', version=kitchen.release.__version__, description=kitchen.release.DESCRIPTION, author=kitchen.release.AUTHOR, author_email=kitchen.release.EMAIL, license=kitchen.release.LICENSE, url=kitchen.release.URL, download_url=kitchen.release.DOWNLOAD_URL, keywords='Useful Small Code Snippets', classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', 'Programming Language :: Python :: 2.3', 'Programming Language :: Python :: 2.4', 'Programming Language :: Python :: 2.5', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Text Processing :: General', ], packages=find_packages(), data_files = [], )
Add Python-2.7 as a platform kitchen runs on
Add Python-2.7 as a platform kitchen runs on
Python
lgpl-2.1
fedora-infra/kitchen,fedora-infra/kitchen
375657de7eff50c182bd78196d22101a1a8bcb91
setup.py
setup.py
""" Drupdates setup script. """ try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='Drupdates', description='Drupal updates scripts', author='Jim Taylor', url='https://github.com/jalama/drupdates', download_url='https://github.com/jalama/drupdates', author_email='jalama@gmail.com', version='1.4.0', package_dir={'drupdates' : 'drupdates', 'drupdates.tests' : 'drupdates/tests'}, include_package_data=True, install_requires=['nose', 'gitpython', 'requests', 'pyyaml'], entry_points={ 'console_scripts': ['drupdates = drupdates.cli:main'], }, packages=['drupdates', 'drupdates.tests'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: MacOS :: MacOS X', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Topic :: System :: Systems Administration', 'Topic :: Software Development :: Build Tools', 'Topic :: Software Development :: Bug Tracking', ], )
""" Drupdates setup script. """ try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='Drupdates', description='Drupal updates scripts', author='Jim Taylor', url='https://github.com/jalama/drupdates', download_url='https://github.com/jalama/drupdates', author_email='jalama@gmail.com', version='1.4.0', package_dir={'drupdates' : 'drupdates', 'drupdates.tests' : 'drupdates/tests'}, include_package_data=True, install_requires=['nose', 'gitpython', 'requests', 'pyyaml'], entry_points={ 'console_scripts': ['drupdates = drupdates.cli:main'], }, packages=['drupdates', 'drupdates.tests'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: MacOS :: MacOS X', 'Operating System :: POSIX', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Topic :: System :: Systems Administration', 'Topic :: Software Development :: Build Tools', 'Topic :: Software Development :: Bug Tracking', ], )
Update Development Status to stable
Update Development Status to stable
Python
mit
jalama/drupdates
162d5514145f3797e0b9fb6dd19c948d9ddc82b1
setup.py
setup.py
""" crmngr setup module. """ from setuptools import setup, find_packages from crmngr.version import __version__ setup( name='crmngr', author='Andre Keller', author_email='andre.keller@vshn.ch', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Programming Language :: Python :: 3 :: Only', 'Topic :: System :: Systems Administration', ], description='manage a r10k-style control repository', entry_points={ 'console_scripts': [ 'crmngr = crmngr:main' ] }, install_requires=[ 'natsort>=4.0.0', 'requests>=2.1.0', ], # BSD 3-Clause License: # - http://opensource.org/licenses/BSD-3-Clause license='BSD', packages=find_packages(), url='https://github.com/vshn/crmngr', version=__version__, )
""" crmngr setup module. """ from pathlib import Path import re from setuptools import setup, find_packages # Read version from crmngr/version.py # we do not import, as this fails if dependencies from install_depends are not # available with open(Path('./crmngr/version.py')) as version_file: VERSION_REGEX = re.compile( r'^\s*__version__\s*=\s*["\'](?P<version>.*)["\']\s*$' ) for line in version_file.readlines(): version_match = VERSION_REGEX.match(line) if version_match: __version__ = version_match.groupdict()['version'] break else: __version__ = 'unknown' raise Exception('Could not get current version of nacli from ' './nacli/version.py') setup( name='crmngr', author='Andre Keller', author_email='andre.keller@vshn.ch', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Programming Language :: Python :: 3 :: Only', 'Topic :: System :: Systems Administration', ], description='manage a r10k-style control repository', entry_points={ 'console_scripts': [ 'crmngr = crmngr:main' ] }, install_requires=[ 'natsort>=4.0.0', 'requests>=2.1.0', ], # BSD 3-Clause License: # - http://opensource.org/licenses/BSD-3-Clause license='BSD', packages=find_packages(), url='https://github.com/vshn/crmngr', version=__version__, )
Change version lookup for packaging.
Change version lookup for packaging. The current approach imports the version directly from crmngr. This is a problem if the build environment does not have crmngr runtime libraries installed and leads to having unnecessary build dependencies. The new approach extracts the version from the python file directly, allowing to have a single source of version, whitout needing to import runtime dependencies during the build process.
Python
bsd-3-clause
vshn/crmngr
15e493c5402d07cb735fb8773ad499d1e5b19e19
setup.py
setup.py
from setuptools import setup, find_packages version = '0.2' setup( name='ckanext-oaipmh', version=version, description="OAI-PMH harvester for CKAN", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='Mikael Karlsson', author_email='i8myshoes@gmail.com', url='https://github.com/kata-csc/ckanext-oaipmh', license='AGPL', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), namespace_packages=['ckanext', 'ckanext.oaipmh'], include_package_data=True, zip_safe=False, install_requires=[ # -*- Extra requirements: -*- 'pyoai', 'ckanext-harvest', 'lxml', 'rdflib', 'beautifulsoup4', 'pointfree', 'functionally', 'fn', ], entry_points=\ """ [ckan.plugins] # Add plugins here, eg oaipmh_harvester=ckanext.oaipmh.harvester:OAIPMHHarvester """, )
from setuptools import setup, find_packages version = '0.2' setup( name='ckanext-oaipmh', version=version, description="OAI-PMH harvester for CKAN", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='CSC - IT Center for Science Ltd.', author_email='kata-project@postit.csc.fi', url='https://github.com/kata-csc/ckanext-oaipmh', license='AGPL', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), namespace_packages=['ckanext', 'ckanext.oaipmh'], include_package_data=True, zip_safe=False, install_requires=[ # -*- Extra requirements: -*- 'pyoai', 'ckanext-harvest', 'lxml', 'rdflib', 'beautifulsoup4', 'pointfree', 'functionally', 'fn', ], entry_points=\ """ [ckan.plugins] # Add plugins here, eg oaipmh_harvester=ckanext.oaipmh.harvester:OAIPMHHarvester """, )
Update author to CSC - IT Center for Science Ltd.
Update author to CSC - IT Center for Science Ltd.
Python
agpl-3.0
kata-csc/ckanext-oaipmh,kata-csc/ckanext-oaipmh,kata-csc/ckanext-oaipmh
fe5ddba257885aa166bd71696a6eeefad153e66a
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages import sockjsroom # Setup library setup( # Pypi name name = "sockjsroom", # Release version version = sockjsroom.__version__, # Associated package packages = find_packages(), # Author author = "Deisss", author_email = "deisss@free.fr", # Package description description = "Sockjs-tornado multi room system", long_description = open('README.md').read(), # Require sockjs-tornado install_requires = ["tornado", "sockjs-tornado"], # Add MANIFEST.in include_package_data = True, # Github url url = "https://github.com/Deisss/python-sockjsroom", # Metadata classifiers=[ "Programming Language :: Python", "Development Status :: 1 - Planning", "License :: MIT Licence", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Topic :: Communications", ], )
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages import sockjsroom # Setup library setup( # Pypi name name = "sockjsroom", # Release version version = sockjsroom.__version__, # Associated package packages = find_packages(), # Author author = "Deisss", author_email = "deisss@free.fr", # Package description description = "Sockjs-tornado multi room system", long_description = open('README.md').read(), # Require sockjs-tornado install_requires = ["tornado", "sockjs-tornado"], # Add MANIFEST.in include_package_data = True, # Github url url = "https://github.com/Deisss/python-sockjsroom", # Metadata classifiers=[ "Programming Language :: Python", "Development Status :: 1 - Planning", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Topic :: Communications", ], )
Switch to OSI license for Pypi
Switch to OSI license for Pypi
Python
mit
Deisss/python-sockjsroom,Deisss/python-sockjsroom,Deisss/python-sockjsroom
64e2e80570268c2be47198495334ccffcd387b35
setup.py
setup.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals import codecs from setuptools import setup, find_packages setup( name='Venom', version='1.0.0a1', packages=find_packages(exclude=['*tests*']), url='https://github.com/biosustain/venom', license='MIT', author='Lars Schöning', author_email='lays@biosustain.dtu.dk', description='Venom is an upcoming RPC framework for Python', long_description=codecs.open('README.rst', encoding='utf-8').read(), test_suite='nose.collector', tests_require=[ 'aiohttp', 'nose' ], install_requires=[ ], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules' ], zip_safe=False, extras_require={ 'docs': ['sphinx'], 'aiohttp': ['aiohttp'], 'grpc': ['grpcio'], 'zmq': ['pyzmq'], } )
# -*- coding: utf-8 -*- from __future__ import unicode_literals import codecs from setuptools import setup, find_packages setup( name='venom', version='1.0.0a1', packages=find_packages(exclude=['*tests*']), url='https://github.com/biosustain/venom', license='MIT', author='Lars Schöning', author_email='lays@biosustain.dtu.dk', description='Venom is an upcoming RPC framework for Python', long_description=codecs.open('README.rst', encoding='utf-8').read(), test_suite='nose.collector', tests_require=[ 'aiohttp', 'ujson', 'nose' ], install_requires=[], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules' ], zip_safe=False, extras_require={ 'docs': ['sphinx'], 'aiohttp': ['aiohttp', 'ujson'], 'grpc': ['grpcio'], 'zmq': ['pyzmq'], } )
Add 'ujson' requirement for tests & aiohttp
Add 'ujson' requirement for tests & aiohttp
Python
mit
biosustain/venom
8a630fc6e33a76def9d8288a3d04fcacbc3e1d03
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ rack-ddnsm ~~~~~ Dynamic DNS metadata for Rackspace Cloud DNS, manages TXT records containing metadata in the format of title,desc,data. :copyright: (c) 2015 by Alex Edwards. :license: MIT, see LICENSE for more details. :repo: <https://github.com/sunshinekitty/rack-ddnsm> :docs: <https://github.com/sunshinekitty/rack-ddnsm/wiki> """ from setuptools import setup import re with open("rackddnsm/version.py", "rt") as vfile: version_text = vfile.read() vmatch = re.search(r'version ?= ?"(.+)"$', version_text) version = vmatch.groups()[0] setup( name="rack-ddnsm", version=version, description="Python language bindings for Encore.", author="Alex Edwards", author_email="edwards@linux.com", url="https://github.com/sunshinekitty/rack-ddnsm>", keywords="rackspace cloud dns meta ddns dns", classifiers=[ "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: MIT License", "Programming Language :: Python", ], install_requires=[ "requests>=2.2.1", "dnspython>=1.12.0" ], packages=[ "rack-ddnsm", ] )
#!/usr/bin/env python # -*- coding: utf-8 -*- """ rack-ddnsm ~~~~~ Dynamic DNS metadata for Rackspace Cloud DNS, manages TXT records containing metadata in the format of title,desc,data. :copyright: (c) 2015 by Alex Edwards. :license: MIT, see LICENSE for more details. :repo: <https://github.com/sunshinekitty/rack-ddnsm> :docs: <https://github.com/sunshinekitty/rack-ddnsm/wiki> """ from setuptools import setup import re with open("rackddnsm/version.py", "rt") as vfile: version_text = vfile.read() vmatch = re.search(r'version ?= ?"(.+)"$', version_text) version = vmatch.groups()[0] setup( name="rackddnsm", version=version, description="Python language bindings for Encore.", author="Alex Edwards", author_email="edwards@linux.com", url="https://github.com/sunshinekitty/rack-ddnsm>", keywords="rackspace cloud dns meta ddns dns", classifiers=[ "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: MIT License", "Programming Language :: Python", ], install_requires=[ "requests>=2.2.1", "dnspython>=1.12.0" ], packages=[ "rackddnsm", ] )
Fix other names of packages
Fix other names of packages
Python
mit
sunshinekitty/rack-ddnsm
68ca61ec2206b83cca34a319a472961793771407
setup.py
setup.py
import os from distutils.core import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() def package_data(package): package_data = [] for dirpath, dirnames, filenames in os.walk( os.path.join(os.path.dirname(__file__), package)): for i, dirname in enumerate(dirnames): if dirname.startswith('.'): del dirnames[i] if '__init__.py' in filenames: continue elif filenames: for f in filenames: package_data.append( os.path.join(dirpath[len(package)+len(os.sep):], f)) return {package: package_data} setup( name='django-linkcheck', version='0.1.0', description="A Django app that will analyze and report on links in any " "model that you register with it.", long_description=read('README.rst'), author='Andy Baker', author_email='andy@andybak.net', license='BSD', url='http://github.com/andybak/django-linkcheck/', packages=[ 'linkcheck', 'linkcheck.management', 'linkcheck.management.commands', ], package_data=package_data('linkcheck'), classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], )
import os from distutils.core import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='django-linkcheck', version='0.1.0', description="A Django app that will analyze and report on links in any " "model that you register with it.", long_description=read('README.rst'), author='Andy Baker', author_email='andy@andybak.net', license='BSD', url='http://github.com/andybak/django-linkcheck/', packages=[ 'linkcheck', 'linkcheck.management', 'linkcheck.management.commands', ], package_data={ 'linkcheck': [ 'templates/linkcheck/*.html', 'templates/linkcheck/*.xhtml', ] }, classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], )
Use static definition for package data.
Use static definition for package data.
Python
bsd-3-clause
Ixxy-Open-Source/django-linkcheck-old,claudep/django-linkcheck,Ixxy-Open-Source/django-linkcheck-old,claudep/django-linkcheck,AASHE/django-linkcheck,yvess/django-linkcheck,DjangoAdminHackers/django-linkcheck,DjangoAdminHackers/django-linkcheck
27851b9f2371853f1386d0033d0b9b2d0ef74b75
setup.py
setup.py
import sys from setuptools import setup # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) # Get the long description from the relevant file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() from setuptools.command.test import test as TestCommand class PyTest(TestCommand): user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = [] def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): #import here, cause outside the eggs aren't loaded import pytest errno = pytest.main(self.pytest_args) sys.exit(errno) setup( name='django-verified-email', version='0.1.1.dev0', description='Verified email changes for django', long_description=long_description, license='BSD', packages=['verified_email_change'], install_requires=[ 'Django>=1.7', 'django-ogmios', 'django-decoratormixins', 'django-absoluteuri', ], cmdclass={'test': PyTest}, tests_require=[ 'pytest', 'pytest-cov', 'pytest-django', ], )
import sys from setuptools import setup # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) # Get the long description from the relevant file with open(path.join(here, 'README.rst'), encoding='utf-8') as f: long_description = f.read() from setuptools.command.test import test as TestCommand class PyTest(TestCommand): user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = [] def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): #import here, cause outside the eggs aren't loaded import pytest errno = pytest.main(self.pytest_args) sys.exit(errno) setup( name='django-verified-email', version='0.1.1', description='Verified email changes for django', long_description=long_description, license='BSD', packages=['verified_email_change'], install_requires=[ 'Django>=1.7', 'django-ogmios', 'django-decoratormixins', 'django-absoluteuri', ], cmdclass={'test': PyTest}, tests_require=[ 'pytest', 'pytest-cov', 'pytest-django', ], package_data={ '': ['templates/**/*.html'], }, )
Include templates in package distribution
Include templates in package distribution
Python
bsd-2-clause
fusionbox/django-verified-email-change,fusionbox/django-verified-email-change
a28923d292abcb4b509b4a7eb95bdf2982c90bab
setup.py
setup.py
# coding: utf-8 from __future__ import unicode_literals from setuptools import setup, find_packages from os.path import dirname, join def main(): base_dir = dirname(__file__) setup( name='genty', version='0.0.1', description='Allows you to run a test with multiple data sets', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='oss@box.com', url='https://github.com/box/genty', license=open(join(base_dir, 'LICENSE')).read(), packages=find_packages(exclude=['test']), namespace_packages=[b'box'], # , b'box.test'], test_suite='test', zip_safe=False, ) if __name__ == '__main__': main()
# coding: utf-8 from __future__ import unicode_literals from setuptools import setup, find_packages from os.path import dirname, join def main(): base_dir = dirname(__file__) setup( name='genty', version='0.0.1', description='Allows you to run a test with multiple data sets', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='oss@box.com', url='https://github.com/box/genty', license=open(join(base_dir, 'LICENSE')).read(), packages=find_packages(exclude=['test']), namespace_packages=[b'box', b'box.test'], test_suite='test', zip_safe=False, ) if __name__ == '__main__': main()
Add "box.text" back to namespaced package
Add "box.text" back to namespaced package
Python
apache-2.0
box/genty,box/genty
de548682a8cae05fac4b303d08ec321fb683cc63
setup.py
setup.py
from setuptools import setup setup( name="public-drive-urls", version='0.4.0', author="Brian Peterson", author_email="bepetersn@gmail.com", description="Find Google Drive download URLs from a file's sharing URL", license="MIT", url='https://github.com/bepetersn/public-drive-urls/', py_modules=['public_drive_urls'], classifiers=[ ], install_requires=['requests'], extras_require={ 'test': ['nose', 'mock'], 'dev': ['pip-tools'] } )
from setuptools import setup setup( name="public-drive-urls", version='1.0.0', author="Brian Peterson", author_email="bepetersn@gmail.com", description="Find Google Drive download URLs from a file's sharing URL", license="MIT", url='https://github.com/bepetersn/public-drive-urls/', py_modules=['public_drive_urls'], classifiers=[ ], install_requires=['requests'], extras_require={ 'test': ['nose', 'mock'], 'dev': ['pip-tools'] } )
Bump version to a major number now that it's tested and we broke some APIs
Bump version to a major number now that it's tested and we broke some APIs
Python
mit
bepetersn/public-drive-urls
b9dbf4861f1c8b8ae70a130617d1913ef4efb539
setup.py
setup.py
from distutils.core import setup from setuptools import find_packages VERSION = '0.1.4' setup( name='spockbot', description='High level Python framework for building Minecraft ' 'clients and bots', license='MIT', long_description=open('README.rst').read(), version=VERSION, url='https://github.com/SpockBotMC/SpockBot', packages=find_packages(exclude=['tests', 'tests.*']), install_requires=[ 'cryptography >= 0.9', 'minecraft_data == 0.4.0', 'six', ], keywords=['minecraft'], classifiers=[ 'License :: OSI Approved :: MIT License', ] )
from distutils.core import setup from setuptools import find_packages VERSION = '0.2.0' setup( name='spockbot', description='High level Python framework for building Minecraft ' 'clients and bots', license='MIT', long_description=open('README.rst').read(), version=VERSION, url='https://github.com/SpockBotMC/SpockBot', packages=find_packages(exclude=['tests', 'tests.*']), install_requires=[ 'cryptography >= 0.9', 'minecraft_data == 0.4.0', 'six', ], keywords=['minecraft'], classifiers=[ 'License :: OSI Approved :: MIT License', ] )
Increase version number so we can start adding 1.9 support
Increase version number so we can start adding 1.9 support
Python
mit
Gjum/SpockBot,SpockBotMC/SpockBot
d89f7d742813b70d7bccdf90e9722d727599f977
setup.py
setup.py
import os.path from setuptools import find_packages from setuptools import setup version_path = os.path.join(os.path.dirname(__file__), 'VERSION') with open(version_path) as fh: version = fh.read().strip() setup(name='vector-datasource', version=version, description="", long_description="""\ """, classifiers=[], keywords='', author='', author_email='', url='', license='MIT', packages=find_packages(exclude=['ez_setup', 'examples', 'test']), include_package_data=True, zip_safe=False, install_requires=[ 'ASTFormatter', 'mapbox-vector-tile', 'ModestMaps >= 1.3.0', 'pycountry', 'simplejson', 'StreetNames', 'tilequeue', 'kdtree', 'webcolors', ], test_suite='test', tests_require=[ ], entry_points=""" # -*- Entry points: -*- """, )
import os.path from setuptools import find_packages from setuptools import setup version_path = os.path.join(os.path.dirname(__file__), 'VERSION') with open(version_path) as fh: version = fh.read().strip() setup(name='vector-datasource', version=version, description="", long_description="""\ """, classifiers=[], keywords='', author='', author_email='', url='', license='MIT', packages=find_packages(exclude=['ez_setup', 'examples', 'test']), include_package_data=True, zip_safe=False, install_requires=[ 'ASTFormatter', 'mapbox-vector-tile', 'ModestMaps >= 1.3.0', 'pycountry', 'pyshp', 'simplejson', 'StreetNames', 'tilequeue', 'kdtree', 'webcolors', ], test_suite='test', tests_require=[ ], entry_points=""" # -*- Entry points: -*- """, )
Add shapefile library, used for shapefile-based fixtures.
Add shapefile library, used for shapefile-based fixtures.
Python
mit
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
00b822d2523708f333e214fc7f507ef3bf1ca865
setup.py
setup.py
import os try: from setuptools import setup except ImportError: from distutils.core import setup from pypvwatts.__version__ import VERSION setup( name='pypvwatts', version=VERSION, author='Miguel Paolino', author_email='mpaolino@gmail.com', url='https://github.com/mpaolino/pypvwatts', download_url='https://github.com/mpaolino/pypvwatts/archive/master.zip', description='Python wrapper for NREL PVWatts\'s API.', long_description=open('README.md').read(), packages=['pypvwatts'], provides=['pypvwatts'], requires=['requests'], install_requires=['requests >= 2.1.0'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'License :: OSI Approved :: MIT License', 'Topic :: Internet', 'Topic :: Internet :: WWW/HTTP', ], keywords='nrel pvwatts pypvwatts', license='MIT', )
import os try: from setuptools import setup except ImportError: from distutils.core import setup from pypvwatts.__version__ import VERSION setup( name='pypvwatts', version=VERSION, author='Miguel Paolino', author_email='mpaolino@gmail.com', url='https://github.com/mpaolino/pypvwatts', download_url='https://github.com/mpaolino/pypvwatts/archive/master.zip', description='Python wrapper for NREL PVWatts\'s API.', long_description=open('README.md').read(), packages=['pypvwatts'], provides=['pypvwatts'], requires=['requests'], install_requires=['requests >= 2.1.0'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'License :: OSI Approved :: MIT License', 'Topic :: Internet', 'Topic :: Internet :: WWW/HTTP', ], keywords='nrel pvwatts pypvwatts', license='MIT', python_requires=">=2.7", )
Make sure we require at least python 2.7
Make sure we require at least python 2.7
Python
mit
mpaolino/pypvwatts
eb4ed4f0b1086a1f5322078d91d2132582a487e7
setup.py
setup.py
import os import setuptools README = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'README.rst') setuptools.setup( name='theanets', version='0.8.0pre', packages=setuptools.find_packages(), author='Leif Johnson', author_email='leif@lmjohns3.com', description='Feedforward and recurrent neural nets using Theano', long_description=open(README).read(), license='MIT', url='http://github.com/lmjohns3/theanets', keywords=('machine-learning ' 'neural-network ' 'deep-neural-network ' 'recurrent-neural-network ' 'autoencoder ' 'sparse-autoencoder ' 'classifier ' 'theano ' ), install_requires=['climate', 'downhill', 'theano'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Artificial Intelligence', ], )
import os import setuptools README = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'README.rst') setuptools.setup( name='theanets', version='0.8.0pre', packages=setuptools.find_packages(), author='Leif Johnson', author_email='leif@lmjohns3.com', description='Feedforward and recurrent neural nets using Theano', long_description=open(README).read(), license='MIT', url='http://github.com/lmjohns3/theanets', keywords=('machine-learning ' 'neural-network ' 'deep-neural-network ' 'recurrent-neural-network ' 'autoencoder ' 'sparse-autoencoder ' 'classifier ' 'theano ' ), install_requires=['climate', 'downhill', 'theano', # TODO(leif): remove when theano is fixed. 'nose-parameterized', ], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Artificial Intelligence', ], )
Add a temporary dependency on nose-parameterized.
Add a temporary dependency on nose-parameterized.
Python
mit
chrinide/theanets,lmjohns3/theanets
936cc203637475c57c2ed26a6fa31167c7e4ebda
setup.py
setup.py
# Copyright (c) 2008-2009 Simplistix Ltd # See license.txt for license details. import os from setuptools import setup, find_packages name = 'testfixtures' base_dir = os.path.dirname(__file__) setup( name=name, version=file(os.path.join(base_dir,name,'version.txt')).read().strip(), author='Chris Withers', author_email='chris@simplistix.co.uk', license='MIT', description="A collection of helpers and mock objects for unit tests and doc tests.", long_description=open(os.path.join(base_dir,'docs','description.txt')).read(), url='http://www.simplistix.co.uk/software/python/testfixtures', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', ], packages=find_packages(), zip_safe=False, include_package_data=True, install_requires=( 'zope.dottedname', ), extras_require=dict( test=['mock'], ) )
# Copyright (c) 2008-2009 Simplistix Ltd # See license.txt for license details. import os from setuptools import setup, find_packages name = 'testfixtures' base_dir = os.path.dirname(__file__) setup( name=name, version=file(os.path.join(base_dir,name,'version.txt')).read().strip(), author='Chris Withers', author_email='chris@simplistix.co.uk', license='MIT', description="A collection of helpers and mock objects for unit tests and doc tests.", long_description=open(os.path.join(base_dir,'docs','description.txt')).read(), url='http://www.simplistix.co.uk/software/python/testfixtures', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', ], packages=find_packages(), zip_safe=False, include_package_data=True, install_requires=( 'zope.dottedname', ), extras_require=dict( test=['mock','manuel'], ) )
Use Manuel instead of doctest. Missed a bit :-(
Use Manuel instead of doctest. Missed a bit :-(
Python
mit
nebulans/testfixtures,Simplistix/testfixtures
d1d0576b94ce000a77e08bd8353f5c1c10b0839f
setup.py
setup.py
#!/usr/bin/env python from distutils.core import setup setup( name = 'AudioTranscode', version = '1.0', packages = ['audioTranscode'], scripts = ['transcode'], author = 'Jeffrey Aylesworth', author_email = 'jeffrey@aylesworth.ca', license = 'MIT', url = 'http://github.com/jeffayle/Transcode' )
#!/usr/bin/env python from distutils.core import setup setup( name = 'AudioTranscode', version = '1.0', packages = ['audioTranscode','audioTranscode.encoders','audioTranscode.decoders'], scripts = ['transcode'], author = 'Jeffrey Aylesworth', author_email = 'jeffrey@aylesworth.ca', license = 'MIT', url = 'http://github.com/jeffayle/Transcode' )
Include .encoders and .decoders packages with the distribution
Include .encoders and .decoders packages with the distribution
Python
isc
jeffayle/Transcode
a214b216ba260977eace4c9aff260089dd761a8f
setup.py
setup.py
"""setup.py""" #pylint:disable=line-too-long from codecs import open as codecs_open try: from setuptools import setup except ImportError: from distutils.core import setup #pylint:disable=import-error,no-name-in-module with codecs_open('README.rst', 'r', 'utf-8') as f: readme = f.read() with codecs_open('HISTORY.rst', 'r', 'utf-8') as f: history = f.read() setup( name='jsonrpcserver', version='2.0.2', description='JSON-RPC server library.', long_description=readme + '\n\n' + history, author='Beau Barker', author_email='beauinmelbourne@gmail.com', url='https://jsonrpcserver.readthedocs.org/', packages=['jsonrpcserver'], package_data={'jsonrpcserver': ['request-schema.json']}, include_package_data=True, install_requires=['jsonschema'], tests_require=['tox'], classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', 'Operating System :: OS Independent' ], )
"""setup.py""" #pylint:disable=line-too-long from codecs import open as codecs_open try: from setuptools import setup except ImportError: from distutils.core import setup #pylint:disable=import-error,no-name-in-module with codecs_open('README.rst', 'r', 'utf-8') as f: readme = f.read() with codecs_open('HISTORY.rst', 'r', 'utf-8') as f: history = f.read() setup( name='jsonrpcserver', version='2.0.2', description='JSON-RPC server library.', long_description=readme + '\n\n' + history, author='Beau Barker', author_email='beauinmelbourne@gmail.com', url='https://jsonrpcserver.readthedocs.org/', packages=['jsonrpcserver'], package_data={'jsonrpcserver': ['request-schema.json']}, include_package_data=True, install_requires=['jsonschema'], tests_require=['tox'], classifiers=[ 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
Add python versions to Trove classifiers
Add python versions to Trove classifiers
Python
mit
bcb/jsonrpcserver
60b8e2ce0196b44321d847f6610a29218ebb91b2
setup.py
setup.py
from setuptools import find_packages import os.path as op try: from setuptools import setup except ImportError: from distutils.core import setup here = op.abspath(op.dirname(__file__)) # Get metadata from the AFQ/version.py file: ver_file = op.join(here, 'AFQ', 'version.py') with open(ver_file) as f: exec(f.read()) REQUIRES = [] with open(op.join(here, 'requirements.txt')) as f: l = f.readline()[:-1] while l: REQUIRES.append(l) l = f.readline()[:-1] with open(op.join(here, 'README.md'), encoding='utf-8') as f: LONG_DESCRIPTION = f.read() opts = dict(name=NAME, maintainer=MAINTAINER, maintainer_email=MAINTAINER_EMAIL, description=DESCRIPTION, long_description=LONG_DESCRIPTION, url=URL, download_url=DOWNLOAD_URL, license=LICENSE, classifiers=CLASSIFIERS, author=AUTHOR, author_email=AUTHOR_EMAIL, platforms=PLATFORMS, packages=find_packages(), install_requires=REQUIRES, scripts=SCRIPTS, version=VERSION, python_requires=PYTHON_REQUIRES) if __name__ == '__main__': setup(**opts)
from setuptools import find_packages import os.path as op try: from setuptools import setup except ImportError: from distutils.core import setup here = op.abspath(op.dirname(__file__)) # Get metadata from the AFQ/version.py file: ver_file = op.join(here, 'AFQ', 'version.py') with open(ver_file) as f: exec(f.read()) REQUIRES = [] with open(op.join(here, 'requirements.txt')) as f: ll = f.readline()[:-1] while ll: REQUIRES.append(l) ll = f.readline()[:-1] with open(op.join(here, 'README.md'), encoding='utf-8') as f: LONG_DESCRIPTION = f.read() opts = dict(name=NAME, maintainer=MAINTAINER, maintainer_email=MAINTAINER_EMAIL, description=DESCRIPTION, long_description=LONG_DESCRIPTION, url=URL, download_url=DOWNLOAD_URL, license=LICENSE, classifiers=CLASSIFIERS, author=AUTHOR, author_email=AUTHOR_EMAIL, platforms=PLATFORMS, packages=find_packages(), install_requires=REQUIRES, scripts=SCRIPTS, version=VERSION, python_requires=PYTHON_REQUIRES) if __name__ == '__main__': setup(**opts)
Make this one-char variable name a two-char.
Make this one-char variable name a two-char.
Python
bsd-2-clause
yeatmanlab/pyAFQ,yeatmanlab/pyAFQ,arokem/pyAFQ,arokem/pyAFQ
a334611a1bb159489d06a4e67fc0b7f4690b1701
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ gifshare - A command-line tool to upload images to S3. Run with `python setup.py install` to install gifshare into your default Python environment. """ from setuptools import setup import os.path HERE = os.path.dirname(__file__) setup( name="gifshare", version="0.0.4", description="Store images in S3", long_description=__doc__, author='Mark Smith', author_email='mark.smith@practicalpoetry.co.uk', url='https://github.com/judy2k/gifshare', license='MIT License', entry_points={ 'console_scripts': [ 'gifshare = gifshare:main', ] }, packages=['gifshare'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], install_requires=open( os.path.join(HERE, 'requirements/_base.txt') ).readlines(), zip_safe=False, )
#!/usr/bin/env python # -*- coding: utf-8 -*- """ gifshare - A command-line tool to upload images to S3. """ from setuptools import setup import os.path HERE = os.path.dirname(__file__) setup( name="gifshare", version="0.0.4", description="Store images in S3", long_description=__doc__, author='Mark Smith', author_email='mark.smith@practicalpoetry.co.uk', url='https://github.com/judy2k/gifshare', license='MIT License', entry_points={ 'console_scripts': [ 'gifshare = gifshare:main', ] }, packages=['gifshare'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], install_requires=open( os.path.join(HERE, 'requirements/_base.txt') ).readlines(), zip_safe=False, )
Remove install information because it's picked up by PyPI
Remove install information because it's picked up by PyPI
Python
mit
judy2k/gifshare
ef7f0090bfb7f37fa584123520b02f69a3a392a0
setup.py
setup.py
# # Copyright 2013 by Arnold Krille <arnold@arnoldarts.de> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from distutils.core import setup setup( name="workout", version="0.2.0", description="Store and display workout-data from FIT-files in mezzanine.", author="Arnold Krille", author_email="arnold@arnoldarts.de", url="http://github.com/kampfschlaefer/mezzanine-workout", license=open('LICENSE', 'r').read(), packages=['workout'], package_data={'workout': ['templates/workout/*']}, install_requires=['fitparse==0.0.1-dev'], dependency_links=['git+https://github.com/kampfschlaefer/python-fitparse.git@ng#egg=fitparse-0.0.1-dev'], )
# # Copyright 2013 by Arnold Krille <arnold@arnoldarts.de> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from distutils.core import setup setup( name="workout", version="0.2.1", description="Store and display workout-data from FIT-files in mezzanine.", author="Arnold Krille", author_email="arnold@arnoldarts.de", url="http://github.com/kampfschlaefer/mezzanine-workout", license=open('LICENSE', 'r').read(), packages=['workout'], package_data={'workout': ['templates/workout/*', 'static/*']}, install_requires=['fitparse==0.0.1-dev'], dependency_links=['git+https://github.com/kampfschlaefer/python-fitparse.git@ng#egg=fitparse-0.0.1-dev'], )
Fix inclusion of static files into the package
Fix inclusion of static files into the package and increase the version-number a bit.
Python
apache-2.0
kampfschlaefer/mezzanine-workout,kampfschlaefer/mezzanine-workout,kampfschlaefer/mezzanine-workout
0e167005c4a948aa101684b41b8c40c2523b1628
setup.py
setup.py
from setuptools import setup BLOCKS = [ 'mentoring = mentoring:MentoringBlock', 'answer = mentoring:AnswerBlock', 'quizz = mentoring:QuizzBlock', 'tip = mentoring:QuizzTipBlock', ] setup( name='xblock-mentoring', version='0.1', description='XBlock - Mentoring', packages=['mentoring'], entry_points={ 'xblock.v1': BLOCKS, 'xmodule.v1': BLOCKS, } )
from setuptools import setup BLOCKS = [ 'mentoring = mentoring:MentoringBlock', 'answer = mentoring:AnswerBlock', 'quizz = mentoring:QuizzBlock', 'tip = mentoring:QuizzTipBlock', ] setup( name='xblock-mentoring', version='0.1', description='XBlock - Mentoring', packages=['mentoring'], entry_points={ 'xblock.v1': BLOCKS, } )
Remove hack that registered the XBlock as a XModule for the LMS
Remove hack that registered the XBlock as a XModule for the LMS
Python
agpl-3.0
gsehub/xblock-mentoring,open-craft/xblock-mentoring,gsehub/xblock-mentoring,gsehub/xblock-mentoring,proversity-org/problem-builder,proversity-org/problem-builder,edx-solutions/xblock-mentoring,proversity-org/problem-builder,edx-solutions/xblock-mentoring,open-craft/xblock-mentoring,open-craft/xblock-mentoring,edx-solutions/xblock-mentoring
09e8511e54a8fc5a558281f4af42b4540293f0d1
setup.py
setup.py
from setuptools import setup, find_packages version = '0.6.1' setup( name='ckanext-oaipmh', version=version, description="OAI-PMH server and harvester for CKAN", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='CSC - IT Center for Science Ltd.', author_email='kata-project@postit.csc.fi', url='https://github.com/kata-csc/ckanext-oaipmh', license='AGPL', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), namespace_packages=['ckanext', 'ckanext.oaipmh'], include_package_data=True, zip_safe=False, install_requires=[ # -*- Extra requirements: -*- 'pyoai', 'ckanext-harvest', 'lxml', 'rdflib', 'beautifulsoup4', 'pointfree', 'functionally', 'fn', ], entry_points=""" [ckan.plugins] oaipmh=ckanext.oaipmh.plugin:OAIPMHPlugin oaipmh_harvester=ckanext.oaipmh.harvester:OAIPMHHarvester ida_harvester=ckanext.oaipmh.ida:IdaHarvester cmdi_harvester=ckanext.oaipmh.cmdi:CMDIHarvester """, )
from setuptools import setup, find_packages version = '0.6.2' setup( name='ckanext-oaipmh', version=version, description="OAI-PMH server and harvester for CKAN", long_description="""\ """, classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='', author='CSC - IT Center for Science Ltd.', author_email='kata-project@postit.csc.fi', url='https://github.com/kata-csc/ckanext-oaipmh', license='AGPL', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), namespace_packages=['ckanext', 'ckanext.oaipmh'], include_package_data=True, zip_safe=False, install_requires=[ # -*- Extra requirements: -*- 'pyoai', 'ckanext-harvest', 'lxml', 'rdflib', 'beautifulsoup4', 'pointfree', 'functionally', 'fn', ], entry_points=""" [ckan.plugins] oaipmh=ckanext.oaipmh.plugin:OAIPMHPlugin oaipmh_harvester=ckanext.oaipmh.harvester:OAIPMHHarvester ida_harvester=ckanext.oaipmh.ida:IdaHarvester cmdi_harvester=ckanext.oaipmh.cmdi:CMDIHarvester """, )
Update version number to 0.6.2
Update version number to 0.6.2
Python
agpl-3.0
kata-csc/ckanext-oaipmh,kata-csc/ckanext-oaipmh,kata-csc/ckanext-oaipmh
4f889e4f42d831f2a12fccdd1956f77a8032563c
setup.py
setup.py
from setuptools import setup setup(name='azurerm', version='0.5.3', description='Azure Resource Manager REST wrappers', url='http://github.com/gbowerman/azurerm', author='sendmarsh', author_email='guybo@outlook.com', license='MIT', packages=['azurerm'], install_requires=[ 'adal', 'requests', ], zip_safe=False)
from setuptools import setup setup(name='azurerm', version='0.5.4', description='Azure Resource Manager REST wrappers', url='http://github.com/gbowerman/azurerm', author='sendmarsh', author_email='guybo@outlook.com', license='MIT', packages=['azurerm'], install_requires=[ 'adal', 'requests', ], zip_safe=False)
Update with new azurerm version
Update with new azurerm version
Python
mit
gbowerman/azurerm
055d9e2b74e997d6f744c15fe1b1e19bcd3203d4
setup.py
setup.py
#!/usr/bin/env python3 from setuptools import setup, find_packages setup( name='ppp_questionparsing_grammatical', version='0.4.7', description='Natural language processing module for the PPP.', url='https://github.com/ProjetPP/PPP-QuestionParsing-Grammatical', author='Projet Pensées Profondes', author_email='ppp2014@listes.ens-lyon.fr', license='MIT', classifiers=[ 'Environment :: No Input/Output (Daemon)', 'Development Status :: 1 - Planning', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'Topic :: Software Development :: Libraries', ], install_requires=[ 'ppp_datamodel>=0.5,<0.6', 'ppp_libmodule>=0.6,<0.7', 'jsonrpclib-pelix', 'nltk' ], packages=[ 'ppp_questionparsing_grammatical', 'ppp_questionparsing_grammatical.data', ], ) import sys if 'install' in sys.argv: import nltk nltk.download("wordnet")
#!/usr/bin/env python3 from setuptools import setup, find_packages setup( name='ppp_questionparsing_grammatical', version='0.4.7', description='Natural language processing module for the PPP.', url='https://github.com/ProjetPP/PPP-QuestionParsing-Grammatical', author='Projet Pensées Profondes', author_email='ppp2014@listes.ens-lyon.fr', license='MIT', classifiers=[ 'Environment :: No Input/Output (Daemon)', 'Development Status :: 1 - Planning', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Application', 'Topic :: Software Development :: Libraries', ], install_requires=[ 'ppp_datamodel>=0.5,<0.7', 'ppp_libmodule>=0.6,<0.8', 'jsonrpclib-pelix', 'nltk' ], packages=[ 'ppp_questionparsing_grammatical', 'ppp_questionparsing_grammatical.data', ], ) import sys if 'install' in sys.argv: import nltk nltk.download("wordnet")
Fix version numbers of libraries.
Fix version numbers of libraries.
Python
agpl-3.0
ProjetPP/PPP-QuestionParsing-Grammatical,ProjetPP/PPP-QuestionParsing-Grammatical
ea0058f3c360f8b1f847edab77aa650761ea8e77
setup.py
setup.py
from setuptools import setup try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') except(IOError, ImportError): long_description = open('README.md').read() setup( name="xonsh-apt-tabcomplete", version="0.1.3", license="BSD", url="https://github.com/DangerOnTheRanger/xonsh-apt-tabcomplete", download_url="https://github.com/DangerOnTheRanger/xonsh-apt-tabcomplete/tarball/v0.1.3", description="APT tabcomplete support for the Xonsh shell", long_description=long_description, author="Kermit Alexander II", author_email="tuxfreak@tuxfamily.org", packages=['xontrib'], package_dir={'xontrib': 'xontrib'}, package_data={'xontrib': ['*.xsh']}, zip_safe=False, classifiers=[ "Development Status :: 2 - Pre-Alpha", "Environment :: Console", "Environment :: Plugins", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: BSD License", "Operating System :: POSIX", "Programming Language :: Python" ] )
from setuptools import setup try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') except(IOError, ImportError): long_description = open('README.md').read() setup( name="xonsh-apt-tabcomplete", version="0.1.6", license="BSD", url="https://github.com/DangerOnTheRanger/xonsh-apt-tabcomplete", download_url="https://github.com/DangerOnTheRanger/xonsh-apt-tabcomplete/tarball/v0.1.6", description="APT tabcomplete support for the Xonsh shell", long_description=long_description, author="Kermit Alexander II", author_email="tuxfreak@tuxfamily.org", packages=['xontrib'], package_dir={'xontrib': 'xontrib'}, package_data={'xontrib': ['*.xsh']}, zip_safe=False, classifiers=[ "Development Status :: 2 - Pre-Alpha", "Environment :: Console", "Environment :: Plugins", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: BSD License", "Operating System :: POSIX", "Programming Language :: Python" ] )
Update patch-level version number, upload to pypi
Update patch-level version number, upload to pypi
Python
bsd-2-clause
DangerOnTheRanger/xonsh-apt-tabcomplete
bba064cc84bf7d0b7b4b7410a6a333af0609730c
setup.py
setup.py
from setuptools import setup, find_packages setup( name='autobuilder', version='1.0.2', packages=find_packages(), license='MIT', author='Matt Madison', author_email='matt@madison.systems', entry_points={ 'console_scripts': [ 'update-sstate-mirror = autobuilder.scripts.update_sstate_mirror:main', 'update-downloads = autobuilder.scripts.update_downloads:main', 'install-sdk = autobuilder.scripts.install_sdk:main', 'autorev-report = autobuilder.scripts.autorev_report:main' ] }, include_package_data=True, package_data={ 'autobuilder': ['templates/*.txt'] }, install_requires=['buildbot[tls]>=1.4.0', 'buildbot-worker>=1.4.0', 'buildbot-www>=1.4.0', 'buildbot-console-view>=1.4.0', 'buildbot-grid-view>=1.4.0', 'buildbot-waterfall-view>=1.4.0' 'buildbot-badges>=1.4.0', 'boto3', 'botocore', 'twisted'] )
from setuptools import setup, find_packages setup( name='autobuilder', version='1.0.3', packages=find_packages(), license='MIT', author='Matt Madison', author_email='matt@madison.systems', entry_points={ 'console_scripts': [ 'update-sstate-mirror = autobuilder.scripts.update_sstate_mirror:main', 'update-downloads = autobuilder.scripts.update_downloads:main', 'install-sdk = autobuilder.scripts.install_sdk:main', 'autorev-report = autobuilder.scripts.autorev_report:main' ] }, include_package_data=True, package_data={ 'autobuilder': ['templates/*.txt'] }, install_requires=['buildbot[tls]>=1.4.0', 'buildbot-worker>=1.4.0', 'buildbot-www>=1.4.0', 'buildbot-console-view>=1.4.0', 'buildbot-grid-view>=1.4.0', 'buildbot-waterfall-view>=1.4.0' 'buildbot-badges>=1.4.0', 'boto3', 'botocore', 'treq', 'twisted'] )
Add 'treq' as a requirement for GitHubStatusPush.
Add 'treq' as a requirement for GitHubStatusPush.
Python
mit
madisongh/autobuilder
5c8754aefa0a0b2f9e49d95970475a66a6de9510
start.py
start.py
from core.computer import Computer from time import sleep from console import start as start_console # Initialize computer instance computer = Computer() computer.start_monitoring() computer.processor.start_monitoring() for mem in computer.nonvolatile_memory: mem.start_monitoring() computer.virtual_memory.start_monitoring() # Start console interface start_console(computer) # Shutdown computer.processor.stop_monitoring() for mem in computer.nonvolatile_memory: mem.stop_monitoring() computer.virtual_memory.stop_monitoring() sleep(1)
from core.computer import Computer from time import sleep from console import start as start_console # Initialize computer instance computer = Computer() computer.start_monitoring() computer.processor.start_monitoring() for mem in computer.nonvolatile_memory: mem.start_monitoring() computer.virtual_memory.start_monitoring() # Start console interface start_console(computer) # Shutdown computer.processor.stop_monitoring() for mem in computer.nonvolatile_memory: mem.stop_monitoring() computer.virtual_memory.stop_monitoring() computer.stop_monitoring() sleep(1)
Stop monitoring computer on shutdown.
Stop monitoring computer on shutdown.
Python
bsd-3-clause
uzumaxy/pyspectator
340cbe542b89515033b6da40cf6cd6f761cfba9f
src/constants.py
src/constants.py
#!/usr/bin/env python TRAJECTORY = 'linear' CONTROLLER = 'pid' if TRAJECTORY == 'linear': SIMULATION_TIME_IN_SECONDS = 80.0 elif TRAJECTORY == 'circular': SIMULATION_TIME_IN_SECONDS = 120.0 elif TRAJECTORY == 'squared': SIMULATION_TIME_IN_SECONDS = 160.0 DELTA_T = 0.1 # this is the sampling time STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T) # control constants K_X = 0.90 K_Y = 0.90 K_THETA = 0.90 # PID control constants K_P_V = 0.2 K_I_V = 1.905 K_D_V = 0.00 K_P_W = 0.45 K_I_W = 1.25 K_D_W = 0.000
#!/usr/bin/env python TRAJECTORY = 'linear' CONTROLLER = 'pid' if TRAJECTORY == 'linear': SIMULATION_TIME_IN_SECONDS = 60.0 elif TRAJECTORY == 'circular': SIMULATION_TIME_IN_SECONDS = 120.0 elif TRAJECTORY == 'squared': SIMULATION_TIME_IN_SECONDS = 160.0 DELTA_T = 0.1 # this is the sampling time STEPS = int(SIMULATION_TIME_IN_SECONDS / DELTA_T) # control constants K_X = 0.90 K_Y = 0.90 K_THETA = 0.90 # PID control constants K_P_V = 0.2 K_I_V = 1.905 K_D_V = 0.00 K_P_W = 0.45 K_I_W = 1.25 K_D_W = 0.000
Change simulation time of linear trajectory to 60 seconds
Change simulation time of linear trajectory to 60 seconds
Python
mit
bit0001/trajectory_tracking,bit0001/trajectory_tracking
746420daec76bf605f0da57902bb60b2cb17c87d
bcbio/bed/__init__.py
bcbio/bed/__init__.py
import pybedtools as bt import six def concat(bed_files, catted=None): """ recursively concat a set of BED files, returning a sorted bedtools object of the result """ bed_files = [x for x in bed_files if x] if len(bed_files) == 0: if catted: # move to a .bed extension for downstream tools if not already sorted_bed = catted.sort() if not sorted_bed.fn.endswith(".bed"): return sorted_bed.moveto(sorted_bed.fn + ".bed") else: return sorted_bed else: return catted if not catted: bed_files = list(bed_files) catted = bt.BedTool(bed_files.pop()) else: catted = catted.cat(bed_files.pop(), postmerge=False, force_truncate=False) return concat(bed_files, catted) def merge(bedfiles): """ given a BED file or list of BED files merge them an return a bedtools object """ if isinstance(bedfiles, list): catted = concat(bedfiles) else: catted = concat([bedfiles]) if catted: return concat(bedfiles).sort().merge() else: return catted def minimize(bed_file): """ strip a BED file down to its three necessary columns: chrom start end """ return bt.BedTool(bed_file).cut(range(3))
import pybedtools as bt import six def concat(bed_files, catted=None): """ recursively concat a set of BED files, returning a sorted bedtools object of the result """ bed_files = [x for x in bed_files if x] if len(bed_files) == 0: if catted: # move to a .bed extension for downstream tools if not already sorted_bed = catted.sort() if not sorted_bed.fn.endswith(".bed"): return sorted_bed.moveto(sorted_bed.fn + ".bed") else: return sorted_bed else: return catted if not catted: bed_files = list(bed_files) catted = bt.BedTool(bed_files.pop()) else: catted = catted.cat(bed_files.pop(), postmerge=False, force_truncate=False) return concat(bed_files, catted) def merge(bedfiles): """ given a BED file or list of BED files merge them an return a bedtools object """ if isinstance(bedfiles, list): catted = concat(bedfiles) else: catted = concat([bedfiles]) if catted: return concat(bedfiles).sort().merge() else: return catted def minimize(bed_file): """ strip a BED file down to its three necessary columns: chrom start end """ if not bed_file: return bed_file else: return bt.BedTool(bed_file).cut(range(3))
Return None if no bed file exists to be opened.
Return None if no bed file exists to be opened.
Python
mit
guillermo-carrasco/bcbio-nextgen,biocyberman/bcbio-nextgen,lbeltrame/bcbio-nextgen,chapmanb/bcbio-nextgen,mjafin/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,a113n/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,vladsaveliev/bcbio-nextgen,brainstorm/bcbio-nextgen,chapmanb/bcbio-nextgen,lbeltrame/bcbio-nextgen,vladsaveliev/bcbio-nextgen,lpantano/bcbio-nextgen,mjafin/bcbio-nextgen,gifford-lab/bcbio-nextgen,vladsaveliev/bcbio-nextgen,gifford-lab/bcbio-nextgen,lbeltrame/bcbio-nextgen,a113n/bcbio-nextgen,biocyberman/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,biocyberman/bcbio-nextgen,mjafin/bcbio-nextgen,a113n/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,gifford-lab/bcbio-nextgen,brainstorm/bcbio-nextgen,lpantano/bcbio-nextgen,chapmanb/bcbio-nextgen,brainstorm/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,lpantano/bcbio-nextgen
8d4f3f63b3d38156759319d32a63d34413c77a71
steel/fields/mixin.py
steel/fields/mixin.py
from gettext import gettext as _ class Fixed: _("A mixin that ensures the presence of a predetermined value") def __init__(self, value, *args, **kwargs): self.value = value super(Fixed, self).__init__(*args, **kwargs) def encode(self, value): # Always encode the fixed value return super(Fixed, self).encode(self.value) def decode(self, value): value = super(Fixed, self).decode(value) # Check that the value matches what it should be if value != self.value: raise ValueError("Expected %r, got %r" % (self.value, value)) return value
from gettext import gettext as _ class Fixed: _("A mixin that ensures the presence of a predetermined value") def __init__(self, value, *args, **kwargs): self.value = value super(Fixed, self).__init__(*args, **kwargs) def encode(self, value): # Always encode the fixed value return super(Fixed, self).encode(self.value) def decode(self, value): value = super(Fixed, self).decode(value) # Check that the value matches what it should be if value != self.value: raise ValueError(_("Expected %r, got %r" % (self.value, value))) return value
Mark another string for translation
Mark another string for translation
Python
bsd-3-clause
gulopine/steel-experiment
1d448b65840509c5f21abb7f5ad65a6ce20b139c
packs/travisci/actions/lib/action.py
packs/travisci/actions/lib/action.py
from st2actions.runners.pythonrunner import Action import requests class TravisCI(Action): def __init__(self, config): super(TravisCI, self).__init__(config) def _init_header(self): travis_header = { 'User_Agent': self.config['User-Agent'], 'Accept': self.config['Accept'], 'Host': self.config['Host'], } return travis_header def _auth_header(self): _HEADERS = self._init_header() _HEADERS['Authorization'] = self.config["Authorization"] _HEADERS['Content-Type'] = self.config["Content-Type"] return _HEADERS def _perform_request(self, uri, method, data=None, requires_auth=False): if method == "GET": if requires_auth: _HEADERS = self._auth_header() else: _HEADERS = self._init_header() response = requests.get(uri, headers=_HEADERS) elif method == "POST": _HEADERS = self._auth_header response = requests.post(uri, headers=_HEADERS) elif method == "PUT": _HEADERS = self._auth_header() _HEADERS['Authorization'] = self.config["Authorization"] _HEADERS['Content-Type'] = self.config["Content-Type"] response = requests.put(uri, data=data, headers=_HEADERS) return response
import requests from st2actions.runners.pythonrunner import Action API_URL = 'https://api.travis-ci.org' HEADERS_ACCEPT = 'application/vnd.travis-ci.2+json' HEADERS_HOST = '' class TravisCI(Action): def __init__(self, config): super(TravisCI, self).__init__(config) def _get_auth_headers(self): headers = {} headers['Authorization'] = self.config["Authorization"] headers['Content-Type'] = self.config["Content-Type"] return headers def _perform_request(self, uri, method, data=None, requires_auth=False): if method == "GET": if requires_auth: headers = self._get_auth_headers() else: headers = {} response = requests.get(uri, headers=headers) elif method == 'POST': headers = self._get_auth_headers() response = requests.post(uri, headers=headers) elif method == 'PUT': headers = self._get_auth_headers() response = requests.put(uri, data=data, headers=headers) return response
Remove unnecessary values from the config - those should just be constants.
Remove unnecessary values from the config - those should just be constants.
Python
apache-2.0
StackStorm/st2contrib,StackStorm/st2contrib,pidah/st2contrib,pidah/st2contrib,pearsontechnology/st2contrib,StackStorm/st2contrib,pearsontechnology/st2contrib,tonybaloney/st2contrib,psychopenguin/st2contrib,digideskio/st2contrib,pearsontechnology/st2contrib,lmEshoo/st2contrib,tonybaloney/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,lmEshoo/st2contrib,armab/st2contrib,armab/st2contrib,digideskio/st2contrib,pidah/st2contrib,armab/st2contrib,psychopenguin/st2contrib
ea8d96219748bacc7bedbb93f5e20f3648d5748a
microscopes/lda/testutil.py
microscopes/lda/testutil.py
"""Test helpers specific to LDA """ import numpy as np import itertools as it from microscopes.common.testutil import permutation_iter def toy_dataset(defn): """Generate a toy variadic dataset for HDP-LDA """ lengths = 1 + np.random.poisson(lam=1.0, size=defn.n) def mkrow(nwords): return np.random.choice(range(defn.v), size=nwords) return map(mkrow, lengths) def permutations(doclengths): """Generate a permutation of XXX WARNING: very quickly becomes intractable """ perms = [permutation_iter(length) for length in doclengths] for prod in it.product(*perms): dishes = sum([max(x) + 1 for x in prod]) for p in permutation_iter(dishes): idx = 0 ret = [] for d in prod: ntables = max(d) + 1 ret.append(tuple(p[idx:idx+ntables])) idx += ntables yield prod, tuple(ret)
"""Test helpers specific to LDA """ import numpy as np import itertools as it from microscopes.common.testutil import permutation_iter def toy_dataset(defn): """Generate a toy variadic dataset for HDP-LDA """ lengths = 1 + np.random.poisson(lam=10, size=defn.n) def mkrow(nwords): return np.random.choice(range(defn.v), size=nwords) return map(mkrow, lengths) def permutations(doclengths): """Generate a permutation of XXX WARNING: very quickly becomes intractable """ perms = [permutation_iter(length) for length in doclengths] for prod in it.product(*perms): dishes = sum([max(x) + 1 for x in prod]) for p in permutation_iter(dishes): idx = 0 ret = [] for d in prod: ntables = max(d) + 1 ret.append(tuple(p[idx:idx+ntables])) idx += ntables yield prod, tuple(ret)
Increase size of test docs
Increase size of test docs
Python
bsd-3-clause
datamicroscopes/lda,datamicroscopes/lda,datamicroscopes/lda
8232832bbae07c485b5600542c09c5eb5adef7ff
ceilometer/service.py
ceilometer/service.py
#!/usr/bin/env python # -*- encoding: utf-8 -*- # # Copyright © 2012 eNovance <licensing@enovance.com> # # Author: Julien Danjou <julien@danjou.info> # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from ceilometer import log from ceilometer.openstack.common import cfg cfg.CONF.register_opts([ cfg.IntOpt('periodic_interval', default=60, help='seconds between running periodic tasks') ]) def prepare_service(argv=[]): cfg.CONF(argv[1:]) log.setup()
#!/usr/bin/env python # -*- encoding: utf-8 -*- # # Copyright © 2012 eNovance <licensing@enovance.com> # # Author: Julien Danjou <julien@danjou.info> # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova import flags from ceilometer import log from ceilometer.openstack.common import cfg cfg.CONF.register_opts([ cfg.IntOpt('periodic_interval', default=60, help='seconds between running periodic tasks') ]) def prepare_service(argv=[]): cfg.CONF(argv[1:]) # FIXME(dhellmann): We must set up the nova.flags module in order # to have the RPC and DB access work correctly because we are # still using the Service object out of nova directly. We need to # move that into openstack.common. flags.FLAGS(argv[1:]) log.setup()
Configure nova.flags as well as openstack.common.cfg
Configure nova.flags as well as openstack.common.cfg Because we are using nova classes directly (Service, Manager, etc.) we need to initialize the configuration modules that those classes use. Change-Id: Idafd4a8346fc59332114ea7536893470bf9eaff8
Python
apache-2.0
eayunstack/ceilometer,citrix-openstack-build/ceilometer,rickerc/ceilometer_audit,r-mibu/ceilometer,chungg/aodh,sileht/aodh,m1093782566/openstack_org_ceilometer,citrix-openstack-build/ceilometer,pczerkas/aodh,froyobin/ceilometer,luogangyi/Ceilometer-oVirt,idegtiarov/ceilometer,maestro-hybrid-cloud/ceilometer,tanglei528/ceilometer,NeCTAR-RC/ceilometer,dreamhost/ceilometer,NeCTAR-RC/ceilometer,fabian4/ceilometer,cernops/ceilometer,rackerlabs/instrumented-ceilometer,openstack/ceilometer,ityaptin/ceilometer,rickerc/ceilometer_audit,maestro-hybrid-cloud/ceilometer,Juniper/ceilometer,froyobin/ceilometer,JioCloud/ceilometer,tanglei528/ceilometer,luogangyi/Ceilometer-oVirt,ityaptin/ceilometer,JioCloud/ceilometer,mathslinux/ceilometer,tanglei528/ceilometer,rackerlabs/instrumented-ceilometer,m1093782566/openstack_org_ceilometer,rackerlabs/instrumented-ceilometer,sileht/aodh,MisterPup/Ceilometer-Juno-Extension,mathslinux/ceilometer,shootstar/ctest,JioCloud/ceilometer,citrix-openstack-build/ceilometer,dreamhost/ceilometer,openstack/aodh,cernops/ceilometer,NeCTAR-RC/ceilometer,pkilambi/ceilometer,Juniper/ceilometer,redhat-openstack/ceilometer,shootstar/ctest,idegtiarov/ceilometer,rickerc/ceilometer_audit,chungg/aodh,openstack/aodh,r-mibu/ceilometer,redhat-openstack/ceilometer,pkilambi/ceilometer,isyippee/ceilometer,MisterPup/Ceilometer-Juno-Extension,fabian4/ceilometer,eayunstack/ceilometer,pczerkas/aodh,isyippee/ceilometer,openstack/ceilometer
ed8a5b8f34614997a13cdcda03dc4988c1cf4090
urls.py
urls.py
from django.conf.urls.defaults import patterns, include, url from django.contrib import admin from okupy.login.views import * admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'okupy.views.home', name='home'), # url(r'^okupy/', include('okupy.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), url(r'^$', mylogin) )
from django.conf.urls.defaults import patterns, include, url from django.contrib import admin from okupy.login.views import * admin.autodiscover() urlpatterns = patterns('', url(r'^$', mylogin) url(r'^admin/', include(admin.site.urls)), )
Remove comments, properly enable admin panel
Remove comments, properly enable admin panel
Python
agpl-3.0
gentoo/identity.gentoo.org,dastergon/identity.gentoo.org,gentoo/identity.gentoo.org,dastergon/identity.gentoo.org
68eb1bd58b84c1937f6f8d15bb9ea9f02a402e22
tests/cdscommon.py
tests/cdscommon.py
import hashlib import os import shutil import cdsapi SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data') EXTENSIONS = {'grib': '.grib', 'netcdf': '.nc'} def ensure_data(dataset, request, folder=SAMPLE_DATA_FOLDER, name='{uuid}.grib'): request_text = str(sorted(request.items())).encode('utf-8') uuid = hashlib.sha3_224(request_text).hexdigest()[:10] format = request.get('format', 'grib') ext = EXTENSIONS.get(format, '.bin') name = name.format(**locals()) path = os.path.join(SAMPLE_DATA_FOLDER, name) if not os.path.exists(path): c = cdsapi.Client() try: c.retrieve(dataset, request, target=path + '.tmp') shutil.move(path + '.tmp', path) except: os.unlink(path + '.tmp') raise return path def message_count(dataset, request, count=1): if dataset == 'reanalysis-era5-single-levels' \ and request.get('product_type') == 'ensemble_members': count = 20 elif dataset == 'reanalysis-era5-pressure-levels' \ and request.get('product_type') == 'ensemble_members': count = 20 elif dataset == 'seasonal-original-single-levels': count = 51 elif dataset.startswith('seasonal-') and request.get('product_type') == 'monthly_mean': count = 51 for key in ['year', 'month', 'day', 'time', 'leadtime_hour', 'leadtime_month']: value = request.get(key) if isinstance(value, list): count *= len(value) return count
import hashlib import os import shutil import cdsapi SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data') EXTENSIONS = {'grib': '.grib', 'netcdf': '.nc'} def ensure_data(dataset, request, folder=SAMPLE_DATA_FOLDER, name='{uuid}.grib'): request_text = str(sorted(request.items())).encode('utf-8') uuid = hashlib.sha3_224(request_text).hexdigest()[:10] format = request.get('format', 'grib') ext = EXTENSIONS.get(format, '.bin') name = name.format(**locals()) path = os.path.join(SAMPLE_DATA_FOLDER, name) if not os.path.exists(path): c = cdsapi.Client() try: c.retrieve(dataset, request, target=path + '.tmp') shutil.move(path + '.tmp', path) except: os.unlink(path + '.tmp') raise return path
Drop impossible to get right code.
Drop impossible to get right code.
Python
apache-2.0
ecmwf/cfgrib
db6b869eae416e72fa30b1d7271b0ed1d7dc1a55
sqlalchemy_json/__init__.py
sqlalchemy_json/__init__.py
from sqlalchemy.ext.mutable import ( Mutable, MutableDict) from sqlalchemy_utils.types.json import JSONType from . track import ( TrackedDict, TrackedList) __all__ = 'MutableJson', 'NestedMutableJson' class NestedMutableDict(TrackedDict, Mutable): @classmethod def coerce(cls, key, value): if isinstance(value, cls): return value if isinstance(value, dict): return cls(value) return super(cls).coerce(key, value) class NestedMutableList(TrackedList, Mutable): @classmethod def coerce(cls, key, value): if isinstance(value, cls): return value if isinstance(value, list): return cls(value) return super(cls).coerce(key, value) class NestedMutable(Mutable): """SQLAlchemy `mutable` extension with nested change tracking.""" @classmethod def coerce(cls, key, value): """Convert plain dictionary to NestedMutable.""" if isinstance(value, cls): return value if isinstance(value, dict): return NestedMutableDict.coerce(key, value) if isinstance(value, list): return NestedMutableList.coerce(key, value) return super(cls).coerce(key, value) class MutableJson(JSONType): """JSON type for SQLAlchemy with change tracking at top level.""" class NestedMutableJson(JSONType): """JSON type for SQLAlchemy with nested change tracking.""" MutableDict.associate_with(MutableJson) NestedMutable.associate_with(NestedMutableJson)
from sqlalchemy.ext.mutable import ( Mutable, MutableDict) from sqlalchemy_utils.types.json import JSONType from . track import ( TrackedDict, TrackedList) __all__ = 'MutableJson', 'NestedMutableJson' class NestedMutableDict(TrackedDict, Mutable): @classmethod def coerce(cls, key, value): if isinstance(value, cls): return value if isinstance(value, dict): return cls(value) return super(cls).coerce(key, value) class NestedMutableList(TrackedList, Mutable): @classmethod def coerce(cls, key, value): if isinstance(value, cls): return value if isinstance(value, list): return cls(value) return super(cls).coerce(key, value) class NestedMutable(Mutable): """SQLAlchemy `mutable` extension with nested change tracking.""" @classmethod def coerce(cls, key, value): """Convert plain dictionary to NestedMutable.""" if value is None: return value if isinstance(value, cls): return value if isinstance(value, dict): return NestedMutableDict.coerce(key, value) if isinstance(value, list): return NestedMutableList.coerce(key, value) return super(cls).coerce(key, value) class MutableJson(JSONType): """JSON type for SQLAlchemy with change tracking at top level.""" class NestedMutableJson(JSONType): """JSON type for SQLAlchemy with nested change tracking.""" MutableDict.associate_with(MutableJson) NestedMutable.associate_with(NestedMutableJson)
Fix error when setting JSON value to be `None`
Fix error when setting JSON value to be `None` Previously this would raise an attribute error as `None` does not have the `coerce` attribute.
Python
bsd-2-clause
edelooff/sqlalchemy-json
edf95105b7522b115dd4d3882ed57e707126c6af
timepiece/admin.py
timepiece/admin.py
from django.contrib import admin from django.contrib.auth.models import Permission from django.contrib.contenttypes.models import ContentType class PermissionAdmin(admin.ModelAdmin): list_display = ['__unicode__', 'codename'] list_filter = ['content_type__app_label'] class ContentTypeAdmin(admin.ModelAdmin): list_display = ['id', 'app_label', 'model'] list_filter = ['app_label'] admin.site.register(Permission, PermissionAdmin) admin.site.register(ContentType, ContentTypeAdmin)
from django.contrib import admin from django.contrib.auth.models import Permission from django.contrib.contenttypes.models import ContentType class PermissionAdmin(admin.ModelAdmin): list_display = ['content_type', 'codename', 'name'] list_filter = ['content_type__app_label'] class ContentTypeAdmin(admin.ModelAdmin): list_display = ['id', 'app_label', 'model'] list_filter = ['app_label'] admin.site.register(Permission, PermissionAdmin) admin.site.register(ContentType, ContentTypeAdmin)
Update Python/Django: Remove unnecessary reference to __unicode__
Update Python/Django: Remove unnecessary reference to __unicode__
Python
mit
BocuStudio/django-timepiece,caktus/django-timepiece,arbitrahj/django-timepiece,caktus/django-timepiece,arbitrahj/django-timepiece,BocuStudio/django-timepiece,caktus/django-timepiece,BocuStudio/django-timepiece,arbitrahj/django-timepiece
20017da43fe1bf5287b33d9d2fc7f597850bb5b5
readthedocs/settings/proxito/base.py
readthedocs/settings/proxito/base.py
""" Base settings for Proxito Some of these settings will eventually be backported into the main settings file, but currently we have them to be able to run the site with the old middleware for a staged rollout of the proxito code. """ class CommunityProxitoSettingsMixin: ROOT_URLCONF = 'readthedocs.proxito.urls' USE_SUBDOMAIN = True @property def MIDDLEWARE(self): # noqa # Use our new middleware instead of the old one classes = super().MIDDLEWARE classes = list(classes) index = classes.index( 'readthedocs.core.middleware.SubdomainMiddleware' ) classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware' middleware_to_remove = ( 'readthedocs.core.middleware.SingleVersionMiddleware', 'csp.middleware.CSPMiddleware', ) for mw in middleware_to_remove: if mw in classes: classes.remove(mw) return classes
""" Base settings for Proxito Some of these settings will eventually be backported into the main settings file, but currently we have them to be able to run the site with the old middleware for a staged rollout of the proxito code. """ class CommunityProxitoSettingsMixin: ROOT_URLCONF = 'readthedocs.proxito.urls' USE_SUBDOMAIN = True @property def DATABASES(self): # This keeps connections to the DB alive, # which reduces latency with connecting to postgres dbs = getattr(super(), 'DATABASES', {}) for db in dbs.keys(): dbs[db]['CONN_MAX_AGE'] = 86400 return dbs @property def MIDDLEWARE(self): # noqa # Use our new middleware instead of the old one classes = super().MIDDLEWARE classes = list(classes) index = classes.index( 'readthedocs.core.middleware.SubdomainMiddleware' ) classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware' middleware_to_remove = ( 'readthedocs.core.middleware.SingleVersionMiddleware', 'csp.middleware.CSPMiddleware', ) for mw in middleware_to_remove: if mw in classes: classes.remove(mw) return classes
Expand the logic in our proxito mixin.
Expand the logic in our proxito mixin. This makes proxito mixin match production for .com/.org in the areas where we are overriding the same things.
Python
mit
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
b021fa0335414d3693aabf4c32b7219f0ba33369
holviapi/tests/test_api_idempotent.py
holviapi/tests/test_api_idempotent.py
# -*- coding: utf-8 -*- import os import pytest import holviapi @pytest.fixture def connection(): pool = os.environ.get('HOLVI_POOL', None) key = os.environ.get('HOLVI_KEY', None) if not pool or not key: raise RuntimeError("HOLVI_POOL and HOLVI_KEY must be in ENV for these tests") cnc = holviapi.Connection(pool,key) return cnc @pytest.fixture def invoiceapi(): cnc = connection() ia = holviapi.InvoiceAPI(cnc) return ia def test_list_invoices(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice def test_get_invoice(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice i2 = invoiceapi.get_invoice(i.code) assert i.code == i2.code
# -*- coding: utf-8 -*- import os import pytest import holviapi @pytest.fixture def connection(): pool = os.environ.get('HOLVI_POOL', None) key = os.environ.get('HOLVI_KEY', None) if not pool or not key: raise RuntimeError("HOLVI_POOL and HOLVI_KEY must be in ENV for these tests") cnc = holviapi.Connection(pool,key) return cnc @pytest.fixture def invoiceapi(): cnc = connection() ia = holviapi.InvoiceAPI(cnc) return ia @pytest.fixture def categoriesapi(): cnc = connection() ca = holviapi.CategoriesAPI(cnc) return ca def test_list_invoices(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice def test_get_invoice(invoiceapi): l = invoiceapi.list_invoices() i = next(l) assert type(i) == holviapi.Invoice i2 = invoiceapi.get_invoice(i.code) assert i.code == i2.code def test_list_income_categories(categoriesapi): l = categoriesapi.list_income_categories() c = next(l) assert type(c) == holviapi.IncomeCategory def test_list_expense_categories(categoriesapi): l = categoriesapi.list_expense_categories() c = next(l) assert type(c) == holviapi.ExpenseCategory def test_get_category(categoriesapi): l = categoriesapi.list_income_categories() c = next(l) assert type(c) == holviapi.IncomeCategory c2 = categoriesapi.get_category(c.code) assert c.code == c2.code
Add more tests against live API
Add more tests against live API
Python
mit
rambo/python-holviapi,rambo/python-holviapi
183548fcd094482e1ef3147e530d7eedac4685be
launch_control/commands/dispatcher.py
launch_control/commands/dispatcher.py
""" Module with LaunchControlDispatcher - the command dispatcher """ import argparse from .interface import Command class LaunchControlDispatcher(object): """ Class implementing command line interface for launch control """ def __init__(self): self.parser = argparse.ArgumentParser( description=""" Command line tool for interacting with Launch Control """, epilog=""" Please report all bugs using the Launchpad bug tracker: http://bugs.launchpad.net/launch-control/+filebug """, add_help=False) self.subparsers = self.parser.add_subparsers(title="Sub-command to invoke") for command_cls in Command.get_subclasses(): sub_parser = self.subparsers.add_parser( command_cls.get_name(), help=command_cls.get_help()) sub_parser.set_defaults(command_cls=command_cls) command_cls.register_arguments(sub_parser) def dispatch(self): args = self.parser.parse_args() command = args.command_cls(self.parser, args) command.invoke(args) def main(): LaunchControlDispatcher().dispatch()
""" Module with LaunchControlDispatcher - the command dispatcher """ import argparse from .interface import Command class LaunchControlDispatcher(object): """ Class implementing command line interface for launch control """ def __init__(self): self.parser = argparse.ArgumentParser( description=""" Command line tool for interacting with Launch Control """, epilog=""" Please report all bugs using the Launchpad bug tracker: http://bugs.launchpad.net/launch-control/+filebug """, add_help=False) self.subparsers = self.parser.add_subparsers(title="Sub-command to invoke") for command_cls in Command.get_subclasses(): sub_parser = self.subparsers.add_parser( command_cls.get_name(), help=command_cls.get_help()) sub_parser.set_defaults(command_cls=command_cls) command_cls.register_arguments(sub_parser) def dispatch(self, args=None): args = self.parser.parse_args(args) command = args.command_cls(self.parser, args) command.invoke(args) def main(): LaunchControlDispatcher().dispatch()
Add ability to pass explicit arguments to LaunchControlDispatcher
Add ability to pass explicit arguments to LaunchControlDispatcher
Python
agpl-3.0
OSSystems/lava-server,OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server
ce639400d48462bdc593e20d13979c33ed4c7fe9
commands/globaladd.py
commands/globaladd.py
from devbot import chat


def call(message: str, name, protocol, cfg, commands):
    if ' ' in message:
        chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
        return
    chat.say('/msg {} Invited {} to GlobalChat'.format(name, message))
    chat.say_wrap('/msg {}'.format(message), 'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.')
    chat.say('/nlip GlobalChat {}'.format(message))
from devbot import chat


def call(message: str, name, protocol, cfg, commands):
    if message is '':
        chat.say('/msg {} {}'.format(name, commands['help']['globaladd'].format('globaladd')))
        return
    if ' ' in message:
        chat.say('/msg {} Sorry, that was not a valid player name: It contains spaces.'.format(name))
        return
    chat.say('/msg {} Invited {} to GlobalChat'.format(name, message))
    chat.say_wrap('/msg {}'.format(message), 'You have been added to global chat. Use /g GlobalChat to speak in the group, and /e to exit.')
    chat.say('/nlip GlobalChat {}'.format(message))
Fix missing command crash with gadd
Fix missing command crash with gadd
Python
mit
Ameliorate/DevotedBot,Ameliorate/DevotedBot
300e946cd72561c69141f65768debed9d0682abb
utils/run_tests.py
utils/run_tests.py
#!/usr/bin/env python """ Run Arista Transcode Tests ========================== Generate test files in various formats and transcode them to all available output devices and qualities. """ import os import subprocess import sys sys.path.append(os.path.dirname(os.path.dirname(__file__))) import arista; arista.init() if not os.path.exists("tests"): os.system("./utils/generate_tests.py") files = os.listdir("tests") status = [] try: for id, device in arista.presets.get().items(): for file in files: print device.make + " " + device.model + ": " + file cmd = "./arista-transcode -q -d %s tests/%s test_output" % (id, file) print cmd ret = subprocess.call(cmd, shell=True) if ret: status.append([file, device, True]) else: status.append([file, device, False]) print "Report" print "======" for file, device, failed in status: if failed: print device.make + " " + device.model + " (" + \ file + "): Failed" else: print device.make + " " + device.model + " (" + \ file + "): Succeeded" print "Tests completed." except KeyboardInterrupt: pass
#!/usr/bin/env python """ Run Arista Transcode Tests ========================== Generate test files in various formats and transcode them to all available output devices and qualities. """ import os import subprocess import sys sys.path.append(os.path.dirname(os.path.dirname(__file__))) import arista; arista.init() if not os.path.exists("tests"): os.system("./utils/generate_tests.py") files = os.listdir("tests") status = [] try: for id, device in arista.presets.get().items(): for file in files: print device.make + " " + device.model + ": " + file cmd = "./arista-transcode -q -d %s -o test_output tests/%s" % (id, file) print cmd ret = subprocess.call(cmd, shell=True) if ret: status.append([file, device, True]) else: status.append([file, device, False]) except KeyboardInterrupt: pass print "Report" print "======" for file, device, failed in status: if failed: print device.make + " " + device.model + " (" + \ file + "): Failed" else: print device.make + " " + device.model + " (" + \ file + "): Succeeded" print "Tests completed."
Update test runner syntax to the new arista-transcode syntax and always output a status report even if the user stops the tests early.
Update test runner syntax to the new arista-transcode syntax and always output a status report even if the user stops the tests early.
Python
lgpl-2.1
danielgtaylor/arista,danielgtaylor/arista
b4e5a284201d6d25607ff54aedcf6082e8a4d621
st2client/st2client/models/reactor.py
st2client/st2client/models/reactor.py
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from st2client.models import core LOG = logging.getLogger(__name__) class Sensor(core.Resource): _plural = 'Sensortypes' _repr_attributes = ['name', 'pack'] class TriggerType(core.Resource): _alias = 'Trigger' _display_name = 'Trigger' _plural = 'Triggertypes' _plural_display_name = 'Triggers' _repr_attributes = ['name', 'pack'] class Rule(core.Resource): _plural = 'Rules'
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from st2client.models import core LOG = logging.getLogger(__name__) class Sensor(core.Resource): _plural = 'Sensortypes' _repr_attributes = ['name', 'pack'] class TriggerType(core.Resource): _alias = 'Trigger' _display_name = 'Trigger' _plural = 'Triggertypes' _plural_display_name = 'Triggers' _repr_attributes = ['name', 'pack'] class Trigger(core.Resource): _alias = 'TriggerSpecification' _display_name = 'TriggerSpecification' _plural = 'Triggers' _plural_display_name = 'Triggers' _repr_attributes = ['name', 'pack'] class Rule(core.Resource): _plural = 'Rules'
Add Trigger model to client and alias it as TriggerSpecification.
Add Trigger model to client and alias it as TriggerSpecification.
Python
apache-2.0
pinterb/st2,peak6/st2,pixelrebel/st2,jtopjian/st2,pixelrebel/st2,alfasin/st2,pinterb/st2,Itxaka/st2,Plexxi/st2,lakshmi-kannan/st2,Itxaka/st2,grengojbo/st2,Plexxi/st2,jtopjian/st2,punalpatel/st2,punalpatel/st2,Plexxi/st2,nzlosh/st2,armab/st2,StackStorm/st2,punalpatel/st2,dennybaa/st2,nzlosh/st2,pixelrebel/st2,peak6/st2,armab/st2,jtopjian/st2,alfasin/st2,emedvedev/st2,tonybaloney/st2,emedvedev/st2,armab/st2,emedvedev/st2,pinterb/st2,dennybaa/st2,tonybaloney/st2,lakshmi-kannan/st2,dennybaa/st2,alfasin/st2,tonybaloney/st2,StackStorm/st2,nzlosh/st2,Plexxi/st2,nzlosh/st2,peak6/st2,Itxaka/st2,grengojbo/st2,lakshmi-kannan/st2,grengojbo/st2,StackStorm/st2,StackStorm/st2
0903b18d1e4213cb88aa8cfcd0eb473ae54aa40b
shop/models/fields.py
shop/models/fields.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import connection from shop.apps import get_tuple_version try: if str(connection.vendor) == 'postgresql': import psycopg2 psycopg2_version = get_tuple_version(psycopg2.__version__[:5]) with connection.cursor() as cursor: cursor.execute("SELECT version()") row = cursor.fetchone()[:17] postgres_version = get_tuple_version(str(row[0][:17].split(' ')[1])) # To be able to use the Django version of JSONField, it requires to have # PostgreSQL ≥ 9.4 and psycopg2 ≥ 2.5.4, otherwise some issues could be faced. if (psycopg2_version) >= (2, 5, 4) and (postgres_version >= (9, 4)): from django.contrib.postgres.fields import JSONField as _JSONField else: raise ImportError else: raise ImportError except ImportError: from jsonfield.fields import JSONField as _JSONField class JSONField(_JSONField): def __init__(self, *args, **kwargs): kwargs.update({'default': {}}) super(JSONField, self).__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super(JSONField, self).deconstruct() del kwargs['default'] return name, path, args, kwargs
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.postgresql': from django.contrib.postgres.fields import JSONField as _JSONField else: from jsonfield.fields import JSONField as _JSONField class JSONField(_JSONField): def __init__(self, *args, **kwargs): kwargs.update({'default': {}}) super(JSONField, self).__init__(*args, **kwargs) def deconstruct(self): name, path, args, kwargs = super(JSONField, self).deconstruct() del kwargs['default'] return name, path, args, kwargs
Fix and simplify the JSONfield wrapper code
Fix and simplify the JSONfield wrapper code
Python
bsd-3-clause
jrief/django-shop,awesto/django-shop,nimbis/django-shop,khchine5/django-shop,jrief/django-shop,nimbis/django-shop,awesto/django-shop,nimbis/django-shop,khchine5/django-shop,divio/django-shop,nimbis/django-shop,jrief/django-shop,divio/django-shop,awesto/django-shop,divio/django-shop,khchine5/django-shop,khchine5/django-shop,jrief/django-shop
e33b0e36d3e6dd7d374f1cf8846324275da0d331
tweets/models.py
tweets/models.py
from django.conf import settings
from django.db import models


class HashTag(models.Model):
    # The hash tag length can't be more than the body length minus the `#`
    text = models.CharField(max_length=139)

    def __str__(self):
        return self.text


class Message(models.Model):
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="messages")
    text = models.CharField(max_length=140)
    created_at = models.DateTimeField(auto_now_add=True)
    stars = models.ManyToManyField(
        settings.AUTH_USER_MODEL, related_name="starred_messages", blank=True)
    tagged_users = models.ManyToManyField(
        settings.AUTH_USER_MODEL, related_name="messages_tagged_in", blank=True)
    hash_tags = models.ManyToManyField(HashTag, blank=True)

    def __str__(self):
        return self.text
from django.conf import settings
from django.db import models


class Hashtag(models.Model):
    # The hash tag length can't be more than the body length minus the `#`
    text = models.CharField(max_length=139)

    def __str__(self):
        return self.text


class Message(models.Model):
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="messages")
    text = models.CharField(max_length=140)
    created_at = models.DateTimeField(auto_now_add=True)
    stars = models.ManyToManyField(
        settings.AUTH_USER_MODEL, related_name="starred_messages", blank=True)
    tagged_users = models.ManyToManyField(
        settings.AUTH_USER_MODEL, related_name="messages_tagged_in", blank=True)
    hash_tags = models.ManyToManyField(HashTag, blank=True)

    def __str__(self):
        return self.text
Adjust hashtag to be consistently one word
Adjust hashtag to be consistently one word
Python
mit
pennomi/openwest2015-twitter-clone,pennomi/openwest2015-twitter-clone,pennomi/openwest2015-twitter-clone
458211091f4408136a4eb6e6a06849d93c3ede8a
tests/test_convert.py
tests/test_convert.py
import pytest  # type: ignore
from hypothesis import given

from ppb_vector import Vector2
from utils import vector_likes, vectors


class V(Vector2): pass


@pytest.mark.parametrize('vector_like', vector_likes(), ids=lambda x: type(x).__name__)  # type: ignore
@pytest.mark.parametrize('cls', [Vector2, V])  # type: ignore
def test_convert_class(cls, vector_like):
    vector = cls.convert(vector_like)
    assert isinstance(vector, cls)
    assert vector == vector_like


@given(vector=vectors())
def test_convert_tuple(vector: Vector2):
    assert vector == tuple(vector) == (vector.x, vector.y)
import pytest  # type: ignore
from hypothesis import given

from ppb_vector import Vector2
from utils import vector_likes, vectors


class V(Vector2): pass


@pytest.mark.parametrize('vector_like', vector_likes(), ids=lambda x: type(x).__name__)  # type: ignore
@pytest.mark.parametrize('cls', [Vector2, V])  # type: ignore
def test_convert_class(cls, vector_like):
    vector = cls.convert(vector_like)
    assert isinstance(vector, cls)
    assert vector == vector_like


@given(vector=vectors())
def test_convert_tuple(vector: Vector2):
    assert vector == tuple(vector) == (vector.x, vector.y)


@given(vector=vectors())
def test_convert_list(vector: Vector2):
    assert vector == list(vector) == [vector.x, vector.y]
Add a list conversion test
tests/convert: Add a list conversion test
Python
artistic-2.0
ppb/ppb-vector,ppb/ppb-vector
3739b760e2ddf2a741dd6b4683780ffbd11ef33c
osrframework/__init__.py
osrframework/__init__.py
# -*- coding: cp1252 -*-
#
##################################################################################
#
# Copyright 2014-2017 Félix Brezo and Yaiza Rubio (i3visio, contacto@i3visio.com)
#
# OSRFramework is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##################################################################################

import osrframework.utils.logger

# Calling the logger when being imported
osrframework.utils.logger.setupLogger(loggerName="osrframework")

__version__="0.15.0rc7"
# -*- coding: cp1252 -*-
#
##################################################################################
#
# Copyright 2014-2017 Félix Brezo and Yaiza Rubio (i3visio, contacto@i3visio.com)
#
# OSRFramework is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##################################################################################

import osrframework.utils.logger

# Calling the logger when being imported
osrframework.utils.logger.setupLogger(loggerName="osrframework")

__version__="0.15.0rc8"
Prepare 0.15.0rc8 to release it as 0.15.0
Prepare 0.15.0rc8 to release it as 0.15.0
Python
agpl-3.0
i3visio/osrframework
bd8901c18a6722660e7af742260ae4b8317a064b
youtube/tasks.py
youtube/tasks.py
import subprocess import os from pathlib import Path from invoke import task @task def update(ctx): """ Update youtube-dl """ cmd = ['pipenv', 'update', 'youtube-dl'] subprocess.call(cmd) @task def clean(ctx): """ Clean up files """ import main def rm(file_): if file_.exists(): os.remove(file_) rm(main.json_file) for file_ in main.download_dir.iterdir(): if file_.name != '.gitkeep': os.remove(file_) @task def playlist(ctx): """ Process YouTube playlist """ import main main.process_playlist() @task def link(ctx, url): """ Process video link """ import main main.process_link(url)
import subprocess import os from pathlib import Path from invoke import task @task def update(ctx): """ Update dependencies such as youtube-dl, etc. """ subprocess.call(['pipenv', 'update']) @task def clean(ctx): """ Clean up files """ import main def rm(file_): if file_.exists(): os.remove(file_) rm(main.json_file) for file_ in main.download_dir.iterdir(): if file_.name != '.gitkeep': os.remove(file_) @task def playlist(ctx): """ Process YouTube playlist """ import main main.process_playlist() @task def link(ctx, url): """ Process video link """ import main main.process_link(url)
Update task now updates all dependencies
Update task now updates all dependencies
Python
apache-2.0
feihong/chinese-music-processors,feihong/chinese-music-processors
ec6a8374fd0e1411b52bfcb12e2a8a599d025287
src/AppiumLibrary/keywords/_logging.py
src/AppiumLibrary/keywords/_logging.py
# -*- coding: utf-8 -*- import os import sys from robot.variables import GLOBAL_VARIABLES from robot.api import logger from keywordgroup import KeywordGroup class _LoggingKeywords(KeywordGroup): # Private def _debug(self, message): logger.debug(message) def _get_log_dir(self): logfile = GLOBAL_VARIABLES['${LOG FILE}'] if logfile != 'NONE': return os.path.dirname(logfile) return GLOBAL_VARIABLES['${OUTPUTDIR}'] def _html(self, message): logger.info(message, True, False) def _info(self, message): logger.info(message) def _log(self, message, level='INFO'): level = level.upper() if (level == 'INFO'): self._info(message) elif (level == 'DEBUG'): self._debug(message) elif (level == 'WARN'): self._warn(message) elif (level == 'HTML'): self._html(message) def _log_list(self, items, what='item'): msg = ['Altogether %d %s%s.' % (len(items), what, ['s',''][len(items)==1])] for index, item in enumerate(items): msg.append('%d: %s' % (index+1, item)) self._info('\n'.join(msg)) return items def _warn(self, message): logger.warn(message)
# -*- coding: utf-8 -*- import os import sys from robot.libraries.BuiltIn import BuiltIn from robot.api import logger from keywordgroup import KeywordGroup class _LoggingKeywords(KeywordGroup): # Private def _debug(self, message): logger.debug(message) def _get_log_dir(self): variables = BuiltIn().get_variables() logfile = variables['${LOG FILE}'] if logfile != 'NONE': return os.path.dirname(logfile) return variables['${OUTPUTDIR}'] def _html(self, message): logger.info(message, True, False) def _info(self, message): logger.info(message) def _log(self, message, level='INFO'): level = level.upper() if (level == 'INFO'): self._info(message) elif (level == 'DEBUG'): self._debug(message) elif (level == 'WARN'): self._warn(message) elif (level == 'HTML'): self._html(message) def _log_list(self, items, what='item'): msg = ['Altogether %d %s%s.' % (len(items), what, ['s',''][len(items)==1])] for index, item in enumerate(items): msg.append('%d: %s' % (index+1, item)) self._info('\n'.join(msg)) return items def _warn(self, message): logger.warn(message)
Update BuiltIn library reference for RF 2.9 compatibility
Update BuiltIn library reference for RF 2.9 compatibility
Python
apache-2.0
jollychang/robotframework-appiumlibrary,yahman72/robotframework-appiumlibrary,lzhang1/robotframework-appiumlibrary,Meallia/robotframework-appiumlibrary,minhnguyenphuonghoang/robotframework-appiumlibrary,valaxy/robotframework-appiumlibrary,serhatbolsu/robotframework-appiumlibrary
532df8a669d7e54125c102ef4821272dc24aab23
weasyprint/logger.py
weasyprint/logger.py
# coding: utf-8
"""
    weasyprint.logging
    ------------------

    Logging setup.

    The rest of the code gets the logger through this module rather than
    ``logging.getLogger`` to make sure that it is configured.

    :copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
    :license: BSD, see LICENSE for details.

"""

from __future__ import division, unicode_literals

import logging


LOGGER = logging.getLogger('weasyprint')

# Default to logging to stderr.
if not LOGGER.handlers:
    LOGGER.addHandler(logging.StreamHandler())

if LOGGER.level == logging.NOTSET:
    LOGGER.setLevel(logging.INFO)
# coding: utf-8
"""
    weasyprint.logging
    ------------------

    Logging setup.

    The rest of the code gets the logger through this module rather than
    ``logging.getLogger`` to make sure that it is configured.

    :copyright: Copyright 2011-2014 Simon Sapin and contributors, see AUTHORS.
    :license: BSD, see LICENSE for details.

"""

from __future__ import division, unicode_literals

import logging


LOGGER = logging.getLogger('weasyprint')

# Default to logging to stderr.
if not LOGGER.handlers:
    handler = logging.StreamHandler()
    formatter = logging.Formatter('%(levelname)s: %(message)s')
    handler.setFormatter(formatter)
    LOGGER.addHandler(handler)

if LOGGER.level == logging.NOTSET:
    LOGGER.setLevel(logging.INFO)
Add a better default formatter for logs
Add a better default formatter for logs
Python
bsd-3-clause
Kozea/WeasyPrint,Kozea/WeasyPrint
14b8a2a689414e65efda9b466db430ed09f777d5
panoptes_client/utils.py
panoptes_client/utils.py
from __future__ import absolute_import, division, print_function from builtins import range import functools ITERABLE_TYPES = ( list, set, tuple, ) try: from numpy import ndarray ITERABLE_TYPES = ITERABLE_TYPES + (ndarray,) except ImportError: pass def isiterable(v): return isinstance(v, ITERABLE_TYPES) def batchable(func=None, batch_size=100): def do_batch(*args, **kwargs): _batch_size = kwargs.pop('batch_size', batch_size) if isiterable(args[0]): _self = None to_batch = args[0] args = args[1:] else: _self = args[0] to_batch = args[1] args = args[2:] if not isiterable(to_batch): to_batch = [to_batch] if isinstance(to_batch, set): to_batch = list(to_batch) for _batch in [ to_batch[i:i+_batch_size] for i in range(0, len(to_batch), _batch_size) ]: if _self is None: func(_batch, *args, **kwargs) else: func(_self, _batch, *args, **kwargs) if func is None: return functools.partial(batchable, batch_size=batch_size) return do_batch
from __future__ import absolute_import, division, print_function from builtins import range import functools ITERABLE_TYPES = ( list, set, tuple, ) MISSING_POSITIONAL_ERR = 'Required positional argument (pos 1) not found' try: from numpy import ndarray ITERABLE_TYPES = ITERABLE_TYPES + (ndarray,) except ImportError: pass def isiterable(v): return isinstance(v, ITERABLE_TYPES) def batchable(func=None, batch_size=100): def do_batch(*args, **kwargs): if len(args) == 0: raise TypeError(MISSING_POSITIONAL_ERR) _batch_size = kwargs.pop('batch_size', batch_size) if isiterable(args[0]): _self = None to_batch = args[0] args = args[1:] else: if len(args) == 1: raise TypeError(MISSING_POSITIONAL_ERR) _self = args[0] to_batch = args[1] args = args[2:] if not isiterable(to_batch): to_batch = [to_batch] if isinstance(to_batch, set): to_batch = list(to_batch) for _batch in [ to_batch[i:i+_batch_size] for i in range(0, len(to_batch), _batch_size) ]: if _self is None: func(_batch, *args, **kwargs) else: func(_self, _batch, *args, **kwargs) if func is None: return functools.partial(batchable, batch_size=batch_size) return do_batch
Raise TypeError if positional batchable argument is missing
Raise TypeError if positional batchable argument is missing e.g. if it's erroneously been passed as a named argument.
Python
apache-2.0
zooniverse/panoptes-python-client
6049a916ea3adfe4ef8a7ae9dbfc918b69907ef4
OnionLauncher/main.py
OnionLauncher/main.py
import sys from PyQt5.QtWidgets import QApplication, QMainWindow from PyQt5.uic import loadUi class MainWindow(QMainWindow): def __init__(self, *args): super(MainWindow, self).__init__(*args) loadUi("ui_files/main.ui", self) self.tbAdd.clicked.connect(self.addRow) self.tbRemove.clicked.connect(self.removeRow) def addRow(self): rowPos = self.twSettings.rowCount() self.twSettings.insertRow(rowPos) def removeRow(self): rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes())) rows.reverse() for row in rows: self.twSettings.removeRow(row) if __name__ == "__main__": app = QApplication(sys.argv) mw = MainWindow() mw.show() sys.exit(app.exec_())
import sys from PyQt5.QtWidgets import QApplication, QMainWindow from PyQt5.uic import loadUi class MainWindow(QMainWindow): def __init__(self, *args): super(MainWindow, self).__init__(*args) loadUi("ui_files/main.ui", self) buttons = { self.tbAdd: self.addRow, self.tbRemove: self.removeRow, self.btnSwitchTor: self.switchTor, } self.evAddClick(buttons) def evAddClick(self, obj_dict): for obj in obj_dict: obj.clicked.connect(obj_dict[obj]) def addRow(self): rowPos = self.twSettings.rowCount() self.twSettings.insertRow(rowPos) def removeRow(self): rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes())) rows.reverse() for row in rows: self.twSettings.removeRow(row) def switchTor(self): pass if __name__ == "__main__": app = QApplication(sys.argv) mw = MainWindow() mw.show() sys.exit(app.exec_())
Put mouse clicks in it's own dictionary
Put mouse clicks in it's own dictionary
Python
bsd-2-clause
neelchauhan/OnionLauncher
5e2021b33cc9e45e9e6ddfaa1aee928c583b868e
pskb_website/__init__.py
pskb_website/__init__.py
import os

from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy

app = Flask(__name__)

# Running on heroku
if 'HEROKU' in os.environ:
    from example_config import HEROKU_ENV_REQUIREMENTS

    # example_config.py provides a blueprint for which variables to look for in
    # the environment and set in our app config.
    for var in HEROKU_ENV_REQUIREMENTS:
        app.config.setdefault(var, os.environ[var])

    if 'DEBUG' in os.environ:
        app.config.setdefault('debug', True)
else:
    app.config.from_object(os.environ['APP_SETTINGS'])

app.secret_key = app.config['SECRET_KEY']

db = SQLAlchemy(app)

import pskb_website.views
import os

from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy

app = Flask(__name__)

# Running on heroku
if 'HEROKU' in os.environ:
    from example_config import HEROKU_ENV_REQUIREMENTS

    # example_config.py provides a blueprint for which variables to look for in
    # the environment and set in our app config.
    for var in HEROKU_ENV_REQUIREMENTS:
        app.config.setdefault(var, os.environ[var])

    if 'DEBUG' in os.environ:
        app.config.setdefault('debug', True)

        print 'Config values'
        for key, value in app.config.iteritems():
            print key, value
else:
    app.config.from_object(os.environ['APP_SETTINGS'])

app.secret_key = app.config['SECRET_KEY']

db = SQLAlchemy(app)

import pskb_website.views
Print heroku if debug is true
Print heroku if debug is true
Python
agpl-3.0
paulocheque/guides-cms,paulocheque/guides-cms,paulocheque/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms
8a827d3e86cf2f6b9d36812e7058560ae120d4b2
tests/test_watson.py
tests/test_watson.py
from pywatson.watson import Watson


class TestWatson:
    def test_init(self, config):
        watson = Watson(url=config['url'], username=config['username'], password=config['password'])
from pywatson.answer.answer import Answer
from pywatson.watson import Watson


class TestWatson:
    def test_ask_question_basic(self, watson):
        answer = watson.ask_question('What is the Labour Code?')
        assert type(answer) is Answer
Add failing test for ask_question
Add failing test for ask_question
Python
mit
sherlocke/pywatson
7153c2be456084dfdd7cc346d62a6eb0fcaa2a31
teuthology/config.py
teuthology/config.py
import os import yaml import logging CONF_FILE = os.path.join(os.environ['HOME'], '.teuthology.yaml') log = logging.getLogger(__name__) class _Config(object): """ This class is intended to unify teuthology's many configuration files and objects. Currently it serves as a convenient interface to ~/.teuthology.yaml and nothing else. """ def __init__(self): if os.path.exists(CONF_FILE): self.__conf = yaml.safe_load(file(CONF_FILE)) else: log.debug("%s not found", CONF_FILE) self.__conf = {} # This property declaration exists mainly as an example; it is not # necessary unless you want to, say, define a set method and/or a # docstring. @property def lock_server(self): return self.__conf.get('lock_server') # This takes care of any and all of the rest. # If the parameter is defined, return it. Otherwise return None. def __getattr__(self, name): return self.__conf.get(name) config = _Config()
import os import yaml import logging CONF_FILE = os.path.join(os.environ['HOME'], '.teuthology.yaml') log = logging.getLogger(__name__) class _Config(object): """ This class is intended to unify teuthology's many configuration files and objects. Currently it serves as a convenient interface to ~/.teuthology.yaml and nothing else. """ def __init__(self): if os.path.exists(CONF_FILE): self.__conf = yaml.safe_load(file(CONF_FILE)) else: log.debug("%s not found", CONF_FILE) self.__conf = {} # This property declaration exists mainly as an example; it is not # necessary unless you want to, say, define a set method and/or a # docstring. @property def lock_server(self): """ The URL to your lock server. For example, Inktank uses: http://teuthology.front.sepia.ceph.com/locker/lock """ return self.__conf.get('lock_server') # This takes care of any and all of the rest. # If the parameter is defined, return it. Otherwise return None. def __getattr__(self, name): return self.__conf.get(name) config = _Config()
Add doc noting Inktank's lockserver URL
Add doc noting Inktank's lockserver URL Since I just removed it from lockstatus.py.
Python
mit
t-miyamae/teuthology,robbat2/teuthology,michaelsevilla/teuthology,michaelsevilla/teuthology,t-miyamae/teuthology,zhouyuan/teuthology,ivotron/teuthology,dreamhost/teuthology,ktdreyer/teuthology,dmick/teuthology,SUSE/teuthology,SUSE/teuthology,yghannam/teuthology,dmick/teuthology,dmick/teuthology,caibo2014/teuthology,ceph/teuthology,ivotron/teuthology,SUSE/teuthology,ceph/teuthology,ktdreyer/teuthology,yghannam/teuthology,dreamhost/teuthology,zhouyuan/teuthology,robbat2/teuthology,caibo2014/teuthology,tchaikov/teuthology,tchaikov/teuthology
685e0f7abb8658fe9ba0c54907f084561707c9cc
website/members/urls.py
website/members/urls.py
from django.conf.urls import url, include

from members import views

urlpatterns = [
    url(r'^profile/$', views.profile, name='profile'),
    # Views given by django auth library
    url(r'', include('django.contrib.auth.urls')),
]
from django.conf.urls import url, include

from members import views

urlpatterns = [
    url(r'^profile/$', views.profile, name='profile'),
    # Views given by django auth library
    url(r'', include('django.contrib.auth.urls')),
]
Add newline at the end of the file
:green_heart: Add newline at the end of the file
Python
agpl-3.0
Dekker1/moore,Dekker1/moore,UTNkar/moore,UTNkar/moore,UTNkar/moore,Dekker1/moore,Dekker1/moore,UTNkar/moore
de324cc798da8694bab510efd51de4bfda528df7
zinnia/views/entries.py
zinnia/views/entries.py
"""Views for Zinnia entries""" from django.views.generic.dates import BaseDateDetailView from zinnia.models.entry import Entry from zinnia.views.mixins.archives import ArchiveMixin from zinnia.views.mixins.entry_protection import EntryProtectionMixin from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin class EntryDateDetail(ArchiveMixin, EntryArchiveTemplateResponseMixin, CallableQuerysetMixin, BaseDateDetailView): """ Mixin combinating: - ArchiveMixin configuration centralizing conf for archive views - EntryArchiveTemplateResponseMixin to provide a custom templates depending on the date - BaseDateDetailView to retrieve the entry with date and slug - CallableQueryMixin to defer the execution of the *queryset* property when imported """ queryset = Entry.published.on_site class EntryDetail(EntryProtectionMixin, EntryDateDetail): """ Detailled view archive view for an Entry with password and login protections. """
"""Views for Zinnia entries""" from django.views.generic.dates import BaseDateDetailView from zinnia.models.entry import Entry from zinnia.views.mixins.archives import ArchiveMixin from zinnia.views.mixins.entry_preview import EntryPreviewMixin from zinnia.views.mixins.entry_protection import EntryProtectionMixin from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin class EntryDateDetail(ArchiveMixin, EntryArchiveTemplateResponseMixin, CallableQuerysetMixin, BaseDateDetailView): """ Mixin combinating: - ArchiveMixin configuration centralizing conf for archive views - EntryArchiveTemplateResponseMixin to provide a custom templates depending on the date - BaseDateDetailView to retrieve the entry with date and slug - CallableQueryMixin to defer the execution of the *queryset* property when imported """ queryset = Entry.published.on_site class EntryDetail(EntryPreviewMixin, EntryProtectionMixin, EntryDateDetail): """ Detailled archive view for an Entry with password and login protections and restricted preview. """
Implement the EntryPreviewMixin in the EntryDetail view
Implement the EntryPreviewMixin in the EntryDetail view
Python
bsd-3-clause
Maplecroft/django-blog-zinnia,ZuluPro/django-blog-zinnia,petecummings/django-blog-zinnia,Maplecroft/django-blog-zinnia,ZuluPro/django-blog-zinnia,petecummings/django-blog-zinnia,petecummings/django-blog-zinnia,aorzh/django-blog-zinnia,extertioner/django-blog-zinnia,Maplecroft/django-blog-zinnia,ghachey/django-blog-zinnia,bywbilly/django-blog-zinnia,ghachey/django-blog-zinnia,dapeng0802/django-blog-zinnia,Zopieux/django-blog-zinnia,dapeng0802/django-blog-zinnia,marctc/django-blog-zinnia,Fantomas42/django-blog-zinnia,aorzh/django-blog-zinnia,extertioner/django-blog-zinnia,Zopieux/django-blog-zinnia,bywbilly/django-blog-zinnia,Zopieux/django-blog-zinnia,dapeng0802/django-blog-zinnia,Fantomas42/django-blog-zinnia,1844144/django-blog-zinnia,Fantomas42/django-blog-zinnia,extertioner/django-blog-zinnia,marctc/django-blog-zinnia,aorzh/django-blog-zinnia,marctc/django-blog-zinnia,ghachey/django-blog-zinnia,bywbilly/django-blog-zinnia,ZuluPro/django-blog-zinnia,1844144/django-blog-zinnia,1844144/django-blog-zinnia
e93a321e3d137fb21a42d0e0bfd257a537be05d3
diy/parerga/config.py
diy/parerga/config.py
# -*- set coding: utf-8 -*-
import os

# directories constants
PARERGA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
PARERGA_ENTRY_DIR = os.path.join(PARERGA_ROOT_DIR, "p")
PARERGA_STATIC_DIR = os.path.join(PARERGA_ROOT_DIR, "static")
PARERGA_TEMPLATE_DIR = os.path.join(PARERGA_ROOT_DIR, "templates")

# database location
PARERGA_DB = os.path.join(PARERGA_ROOT_DIR, 'static', 'parerga.db')
# -*- set coding: utf-8 -*-
import os

# directories constants
PARERGA_ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PARERGA_ENTRY_DIR = os.path.join(PARERGA_ROOT_DIR, "p")
PARERGA_STATIC_DIR = os.path.join(PARERGA_ROOT_DIR, "static")
PARERGA_TEMPLATE_DIR = os.path.join(PARERGA_ROOT_DIR, "templates")

# database location
PARERGA_DB = os.path.join(PARERGA_ROOT_DIR, 'static', 'parerga.db')
Update path vars for the new source location
Update path vars for the new source location
Python
bsd-3-clause
nadirs/parerga,nadirs/parerga
37ddf8b41ee770a27c4eb59d27fc74fea60faaee
scipy/io/matlab/setup.py
scipy/io/matlab/setup.py
#!/usr/bin/env python

def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('matlab', parent_package, top_path)
    config.add_data_dir('tests')
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
#!/usr/bin/env python

def configuration(parent_package='io',top_path=None):
    from numpy.distutils.misc_util import Configuration
    config = Configuration('matlab', parent_package, top_path)
    config.add_data_dir('tests')
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
Fix parent package of io.matlab.
Fix parent package of io.matlab.
Python
bsd-3-clause
anielsen001/scipy,kleskjr/scipy,WarrenWeckesser/scipy,cpaulik/scipy,felipebetancur/scipy,dominicelse/scipy,Shaswat27/scipy,arokem/scipy,aarchiba/scipy,ilayn/scipy,fernand/scipy,piyush0609/scipy,surhudm/scipy,sauliusl/scipy,mtrbean/scipy,petebachant/scipy,Gillu13/scipy,argriffing/scipy,sonnyhu/scipy,jor-/scipy,arokem/scipy,jonycgn/scipy,gertingold/scipy,jonycgn/scipy,WarrenWeckesser/scipy,dch312/scipy,chatcannon/scipy,anntzer/scipy,aeklant/scipy,pbrod/scipy,josephcslater/scipy,WillieMaddox/scipy,endolith/scipy,josephcslater/scipy,jjhelmus/scipy,ales-erjavec/scipy,trankmichael/scipy,petebachant/scipy,Kamp9/scipy,witcxc/scipy,mingwpy/scipy,Dapid/scipy,rmcgibbo/scipy,mdhaber/scipy,aman-iitj/scipy,sauliusl/scipy,sonnyhu/scipy,chatcannon/scipy,ChanderG/scipy,argriffing/scipy,mortada/scipy,teoliphant/scipy,Newman101/scipy,larsmans/scipy,jjhelmus/scipy,mtrbean/scipy,felipebetancur/scipy,matthewalbani/scipy,mdhaber/scipy,haudren/scipy,zaxliu/scipy,niknow/scipy,grlee77/scipy,aeklant/scipy,larsmans/scipy,giorgiop/scipy,mtrbean/scipy,juliantaylor/scipy,zaxliu/scipy,grlee77/scipy,gef756/scipy,maniteja123/scipy,fredrikw/scipy,sauliusl/scipy,fernand/scipy,aman-iitj/scipy,bkendzior/scipy,witcxc/scipy,newemailjdm/scipy,newemailjdm/scipy,ales-erjavec/scipy,kalvdans/scipy,trankmichael/scipy,mhogg/scipy,petebachant/scipy,chatcannon/scipy,mikebenfield/scipy,nvoron23/scipy,behzadnouri/scipy,teoliphant/scipy,dominicelse/scipy,aarchiba/scipy,trankmichael/scipy,kleskjr/scipy,lukauskas/scipy,zxsted/scipy,jamestwebber/scipy,sargas/scipy,mgaitan/scipy,vigna/scipy,jor-/scipy,Shaswat27/scipy,piyush0609/scipy,pnedunuri/scipy,endolith/scipy,gfyoung/scipy,jamestwebber/scipy,mingwpy/scipy,kalvdans/scipy,fernand/scipy,efiring/scipy,hainm/scipy,raoulbq/scipy,jonycgn/scipy,jjhelmus/scipy,mortada/scipy,pizzathief/scipy,jonycgn/scipy,zerothi/scipy,ortylp/scipy,ogrisel/scipy,gfyoung/scipy,felipebetancur/scipy,gfyoung/scipy,rmcgibbo/scipy,njwilson23/scipy,lukauskas/scipy,futurulus/scipy,cpaulik/scipy,minhlongdo/scipy,zerothi/scipy,lhilt/scipy,apbard/scipy,ilayn/scipy,niknow/scipy,mtrbean/scipy,Dapid/scipy,vhaasteren/scipy,futurulus/scipy,jseabold/scipy,anntzer/scipy,njwilson23/scipy,vberaudi/scipy,mortonjt/scipy,pnedunuri/scipy,haudren/scipy,chatcannon/scipy,ales-erjavec/scipy,kalvdans/scipy,felipebetancur/scipy,ChanderG/scipy,witcxc/scipy,WillieMaddox/scipy,ilayn/scipy,anielsen001/scipy,Gillu13/scipy,surhudm/scipy,FRidh/scipy,pyramania/scipy,ndchorley/scipy,gdooper/scipy,futurulus/scipy,e-q/scipy,kleskjr/scipy,larsmans/scipy,gef756/scipy,person142/scipy,Stefan-Endres/scipy,pyramania/scipy,aman-iitj/scipy,vigna/scipy,cpaulik/scipy,hainm/scipy,sargas/scipy,tylerjereddy/scipy,zaxliu/scipy,kalvdans/scipy,Newman101/scipy,ogrisel/scipy,person142/scipy,cpaulik/scipy,argriffing/scipy,Newman101/scipy,richardotis/scipy,gef756/scipy,sauliusl/scipy,tylerjereddy/scipy,pbrod/scipy,josephcslater/scipy,raoulbq/scipy,jonycgn/scipy,ortylp/scipy,nonhermitian/scipy,jakevdp/scipy,gertingold/scipy,rmcgibbo/scipy,mingwpy/scipy,zaxliu/scipy,pschella/scipy,mortada/scipy,Dapid/scipy,maciejkula/scipy,mortonjt/scipy,woodscn/scipy,njwilson23/scipy,fredrikw/scipy,jjhelmus/scipy,ChanderG/scipy,newemailjdm/scipy,fredrikw/scipy,niknow/scipy,richardotis/scipy,perimosocordiae/scipy,sriki18/scipy,mhogg/scipy,nvoron23/scipy,dch312/scipy,maciejkula/scipy,richardotis/scipy,scipy/scipy,Newman101/scipy,sriki18/scipy,Eric89GXL/scipy,hainm/scipy,grlee77/scipy,pbrod/scipy,anielsen001/scipy,woodscn/scipy,felipebetancur/scipy,rgommers/scipy,fredrikw/scipy,ortylp/scipy,ogrise
l/scipy,mingwpy/scipy,andim/scipy,kleskjr/scipy,ortylp/scipy,Gillu13/scipy,Shaswat27/scipy,Srisai85/scipy,vberaudi/scipy,juliantaylor/scipy,befelix/scipy,vhaasteren/scipy,perimosocordiae/scipy,FRidh/scipy,Newman101/scipy,vhaasteren/scipy,pschella/scipy,behzadnouri/scipy,grlee77/scipy,andim/scipy,sonnyhu/scipy,zerothi/scipy,efiring/scipy,e-q/scipy,e-q/scipy,Stefan-Endres/scipy,futurulus/scipy,aeklant/scipy,pbrod/scipy,scipy/scipy,haudren/scipy,befelix/scipy,maciejkula/scipy,kleskjr/scipy,gertingold/scipy,efiring/scipy,gdooper/scipy,dch312/scipy,richardotis/scipy,Gillu13/scipy,maniteja123/scipy,andim/scipy,andyfaff/scipy,mortonjt/scipy,raoulbq/scipy,jor-/scipy,person142/scipy,matthewalbani/scipy,jseabold/scipy,perimosocordiae/scipy,jakevdp/scipy,zaxliu/scipy,anielsen001/scipy,dch312/scipy,hainm/scipy,gfyoung/scipy,juliantaylor/scipy,ndchorley/scipy,haudren/scipy,andim/scipy,zxsted/scipy,jakevdp/scipy,mgaitan/scipy,matthewalbani/scipy,nonhermitian/scipy,gdooper/scipy,anntzer/scipy,haudren/scipy,giorgiop/scipy,vigna/scipy,tylerjereddy/scipy,behzadnouri/scipy,person142/scipy,endolith/scipy,njwilson23/scipy,raoulbq/scipy,ortylp/scipy,Eric89GXL/scipy,jseabold/scipy,teoliphant/scipy,chatcannon/scipy,e-q/scipy,gef756/scipy,WarrenWeckesser/scipy,josephcslater/scipy,mortonjt/scipy,surhudm/scipy,dominicelse/scipy,andyfaff/scipy,mgaitan/scipy,jakevdp/scipy,WarrenWeckesser/scipy,sriki18/scipy,mhogg/scipy,woodscn/scipy,giorgiop/scipy,Eric89GXL/scipy,scipy/scipy,teoliphant/scipy,jamestwebber/scipy,richardotis/scipy,sonnyhu/scipy,perimosocordiae/scipy,mingwpy/scipy,mdhaber/scipy,Shaswat27/scipy,mdhaber/scipy,petebachant/scipy,apbard/scipy,newemailjdm/scipy,Gillu13/scipy,felipebetancur/scipy,nmayorov/scipy,lhilt/scipy,Srisai85/scipy,maniteja123/scipy,andim/scipy,njwilson23/scipy,zerothi/scipy,mortada/scipy,aman-iitj/scipy,piyush0609/scipy,lukauskas/scipy,mgaitan/scipy,behzadnouri/scipy,maniteja123/scipy,Stefan-Endres/scipy,sriki18/scipy,jor-/scipy,newemailjdm/scipy,jamestwebber/scipy,zxsted/scipy,lhilt/scipy,niknow/scipy,Eric89GXL/scipy,rgommers/scipy,giorgiop/scipy,aarchiba/scipy,sauliusl/scipy,aman-iitj/scipy,gef756/scipy,anntzer/scipy,futurulus/scipy,jakevdp/scipy,efiring/scipy,anntzer/scipy,scipy/scipy,zerothi/scipy,efiring/scipy,giorgiop/scipy,andim/scipy,nvoron23/scipy,mikebenfield/scipy,befelix/scipy,newemailjdm/scipy,sriki18/scipy,befelix/scipy,apbard/scipy,ndchorley/scipy,vanpact/scipy,Gillu13/scipy,Stefan-Endres/scipy,mhogg/scipy,mhogg/scipy,rmcgibbo/scipy,bkendzior/scipy,sargas/scipy,Stefan-Endres/scipy,aarchiba/scipy,mdhaber/scipy,mortada/scipy,dch312/scipy,scipy/scipy,Dapid/scipy,pschella/scipy,raoulbq/scipy,pbrod/scipy,vanpact/scipy,fernand/scipy,woodscn/scipy,matthew-brett/scipy,anielsen001/scipy,bkendzior/scipy,anielsen001/scipy,gdooper/scipy,piyush0609/scipy,gertingold/scipy,tylerjereddy/scipy,matthewalbani/scipy,pnedunuri/scipy,Srisai85/scipy,bkendzior/scipy,raoulbq/scipy,andyfaff/scipy,FRidh/scipy,Stefan-Endres/scipy,richardotis/scipy,ndchorley/scipy,pizzathief/scipy,matthew-brett/scipy,aeklant/scipy,FRidh/scipy,ChanderG/scipy,WillieMaddox/scipy,aman-iitj/scipy,pyramania/scipy,pnedunuri/scipy,minhlongdo/scipy,fernand/scipy,WillieMaddox/scipy,pyramania/scipy,larsmans/scipy,andyfaff/scipy,jsilter/scipy,befelix/scipy,vhaasteren/scipy,piyush0609/scipy,pizzathief/scipy,witcxc/scipy,Dapid/scipy,ilayn/scipy,jseabold/scipy,andyfaff/scipy,anntzer/scipy,rgommers/scipy,pschella/scipy,vberaudi/scipy,behzadnouri/scipy,Srisai85/scipy,josephcslater/scipy,pizzathief/scipy,apbard/scipy,cpaulik/scipy,zxsted
/scipy,nmayorov/scipy,vberaudi/scipy,mingwpy/scipy,zerothi/scipy,zxsted/scipy,arokem/scipy,lukauskas/scipy,gfyoung/scipy,nmayorov/scipy,Dapid/scipy,maciejkula/scipy,Kamp9/scipy,jonycgn/scipy,kalvdans/scipy,apbard/scipy,hainm/scipy,futurulus/scipy,lukauskas/scipy,efiring/scipy,Srisai85/scipy,teoliphant/scipy,sauliusl/scipy,ortylp/scipy,larsmans/scipy,WarrenWeckesser/scipy,ales-erjavec/scipy,maciejkula/scipy,nvoron23/scipy,woodscn/scipy,mdhaber/scipy,matthew-brett/scipy,rmcgibbo/scipy,witcxc/scipy,zaxliu/scipy,jsilter/scipy,minhlongdo/scipy,trankmichael/scipy,ndchorley/scipy,endolith/scipy,ilayn/scipy,nonhermitian/scipy,larsmans/scipy,bkendzior/scipy,tylerjereddy/scipy,zxsted/scipy,chatcannon/scipy,surhudm/scipy,pbrod/scipy,Newman101/scipy,maniteja123/scipy,Kamp9/scipy,juliantaylor/scipy,ChanderG/scipy,pnedunuri/scipy,WillieMaddox/scipy,vanpact/scipy,rgommers/scipy,surhudm/scipy,behzadnouri/scipy,Kamp9/scipy,ndchorley/scipy,mortonjt/scipy,rgommers/scipy,matthew-brett/scipy,pnedunuri/scipy,mikebenfield/scipy,sargas/scipy,arokem/scipy,mgaitan/scipy,sriki18/scipy,matthew-brett/scipy,jjhelmus/scipy,pschella/scipy,dominicelse/scipy,niknow/scipy,mikebenfield/scipy,gdooper/scipy,jamestwebber/scipy,minhlongdo/scipy,vigna/scipy,ogrisel/scipy,vhaasteren/scipy,Kamp9/scipy,minhlongdo/scipy,mortonjt/scipy,Eric89GXL/scipy,mhogg/scipy,ogrisel/scipy,Shaswat27/scipy,andyfaff/scipy,rmcgibbo/scipy,lhilt/scipy,Eric89GXL/scipy,pyramania/scipy,hainm/scipy,perimosocordiae/scipy,ales-erjavec/scipy,arokem/scipy,fredrikw/scipy,jseabold/scipy,aeklant/scipy,mtrbean/scipy,FRidh/scipy,fredrikw/scipy,Shaswat27/scipy,trankmichael/scipy,matthewalbani/scipy,argriffing/scipy,trankmichael/scipy,surhudm/scipy,kleskjr/scipy,vanpact/scipy,vhaasteren/scipy,mikebenfield/scipy,endolith/scipy,ilayn/scipy,Kamp9/scipy,petebachant/scipy,FRidh/scipy,mtrbean/scipy,endolith/scipy,cpaulik/scipy,scipy/scipy,vanpact/scipy,sargas/scipy,nvoron23/scipy,jor-/scipy,mortada/scipy,mgaitan/scipy,argriffing/scipy,vberaudi/scipy,WillieMaddox/scipy,gef756/scipy,grlee77/scipy,person142/scipy,nonhermitian/scipy,gertingold/scipy,piyush0609/scipy,njwilson23/scipy,jsilter/scipy,maniteja123/scipy,jseabold/scipy,nonhermitian/scipy,sonnyhu/scipy,lhilt/scipy,juliantaylor/scipy,jsilter/scipy,Srisai85/scipy,jsilter/scipy,minhlongdo/scipy,aarchiba/scipy,vberaudi/scipy,nmayorov/scipy,fernand/scipy,giorgiop/scipy,dominicelse/scipy,pizzathief/scipy,WarrenWeckesser/scipy,lukauskas/scipy,petebachant/scipy,vigna/scipy,ChanderG/scipy,e-q/scipy,niknow/scipy,haudren/scipy,woodscn/scipy,ales-erjavec/scipy,argriffing/scipy,nvoron23/scipy,nmayorov/scipy,sonnyhu/scipy,vanpact/scipy,perimosocordiae/scipy
624d6e4fc5455720badf4315e06f423eb60411ab
scripts/init_tree.py
scripts/init_tree.py
import os import shutil def main(): cwd = os.getcwd() if not cwd.endswith(os.path.join('FRENSIE', 'scripts')): print 'This script must be run in \"FRENSIE/scipts\"' print 'Your CWD is', cwd return 1 os.chdir('../../') os.mkdir('frensie_build_tree') #os.renames('FRENSIE', 'frensie_build_tree/FRENSIE') os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE') os.chdir('frensie_build_tree') os.symlink('FRENSIE', 'src') os.mkdir('deps') os.mkdir('deps/install') os.mkdir('deps/tars') os.mkdir('build') shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile') shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh') print """ To build dependencies move necessary tars to deps/tars. cd to frensie_build_tree/deps and run make all. To once all dependecies exist in frensie_build_tree/deps/install cd to frensie_build_tree/build and run bash frensie.sh """ if __name__ == '__main__': main()
import os import shutil def main(): cwd = os.getcwd() if not cwd.endswith(os.path.join('FRENSIE', 'scripts')): print 'This script must be run in \"FRENSIE/scipts\"' print 'Your CWD is', cwd return 1 os.chdir('../../') os.mkdir('frensie_build_tree') #os.renames('FRENSIE', 'frensie_build_tree/FRENSIE') os.symlink(os.path.abspath('FRENSIE'), 'frensie_build_tree/FRENSIE') os.chdir('frensie_build_tree') os.symlink('FRENSIE', 'src') os.mkdir('deps') os.mkdir('deps/install') os.mkdir('deps/tars') os.mkdir('build') shutil.copyfile('src/scripts/makefile.deps', 'deps/makefile') shutil.copyfile('src/scripts/lazy.sh', 'deps/lazy.sh') shutil.copyfile('src/scripts/frensie.sh', 'build/frensie.sh') #shutil.copyfile('src/scripts/source_deps.sh', 'build/source_deps.sh') print """ To build dependencies move necessary tars to deps/tars. cd to frensie_build_tree/deps and run make all. To once all dependecies exist in frensie_build_tree/deps/install cd to frensie_build_tree/build and run bash frensie.sh """ if __name__ == '__main__': main()
Update to copy new scripts
Update to copy new scripts
Python
bsd-3-clause
lkersting/SCR-2123,lkersting/SCR-2123,lkersting/SCR-2123,lkersting/SCR-2123
0338f8c66f14d6dbf43a2583ba17a8ae7d690466
apps/survey/urls.py
apps/survey/urls.py
from django.conf.urls.defaults import *

from . import views

urlpatterns = patterns('',
    url(r'^profile/$', views.profile_index, name='survey_profile'),
    url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
    url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
    url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
    url(r'^main/$', views.main_index),
    url(r'^group_management/$', views.group_management, name='group_management'),
    #url(r'^survey_management/$', views.survey_management, name='survey_management'),
    url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
    #url(r'^survey_data/(?P<survey_shortname>.+)/$', views.survey_data, name='survey_data'),
    #url(r'intake/$', views.survey_intake, name='survey_intake'),
    url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
    url(r'^select/$', views.select_user, name='survey_select_user'),
    url(r'^$', views.index, name='survey_index'),
)
from django.conf.urls.defaults import *

from . import views

urlpatterns = patterns('',
    url(r'^profile/$', views.profile_index, name='survey_profile'),
    url(r'^profile/electric/$', views.profile_electric, name='survey_profile_electric'),
    #url(r'^profile/intake/$', views.survey_intake, name='survey_profile_intake'),
    url(r'^profile/surveys/$', views.survey_management, name='survey_management'),
    url(r'^main/$', views.main_index),
    url(r'^survey_management/$', views.survey_management, name='survey_management'),
    #url(r'^survey_data/(?P<survey_shortname>.+)/(?P<id>\d+)/$', views.survey_data, name='survey_data'),
    url(r'^intake/$', views.survey_data, name='survey_data'),
    url(r'^monthly/(?P<id>\d+)/$', views.survey_data_monthly ,name='survey_data_monthly'),
    url(r'^thanks_profile/$', views.thanks_profile, name='profile_thanks'),
    #url(r'^select/$', views.select_user, name='survey_select_user'),
    url(r'^$', views.index, name='survey_index'),
)
Modify call to personal surveis
Modify call to personal surveis
Python
agpl-3.0
chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork,chispita/epiwork
2724b4dd7ed350baeae0a8e0ef53475f40b1208b
project_generator/tools/makearmclang.py
project_generator/tools/makearmclang.py
# Copyright 2020 Chris Reed # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from copy import deepcopy import logging from .makefile import MakefileTool logger = logging.getLogger('progen.tools.armclang') class MakefileArmclang(MakefileTool): def __init__(self, workspace, env_settings): MakefileTool.__init__(self, workspace, env_settings, logger) @staticmethod def get_toolnames(): return ['make_armclang'] @staticmethod def get_toolchain(): return 'armclang' def export_project(self): """ Processes misc options specific for AC6, and run generator """ generated_projects = deepcopy(self.generated_projects) self.process_data_for_makefile(self.workspace) generated_projects['path'], generated_projects['files']['makefile'] = \ self.gen_file_jinja('makefile_armclang.tmpl', self.workspace, 'Makefile', self.workspace['output_dir']['path']) return generated_projects
# Copyright 2020 Chris Reed # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from copy import deepcopy import logging from .makefile import MakefileTool logger = logging.getLogger('progen.tools.armclang') class MakefileArmclang(MakefileTool): def __init__(self, workspace, env_settings): MakefileTool.__init__(self, workspace, env_settings, logger) # enable preprocessing linker files for GCC ARM self.workspace['preprocess_linker_file'] = True self.workspace['linker_extension'] = '.sct' @staticmethod def get_toolnames(): return ['make_armclang'] @staticmethod def get_toolchain(): return 'armclang' def export_project(self): """ Processes misc options specific for AC6, and run generator """ generated_projects = deepcopy(self.generated_projects) self.process_data_for_makefile(self.workspace) generated_projects['path'], generated_projects['files']['makefile'] = \ self.gen_file_jinja('makefile_armclang.tmpl', self.workspace, 'Makefile', self.workspace['output_dir']['path']) return generated_projects
Enable linker preprocessing for armclang.
Enable linker preprocessing for armclang. This should be temporary; for some reason the .sct cpp shebang isn't working for me. Same result in any case.
Python
apache-2.0
project-generator/project_generator
9ae5ea3876fae6ef0bc092d87c71d9ea86040cf7
InvenTree/company/api.py
InvenTree/company/api.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django_filters.rest_framework import DjangoFilterBackend from rest_framework import filters from rest_framework import generics, permissions from django.conf.urls import url from .models import Company from .serializers import CompanySerializer class CompanyList(generics.ListCreateAPIView): serializer_class = CompanySerializer queryset = Company.objects.all() permission_classes = [ permissions.IsAuthenticatedOrReadOnly, ] filter_backends = [ DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter, ] filter_fields = [ 'name', 'is_customer', 'is_supplier', ] search_fields = [ 'name', 'description', ] ordering_fields = [ 'name', ] ordering = 'name' company_api_urls = [ url(r'^.*$', CompanyList.as_view(), name='api-company-list'), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django_filters.rest_framework import DjangoFilterBackend from rest_framework import filters from rest_framework import generics, permissions from django.conf.urls import url from .models import Company from .serializers import CompanySerializer class CompanyList(generics.ListCreateAPIView): serializer_class = CompanySerializer queryset = Company.objects.all() permission_classes = [ permissions.IsAuthenticatedOrReadOnly, ] filter_backends = [ DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter, ] filter_fields = [ 'name', 'is_customer', 'is_supplier', ] search_fields = [ 'name', 'description', ] ordering_fields = [ 'name', ] ordering = 'name' class CompanyDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Company.objects.all() serializer_class = CompanySerializer permission_classes = [ permissions.IsAuthenticatedOrReadOnly, ] company_api_urls = [ url(r'^(?P<pk>\d+)/?', CompanyDetail.as_view(), name='api-company-detail'), url(r'^.*$', CompanyList.as_view(), name='api-company-list'), ]
Add RUD endpoint for Company
Add RUD endpoint for Company
Python
mit
SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree
a8818e2058fdfaec7f283a5115619d42d23b7dde
anchorhub/builtin/github/writer.py
anchorhub/builtin/github/writer.py
""" File that initializes a Writer object designed for GitHub style markdown files. """ from anchorhub.writer import Writer from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \ MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy import anchorhub.builtin.github.switches as ghswitches def make_github_markdown_writer(opts): """ Creates a Writer object used for parsing and writing Markdown files with a GitHub style anchor transformation :param opts: :return: A Writer object designed for parsing, modifying, and writing AnchorHub tags to converted anchors in Markdown files using GitHub style anchors """ assert hasattr(opts, 'wrapper_regex') atx = MarkdownATXWriterStrategy(opts) inline = MarkdownInlineLinkWriterStrategy(opts) code_block_switch = ghswitches.code_block_switch strategies = [atx, inline] switches = [code_block_switch] return Writer(strategies, switches=switches)
""" File that initializes a Writer object designed for GitHub style markdown files. """ from anchorhub.writer import Writer from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \ MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy import anchorhub.builtin.github.switches as ghswitches def make_github_markdown_writer(opts): """ Creates a Writer object used for parsing and writing Markdown files with a GitHub style anchor transformation :param opts: :return: A Writer object designed for parsing, modifying, and writing AnchorHub tags to converted anchors in Markdown files using GitHub style anchors """ assert hasattr(opts, 'wrapper_regex') atx = MarkdownATXWriterStrategy(opts) setext = MarkdownSetextWriterStrategy(opts) inline = MarkdownInlineLinkWriterStrategy(opts) code_block_switch = ghswitches.code_block_switch strategies = [atx, setext, inline] switches = [code_block_switch] return Writer(strategies, switches=switches)
Use Setext strategy in GitHub built in Writer
Use Setext strategy in GitHub built in Writer
Python
apache-2.0
samjabrahams/anchorhub
c154d79ba13d95f3240efd9eb4725cf9fc16060f
forms.py
forms.py
from flask_wtf import Form from wtforms import StringField, PasswordField from wtforms.validators import DataRequired, Email class Login(Form): username = StringField('Username', validators=[DataRequired()]) password = PasswordField('Password', validators=[DataRequired()])
from flask_wtf import FlaskForm from wtforms import StringField, PasswordField from wtforms.validators import DataRequired, Email class Login(FlaskForm): username = StringField('Username', validators=[DataRequired()]) password = PasswordField('Password', validators=[DataRequired()])
Change deprecated flask_wtf.Form with flask_wtf.FlaskForm
Change deprecated flask_wtf.Form with flask_wtf.FlaskForm
Python
mit
openedoo/module_employee,openedoo/module_employee,openedoo/module_employee
e6400b1cce8f35466818e2424adf11e7b18aba6f
sknn/tests/test_rules.py
sknn/tests/test_rules.py
import unittest from nose.tools import (assert_is_not_none, assert_false, assert_raises, assert_equal) import numpy as np from sknn.mlp import MultiLayerPerceptronRegressor as MLPR class TestLearningRules(unittest.TestCase): def test_default(self): self._run(MLPR(layers=[("Linear",)], learning_rule='default')) def test_momentum(self): self._run(MLPR(layers=[("Linear",)], learning_rule='momentum')) def test_rmsprop(self): self._run(MLPR(layers=[("Linear",)], learning_rule='rmsprop')) def test_rmsprop(self): self._run(MLPR(layers=[("Linear",)], dropout=True) def test_unknown(self): assert_raises(NotImplementedError, MLPR, layers=[], learning_rule='unknown') def _run(self, nn): a_in, a_out = np.zeros((8,16)), np.zeros((8,4)) nn.fit(a_in, a_out) a_test = nn.predict(a_in) assert_equal(type(a_out), type(a_test))
import unittest from nose.tools import (assert_is_not_none, assert_false, assert_raises, assert_equal) import numpy as np from sknn.mlp import MultiLayerPerceptronRegressor as MLPR class TestLearningRules(unittest.TestCase): def test_default(self): self._run(MLPR(layers=[("Linear",)], learning_rule='default')) def test_momentum(self): self._run(MLPR(layers=[("Linear",)], learning_rule='momentum')) def test_rmsprop(self): self._run(MLPR(layers=[("Linear",)], learning_rule='rmsprop')) def test_rmsprop(self): self._run(MLPR(layers=[("Linear",)], dropout=True)) def test_unknown(self): assert_raises(NotImplementedError, MLPR, layers=[], learning_rule='unknown') def _run(self, nn): a_in, a_out = np.zeros((8,16)), np.zeros((8,4)) nn.fit(a_in, a_out) a_test = nn.predict(a_in) assert_equal(type(a_out), type(a_test))
Build fix for syntax error in test files.
Build fix for syntax error in test files.
Python
bsd-3-clause
IndraVikas/scikit-neuralnetwork,gticket/scikit-neuralnetwork,agomariz/scikit-neuralnetwork,freakynit/scikit-neuralnetwork,KhanSuleyman/scikit-neuralnetwork,capitancambio/scikit-neuralnetwork,aigamedev/scikit-neuralnetwork
ce95e50b7cb3ef9bbabddb033352aacb96b9237a
pywikibot/families/wikivoyage_family.py
pywikibot/families/wikivoyage_family.py
# -*- coding: utf-8 -*- """Family module for Wikivoyage.""" # # (C) Pywikibot team, 2012-2016 # # Distributed under the terms of the MIT license. # from __future__ import absolute_import, unicode_literals __version__ = '$Id$' # The new wikivoyage family that is hosted at wikimedia from pywikibot import family class Family(family.SubdomainFamily, family.WikimediaFamily): """Family class for Wikivoyage.""" name = 'wikivoyage' def __init__(self): """Constructor.""" self.languages_by_size = [ 'en', 'de', 'fa', 'it', 'fr', 'ru', 'pl', 'nl', 'pt', 'es', 'he', 'zh', 'vi', 'sv', 'el', 'ro', 'uk', ] super(Family, self).__init__() # Global bot allowed languages on # https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation self.cross_allowed = ['es', 'ru', ]
# -*- coding: utf-8 -*- """Family module for Wikivoyage.""" # # (C) Pywikibot team, 2012-2016 # # Distributed under the terms of the MIT license. # # The new wikivoyage family that is hosted at wikimedia from __future__ import absolute_import, unicode_literals from pywikibot import family __version__ = '$Id$' class Family(family.SubdomainFamily, family.WikimediaFamily): """Family class for Wikivoyage.""" name = 'wikivoyage' def __init__(self): """Constructor.""" self.languages_by_size = [ 'en', 'de', 'fa', 'it', 'fr', 'ru', 'pl', 'nl', 'pt', 'fi', 'es', 'zh', 'he', 'vi', 'sv', 'el', 'ro', 'uk', ] super(Family, self).__init__() # Global bot allowed languages on # https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation self.cross_allowed = ['es', 'ru', ]
Add fi:wikivoyage and sort by current article count
Add fi:wikivoyage and sort by current article count Fix also pycodestyle (former PEP8) E402 problem Bug: T153470 Change-Id: Id9bc980c7a9cfb21063597a3d5eae11c31d8040c
Python
mit
Darkdadaah/pywikibot-core,magul/pywikibot-core,jayvdb/pywikibot-core,hasteur/g13bot_tools_new,happy5214/pywikibot-core,magul/pywikibot-core,happy5214/pywikibot-core,Darkdadaah/pywikibot-core,npdoty/pywikibot,wikimedia/pywikibot-core,PersianWikipedia/pywikibot-core,hasteur/g13bot_tools_new,jayvdb/pywikibot-core,hasteur/g13bot_tools_new,npdoty/pywikibot,wikimedia/pywikibot-core
9cc15bc4a7ed8efb82071fa19e9d1ada8771a87d
app/soc/views/helper/decorators.py
app/soc/views/helper/decorators.py
#!/usr/bin/python2.5 # # Copyright 2008 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Views decorators. """ __authors__ = [ '"Pawel Solyga" <pawel.solyga@gmail.com>', ] import logging from functools import wraps from google.appengine.runtime import DeadlineExceededError from django import http def view(func): """Decorator that insists that exceptions are handled by view.""" @wraps(func) def view_wrapper(request, *args, **kwds): try: return func(request, *args, **kwds) except DeadlineExceededError: logging.exception('DeadlineExceededError') return http.HttpResponse('DeadlineExceededError') except MemoryError: logging.exception('MemoryError') return http.HttpResponse('MemoryError') except AssertionError: logging.exception('AssertionError') return http.HttpResponse('AssertionError') return view_wrapper
#!/usr/bin/python2.5 # # Copyright 2008 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Views decorators. """ __authors__ = [ '"Pawel Solyga" <pawel.solyga@gmail.com>', ] import logging from functools import wraps from google.appengine.runtime import DeadlineExceededError from django import http def view(func): """Decorator that insists that exceptions are handled by view.""" @wraps(func) def view_wrapper(*args, **kwds): try: return func(*args, **kwds) except DeadlineExceededError: logging.exception('DeadlineExceededError') return http.HttpResponse('DeadlineExceededError') except MemoryError: logging.exception('MemoryError') return http.HttpResponse('MemoryError') except AssertionError: logging.exception('AssertionError') return http.HttpResponse('AssertionError') return view_wrapper
Remove not needed request argument in view decorator.
Remove not needed request argument in view decorator. Patch by: Pawel Solyga Review by: to-be-reviewed
Python
apache-2.0
MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging
8f59e540cef27ae658f2e01a6a5bf7d765e76ae7
constance/base.py
constance/base.py
from . import settings, utils from django.conf import settings as django_settings class Config(object): """ The global config wrapper that handles the backend. """ def __init__(self): super(Config, self).__setattr__('_backend', utils.import_module_attr(settings.BACKEND)()) def __getattr__(self, key): try: default = settings.CONFIG[key][0] except KeyError: raise AttributeError(key) result = self._backend.get(key) # use Django settings as primary source of default # for example DEBUG if is in django settings will be set as default if hasattr(django_settings, key): return getattr(django_settings, key, result) return result or default def __setattr__(self, key, value): if key not in settings.CONFIG: raise AttributeError(key) self._backend.set(key, value) def __dir__(self): return settings.CONFIG.keys()
from . import settings, utils from leonardo import settings as django_settings class Config(object): """ The global config wrapper that handles the backend. """ def __init__(self): super(Config, self).__setattr__('_backend', utils.import_module_attr(settings.BACKEND)()) def get_default(self, key): try: default = settings.CONFIG[key][0] except KeyError: raise AttributeError(key) return default def __getattr__(self, key): result = self._backend.get(key) # use Django settings as primary source of default # for example DEBUG if is in django settings will be set as default return getattr(django_settings, key, result or self.get_default(key)) def __setattr__(self, key, value): if key not in settings.CONFIG: raise AttributeError(key) self._backend.set(key, value) def __dir__(self): return settings.CONFIG.keys()
Use leonardo settings instead of django settings.
Use leonardo settings instead of django settings.
Python
bsd-3-clause
django-leonardo/django-constance,django-leonardo/django-constance
38216f9d1b875c31b97c80bb9217557e67c92ff3
spicedham/backend.py
spicedham/backend.py
class BaseBackend(object): """ A base class for backend plugins. """ def __init__(self, config): pass def reset(self): """ Resets the training data to a blank slate. """ raise NotImplementedError() def get_key(self, classifier, key, default=None): """ Gets the value held by the classifier, key composite key. If it doesn't exist, return default. """ raise NotImplementedError() def get_key_list(self, classifier, keys, default=None): """ Given a list of key, classifier pairs get all values. If key, classifier doesn't exist, return default. Subclasses can override this to make more efficient queries for bulk requests. """ return [self.get_key(classifier, key, default) for key in keys] def set_key_list(self, classifier, key_value_pairs): """ Given a list of pairs of key, value and a classifier set them all. Subclasses can override this to make more efficient queries for bulk requests. """ return [self.set_key(classifier, key, value) for key, value in key_value_pairs] def set_key(self, classifier, key, value): """ Set the value held by the classifier, key composite key. """ raise NotImplementedError()
class BaseBackend(object): """ A base class for backend plugins. """ def __init__(self, config): pass def reset(self): """ Resets the training data to a blank slate. """ raise NotImplementedError() def get_key(self, classification_type, classifier, key, default=None): """ Gets the value held by the classifier, key composite key. If it doesn't exist, return default. """ raise NotImplementedError() def get_key_list(self, classification_type, classifier, keys, default=None): """ Given a list of key, classifier pairs get all values. If key, classifier doesn't exist, return default. Subclasses can override this to make more efficient queries for bulk requests. """ return [self.get_key(classification_type, classifier, key, default) for classifier, key in izip(repeat(classifier), keys)] def set_key_list(self, classification_type, classifier, key_value_pairs): """ Given a list of pairs of key, value and a classifier set them all. Subclasses can override this to make more efficient queries for bulk requests. """ return [self.set_key(classification_type, classifier, key, value) for classifier, key, value in izip(repeat(classifier), key_value_pairs)] def set_key(self, classification_type, classifier, key, value): """ Set the value held by the classifier, key composite key. """ raise NotImplementedError()
Add classifier type to the base class
Add classifier type to the base class
Python
mpl-2.0
mozilla/spicedham,mozilla/spicedham
ba2f2d7e53f0ffc58c882d78f1b8bc9a468eb164
predicates.py
predicates.py
class OneOf: def __init__(self, members): self.members = members def __call__(self, candidate): if candidate in self.members: return True return "%s not in %s" % (candidate, self.members) def __repr__(self): return "one of %s" % ', '.join(self.members) def oneof(*members): return OneOf(members) class InRange: def __init__(self, start, end): self.start = start self.end = end def __call__(self, candidate): if self.start <= candidate <= self.end: return True return "%s not between %s and %s" % (candidate, self.start, self.end) def __repr__(self): return "between %s and %s" % (self.start, self.end) def inrange(start, end): return InRange(start, end)
class OneOf: def __init__(self, members): self.members = members def __call__(self, candidate): if candidate in self.members: return True return "%s not in %s" % (candidate, self.members) def __repr__(self): return "one of %s" % ', '.join(map(repr, self.members)) def oneof(*members): return OneOf(members) class InRange: def __init__(self, start, end): self.start = start self.end = end def __call__(self, candidate): if self.start <= candidate <= self.end: return True return "%s not between %s and %s" % (candidate, self.start, self.end) def __repr__(self): return "between %s and %s" % (self.start, self.end) def inrange(start, end): return InRange(start, end)
Fix problem rendering oneof() predicate when the members aren't strings
Fix problem rendering oneof() predicate when the members aren't strings
Python
mit
mrozekma/pytypecheck
7955e777d6ba3bbbd104bd3916f131ab7fa8f8b5
asyncmongo/__init__.py
asyncmongo/__init__.py
#!/bin/env python # # Copyright 2010 bit.ly # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ AsyncMongo is an asynchronous library for accessing mongo http://github.com/bitly/asyncmongo """ try: import bson except ImportError: raise ImportError("bson library not installed. Install pymongo >= 1.9 https://github.com/mongodb/mongo-python-driver") # also update in setup.py version = "1.3" version_info = (1, 3) ASCENDING = 1 """Ascending sort order.""" DESCENDING = -1 """Descending sort order.""" GEO2D = "2d" """Index specifier for a 2-dimensional `geospatial index`""" from errors import (Error, InterfaceError, AuthenticationError, DatabaseError, RSConnectionError, DataError, IntegrityError, ProgrammingError, NotSupportedError) from client import Client
#!/bin/env python # # Copyright 2010 bit.ly # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ AsyncMongo is an asynchronous library for accessing mongo http://github.com/bitly/asyncmongo """ try: import bson except ImportError: raise ImportError("bson library not installed. Install pymongo >= 1.9 https://github.com/mongodb/mongo-python-driver") # also update in setup.py version = "1.3" version_info = (1, 3) ASCENDING = 1 """Ascending sort order.""" DESCENDING = -1 """Descending sort order.""" GEO2D = "2d" """Index specifier for a 2-dimensional `geospatial index`""" TEXT = '{ $meta: "textScore" }' """TEXT Index sort order.""" from errors import (Error, InterfaceError, AuthenticationError, DatabaseError, RSConnectionError, DataError, IntegrityError, ProgrammingError, NotSupportedError) from client import Client
Support Sort Order For TEXT Index
Support Sort Order For TEXT Index
Python
apache-2.0
RealGeeks/asyncmongo
26efd98c88a627f76ebd0865053353eb7a30e3bb
.glerbl/repo_conf.py
.glerbl/repo_conf.py
checks = { 'pre-commit': [ # BEFORE_COMMIT in the root of the working tree can be used as # reminder to do something before the next commit. "no_before_commit", # We only allow ASCII filenames. "no_non_ascii_filenames", # We don't allow trailing whitespaces. "no_trailing_whitespace", # Python files must conform to PEP8 "python_pep8", # Python files must not have trailing semicolons "python_no_trailing_semicolon" ] }
import sys import os dirname = os.path.dirname(__file__) python_path = os.path.join(os.path.dirname(dirname), "selenium_test", "lib") if "PYTHONPATH" not in os.environ: os.environ["PYTHONPATH"] = python_path else: os.environ["PYTHONPATH"] = python_path + ":" + os.environ["PYTHONPATH"] checks = { 'pre-commit': [ # BEFORE_COMMIT in the root of the working tree can be used as # reminder to do something before the next commit. "no_before_commit", # We only allow ASCII filenames. "no_non_ascii_filenames", # We don't allow trailing whitespaces. "no_trailing_whitespace", # Python files must conform to PEP8 "python_pep8", # Python files must not have trailing semicolons "python_no_trailing_semicolon" ] }
Modify PYTHONPATH so that pylint is able to find wedutil.
Modify PYTHONPATH so that pylint is able to find wedutil.
Python
mpl-2.0
mangalam-research/wed,slattery/wed,lddubeau/wed,slattery/wed,mangalam-research/wed,slattery/wed,mangalam-research/wed,lddubeau/wed,mangalam-research/wed,lddubeau/wed,lddubeau/wed
9952a0e7dce1dbe39921c42ae938f890d88fcc10
lib/rpnpy/__init__.py
lib/rpnpy/__init__.py
import sys import ctypes as _ct if sys.version_info < (3,): integer_types = (int, long,) range = xrange else: integer_types = (int,) long = int # xrange = range C_WCHAR2CHAR = lambda x: bytes(str(x).encode('ascii')) C_WCHAR2CHAR.__doc__ = 'Convert str to bytes' C_CHAR2WCHAR = lambda x: str(x.decode('ascii')) C_CHAR2WCHAR.__doc__ = 'Convert bytes to str' C_MKSTR = lambda x: _ct.create_string_buffer(C_WCHAR2CHAR(x)) C_MKSTR.__doc__ = 'alias to ctypes.create_string_buffer, make sure bytes are provided'
import sys import ctypes as _ct if sys.version_info < (3,): integer_types = (int, long,) range = xrange else: integer_types = (int,) long = int range = range C_WCHAR2CHAR = lambda x: bytes(str(x).encode('ascii')) C_WCHAR2CHAR.__doc__ = 'Convert str to bytes' C_CHAR2WCHAR = lambda x: str(x.decode('ascii')) C_CHAR2WCHAR.__doc__ = 'Convert bytes to str' C_MKSTR = lambda x: _ct.create_string_buffer(C_WCHAR2CHAR(x)) C_MKSTR.__doc__ = 'alias to ctypes.create_string_buffer, make sure bytes are provided'
Add missing rpnpy.range reference for Python 3.
Add missing rpnpy.range reference for Python 3. Signed-off-by: Stephane_Chamberland <1054841519c328088796c1f3c72c14f95c4efe35@science.gc.ca> (cherry picked from commit 23860277c006d9635dedcaaa5e065c7aad199d8c)
Python
lgpl-2.1
meteokid/python-rpn,meteokid/python-rpn,meteokid/python-rpn,meteokid/python-rpn
017d099ee598da94d1ca6118d6c66b006de7b1c1
names.py
names.py
import re # A regular expression is a string like what you see below between the quote # marks, and the ``re`` module interprets it as a pattern. Each regular # expression describes a small program that takes another string as input and # returns information about that string. See # http://docs.python.org/library/re.html. The ``re`` module provides the # ``compile`` function, which prepares regex patterns for use in searching input # strings. # # We put an ``r`` before the string so that Python doesn't interpret the # backslashes before the ``re`` module gets to see them. (E.g., ``\n`` means a # newline character, so ``\n`` is a single character, not two as they appear in # the source code.) # # The Unicode flag lets us handle words with accented characters. FIRST_LAST = re.compile(r"(\w*)\s+((?:\w|\s|['-]){2,})", flags=re.UNICODE) def split_name(name): '''Return ("First", "Last") tuple from a string like "First Last". ``name`` is a string. This function returns a tuple of strings. When a non-matching string is encoutered, we yield ``None``. ''' match = FIRST_LAST.search(name) return None if match is None else (match.group(1), match.group(2))
import re # A regular expression is a string like what you see below between the quote # marks, and the ``re`` module interprets it as a pattern. Each regular # expression describes a small program that takes another string as input and # returns information about that string. See # http://docs.python.org/library/re.html. The ``re`` module provides the # ``compile`` function, which prepares regex patterns for use in searching input # strings. # # We put an ``r`` before the string so that Python doesn't interpret the # backslashes before the ``re`` module gets to see them. (E.g., ``\n`` means a # newline character, so ``\n`` is a single character, not two as they appear in # the source code.) # # The Unicode flag lets us handle words with accented characters. FIRST_LAST = re.compile(r"(\w*)\s+((?:\w|\s|['-]){2,})", flags=re.UNICODE) def split_name(name): '''Return ("First", "Last") tuple from a string like "First Last". ``name`` is a string. This function returns a tuple of strings. When a non-matching string is encoutered, we return ``None``. ''' match = FIRST_LAST.search(name) return None if match is None else (match.group(1), match.group(2))
Remove last reference to generators
Remove last reference to generators
Python
unlicense
wkschwartz/first-last
7bd1a1db924ffbe23e66e178e0044229f3d207cd
navpy.py
navpy.py
import numpy as np def eul2quat(rotAngle1,rotAngle2,rotAngle3, input_unit='rad',rotation_sequence='ZYX'): """ """ """ # INPUT CHECK rotAngle1 = np.array(rotAngle1) rotAngle2 = np.array(rotAngle2) rotAngle3 = np.array(rotAngle3) if(len(rotAngle1.shape)==0): rotAngle1.reshape(1,) if(len(rotAngle2.shape)==0): rotAngle2.reshape(1,) if(len(rotAngle3.shape)==0): rotAngle3.reshape(1,) if(len(rotAngle1.shape)==2) rotAngle1.shape[0] """ if(input_unit=='deg'): rotAngle1 = np.deg2rad(rotAngle1) rotAngle2 = np.deg2rad(rotAngle2) rotAngle3 = np.deg2rad(rotAngle3) rotAngle1 /= 2.0 rotAngle2 /= 2.0 rotAngle3 /= 2.0 if(rotation_sequence=='ZYX'): q0 = np.cos(rotAngle1)*np.cos(rotAngle2)*np.cos(rotAngle3) + \ np.sin(rotAngle1)*np.sin(rotAngle2)*np.sin(rotAngle3) qvec = np.zeros(3) qvec[0] = np.cos(rotAngle1)*np.cos(rotAngle2)*np.sin(rotAngle3) - \ np.sin(rotAngle1)*np.sin(rotAngle2)*np.cos(rotAngle3) qvec[1] = np.cos(rotAngle1)*np.sin(rotAngle2)*np.cos(rotAngle3) + \ np.sin(rotAngle1)*np.cos(rotAngle2)*np.sin(rotAngle3) qvec[2] = np.sin(rotAngle1)*np.cos(rotAngle2)*np.cos(rotAngle3) - \ np.cos(rotAngle1)*np.sin(rotAngle2)*np.sin(rotAngle3) return q0, qvec
Add eul2quat function. Currently only accept scalar. Need to do input checking.
Add eul2quat function. Currently only accept scalar. Need to do input checking.
Python
bsd-3-clause
hamid-m/NavPy,adhika/NavPy,NavPy/NavPy
7608d0e89781f70fcb49e7dc3ee5cd57a094f18c
rx/__init__.py
rx/__init__.py
from .observable import Observable from .anonymousobservable import AnonymousObservable from .observer import Observer from . import checkedobserver from . import linq from . import backpressure try: from asyncio import Future except ImportError: Future = None # Rx configuration dictionary config = { "Future" : Future }
from threading import Lock from .observable import Observable from .anonymousobservable import AnonymousObservable from .observer import Observer from . import checkedobserver from . import linq from . import backpressure try: from asyncio import Future except ImportError: Future = None # Rx configuration dictionary config = { "Future" : Future, "Lock" : Lock }
Make it possible to set custom Lock
Make it possible to set custom Lock
Python
mit
ReactiveX/RxPY,ReactiveX/RxPY,dbrattli/RxPY
6f918da14582b31586c6fd9ef60259be9b05b314
filter_plugins/to_ini.py
filter_plugins/to_ini.py
def to_ini(databases = []): """ Custom ansible filter to print out pgbouncer database connection settings from a list of variable objects. """ s = '' for db in databases: for alias, config in db.iteritems(): s = s + str(alias) + ' = ' for key, value in config.iteritems(): s = s + str(key) + '=' + str(value) + ' ' s = s.rstrip() + '\n' return s.rstrip() class FilterModule(): def filters(self): return {'pgbouncer_to_ini': to_ini}
def to_ini(databases = []): """ Custom ansible filter to print out pgbouncer database connection settings from a list of variable objects. """ s = '' for db in databases: for alias, config in db.items(): s = s + str(alias) + ' = ' for key, value in config.items(): s = s + str(key) + '=' + str(value) + ' ' s = s.rstrip() + '\n' return s.rstrip() class FilterModule(): def filters(self): return {'pgbouncer_to_ini': to_ini}
Change iteritems to items for changes made in python 3
Change iteritems to items for changes made in python 3
Python
mit
jradtilbrook/ansible-role-pgbouncer
0353b9876967c0eb2fc742e8cbbd7a085a764312
bio_hansel/__init__.py
bio_hansel/__init__.py
# -*- coding: utf-8 -*- __version__ = '2.1.0' program_name = 'bio_hansel' program_summary = 'Subtype microbial genomes using SNV targeting k-mer subtyping schemes.' program_desc = program_summary + ''' Includes schemes for Salmonella enterica spp. enterica serovar Heidelberg and Enteritidis subtyping. Developed by Geneviève Labbé, James Robertson, Peter Kruczkiewicz, Marisa Rankin, Matthew Gopez, Chad R. Laing, Philip Mabon, Kim Ziebell, Aleisha R. Reimer, Lorelee Tschetter, Gary Van Domselaar, Sadjia Bekal, Kimberley A. MacDonald, Linda Hoang, Linda Chui, Danielle Daignault, Durda Slavic, Frank Pollari, E. Jane Parmley, Philip Mabon, Elissa Giang, Lok Kan Lee, Jonathan Moffat, Marisa Rankin, Joanne MacKinnon, Roger Johnson, John H.E. Nash. '''
# -*- coding: utf-8 -*- __version__ = '2.1.1' program_name = 'bio_hansel' program_summary = 'Subtype microbial genomes using SNV targeting k-mer subtyping schemes.' program_desc = program_summary + ''' Includes schemes for Salmonella enterica spp. enterica serovar Heidelberg and Enteritidis subtyping. Developed by Geneviève Labbé, James Robertson, Peter Kruczkiewicz, Marisa Rankin, Matthew Gopez, Chad R. Laing, Philip Mabon, Kim Ziebell, Aleisha R. Reimer, Lorelee Tschetter, Gary Van Domselaar, Sadjia Bekal, Kimberley A. MacDonald, Linda Hoang, Linda Chui, Danielle Daignault, Durda Slavic, Frank Pollari, E. Jane Parmley, Philip Mabon, Elissa Giang, Lok Kan Lee, Jonathan Moffat, Marisa Rankin, Joanne MacKinnon, Roger Johnson, John H.E. Nash. '''
Patch version bump to v2.1.1
Patch version bump to v2.1.1
Python
apache-2.0
phac-nml/bio_hansel
b200dec97f5aca7653a7ca219159d1f4acee15b1
dsub/_dsub_version.py
dsub/_dsub_version.py
# Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Single source of truth for dsub's version. This must remain small and dependency-free so that any dsub module may import it without creating circular dependencies. Note that this module is parsed as a text file by setup.py and changes to the format of this file could break setup.py. The version should follow formatting requirements specified in PEP-440. - https://www.python.org/dev/peps/pep-0440 A typical release sequence will be versioned as: 0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ... """ DSUB_VERSION = '0.3.4'
# Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Single source of truth for dsub's version. This must remain small and dependency-free so that any dsub module may import it without creating circular dependencies. Note that this module is parsed as a text file by setup.py and changes to the format of this file could break setup.py. The version should follow formatting requirements specified in PEP-440. - https://www.python.org/dev/peps/pep-0440 A typical release sequence will be versioned as: 0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ... """ DSUB_VERSION = '0.3.5.dev0'
Update dsub version to 0.3.5.dev0
Update dsub version to 0.3.5.dev0 PiperOrigin-RevId: 272470402
Python
apache-2.0
DataBiosphere/dsub,DataBiosphere/dsub
ca707d8e62a896a4702121faee02ff8d2491a842
grep_redone/grep/grep.py
grep_redone/grep/grep.py
import os class Searcher(object): """Searches files in dirs for specified string.""" def __init__(self, currentdir, string_to_search_for): self.currentdir = currentdir self.string_to_search_for = string_to_search_for self.get_files_in_currentdir() def search_files_in_dir_for_string(self, file_list): for f in file_list: self.search_file_for_string(f) def get_files_in_currentdir(self): # TODO implement iterator file; iterate lines file_list = [] for f in os.listdir(self.currentdir): if not os.path.isdir(f): file_list.append(f) if self.string_to_search_for: self.search_files_in_dir_for_string(file_list) else: for f in file_list: print f def search_file_for_string(self, f): with open(f) as f: for line in f: if self.string_to_search_for in line: print 'now' def search_subdir(self): pass
import os from clint.textui import colored class Searcher(object): """Searches files in dirs for specified string.""" def __init__(self, currentdir, string_to_search_for): self.currentdir = currentdir self.string_to_search_for = string_to_search_for self.get_files_in_currentdir() def get_files_in_currentdir(self): # TODO implement iterator file; iterate lines file_list = [] for f in os.listdir(self.currentdir): if not os.path.isdir(f): file_list.append(f) if self.string_to_search_for: matched_file_dict = self.search_files_in_dir_for_string(file_list) if matched_file_dict: self.print_nicely(matched_file_dict) else: for f in file_list: print f def search_files_in_dir_for_string(self, file_list): matched_file_dict = {} for f in file_list: matched_line_dict = self.search_file_for_string(f) if matched_line_dict: matched_file_dict[f] = matched_line_dict return matched_file_dict def search_file_for_string(self, f): matched_line_dict = {} with open(f) as f: for index, line in enumerate(f): if self.string_to_search_for in line: matched_line_dict[index] = line return matched_line_dict def print_nicely(self, matched_file_dict): for key, value in matched_file_dict.iteritems(): for k, v in value.iteritems(): print (colored.magenta('./' + key + ':', True, False) + str(k) + ':' + v),
Add basic output of found lines
Add basic output of found lines
Python
bsd-2-clause
florianbegusch/simple_grep,florianbegusch/simple_grep
0aa61fb32df9ae3ef9c465f4b246edf04897cd14
staticfiles/views.py
staticfiles/views.py
""" Views and functions for serving static files. These are only to be used during development, and SHOULD NOT be used in a production setting. """ from django.views.static import serve as django_serve from staticfiles.resolvers import resolve def serve(request, path, show_indexes=False): """ Serve static files from locations inferred from INSTALLED_APPS and STATICFILES_DIRS. To use, put a URL pattern such as:: (r'^(?P<path>.*)$', 'staticfiles.views.serve') in your URLconf. You may also set ``show_indexes`` to ``True`` if you'd like to serve a basic index of the directory. This index view will use the template hardcoded below, but if you'd like to override it, you can create a template called ``static/directory_index``. """ return django_serve(request, path='', document_root=resolve(path), show_indexes=show_indexes)
""" Views and functions for serving static files. These are only to be used during development, and SHOULD NOT be used in a production setting. """ from django import http from django.views.static import serve as django_serve from staticfiles.resolvers import resolve def serve(request, path, show_indexes=False): """ Serve static files from locations inferred from INSTALLED_APPS and STATICFILES_DIRS. To use, put a URL pattern such as:: (r'^(?P<path>.*)$', 'staticfiles.views.serve') in your URLconf. You may also set ``show_indexes`` to ``True`` if you'd like to serve a basic index of the directory. This index view will use the template hardcoded below, but if you'd like to override it, you can create a template called ``static/directory_index``. """ absolute_path = resolve(path) if not absolute_path: raise http.Http404('%r could not be resolved to a static file.' % path) return django_serve(request, path='', document_root=absolute_path, show_indexes=show_indexes)
Make the staticfiles serve view raise a 404 for paths which could not be resolved.
Make the staticfiles serve view raise a 404 for paths which could not be resolved.
Python
bsd-3-clause
tusbar/django-staticfiles,jezdez-archive/django-staticfiles,tusbar/django-staticfiles
e640ed3770cd3c3dbab90866a77449d17a633704
wcsaxes/wcs_utils.py
wcsaxes/wcs_utils.py
# Adapted from Astropy core package until 1.0 is released # # Copyright (c) 2011-2014, Astropy Developers # # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of the Astropy Team nor the names of its contributors may # be used to endorse or promote products derived from this software without # specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import numpy as np
import numpy as np
Remove old LICENSE that was there for astropy-ported code
Remove old LICENSE that was there for astropy-ported code
Python
bsd-3-clause
stargaser/astropy,stargaser/astropy,saimn/astropy,astropy/astropy,DougBurke/astropy,aleksandr-bakanov/astropy,pllim/astropy,mhvk/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,bsipocz/astropy,tbabej/astropy,StuartLittlefair/astropy,AustereCuriosity/astropy,larrybradley/astropy,larrybradley/astropy,joergdietrich/astropy,saimn/astropy,joergdietrich/astropy,bsipocz/astropy,tbabej/astropy,kelle/astropy,MSeifert04/astropy,larrybradley/astropy,bsipocz/astropy,astropy/astropy,joergdietrich/astropy,kelle/astropy,kelle/astropy,stargaser/astropy,astropy/astropy,lpsinger/astropy,pllim/astropy,lpsinger/astropy,tbabej/astropy,mhvk/astropy,MSeifert04/astropy,funbaker/astropy,MSeifert04/astropy,mhvk/astropy,saimn/astropy,StuartLittlefair/astropy,astropy/astropy,aleksandr-bakanov/astropy,DougBurke/astropy,astropy/astropy,AustereCuriosity/astropy,pllim/astropy,larrybradley/astropy,larrybradley/astropy,funbaker/astropy,DougBurke/astropy,stargaser/astropy,mhvk/astropy,kelle/astropy,lpsinger/astropy,tbabej/astropy,dhomeier/astropy,pllim/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,funbaker/astropy,dhomeier/astropy,StuartLittlefair/astropy,tbabej/astropy,lpsinger/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,funbaker/astropy,pllim/astropy,AustereCuriosity/astropy,dhomeier/astropy,DougBurke/astropy,saimn/astropy,joergdietrich/astropy,bsipocz/astropy,kelle/astropy,saimn/astropy,mhvk/astropy,dhomeier/astropy,lpsinger/astropy,dhomeier/astropy,AustereCuriosity/astropy
6ce216f449fbd7da5f8eea6785ce71542caa2fcd
mappyfile/__init__.py
mappyfile/__init__.py
import logging import pkg_resources import sys from types import ModuleType # allow high-level functions to be accessed directly from the mappyfile module from mappyfile.utils import load, loads, find, findall, dumps, write __version__ = "0.6.2" __all__ = ['load', 'loads', 'find', 'findall', 'dumps', 'write'] plugins = ModuleType('mappyfile.plugins') sys.modules['mappyfile.plugins'] = plugins for ep in pkg_resources.iter_entry_points(group='mappyfile.plugins'): setattr(plugins, ep.name, ep.load()) # Set default logging handler to avoid "No handler found" warnings. try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger("mappyfile").addHandler(NullHandler())
import logging import pkg_resources import sys from types import ModuleType # allow high-level functions to be accessed directly from the mappyfile module from mappyfile.utils import load, loads, find, findall, dumps, write __version__ = "0.6.2" __all__ = ['load', 'loads', 'find', 'findall', 'dumps', 'dump', 'write'] plugins = ModuleType('mappyfile.plugins') sys.modules['mappyfile.plugins'] = plugins for ep in pkg_resources.iter_entry_points(group='mappyfile.plugins'): setattr(plugins, ep.name, ep.load()) # Set default logging handler to avoid "No handler found" warnings. try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger("mappyfile").addHandler(NullHandler())
Add new dump public method
Add new dump public method
Python
mit
geographika/mappyfile,geographika/mappyfile