commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos
---|---|---|---|---|---|---|---|---|---|
7572df6e558479ebbe1c78f5671dc92450310330 | app.py | app.py | from flask import Flask, render_template, url_for, redirect, request
app = Flask(__name__)
@app.route('/')
def index():
return render_template('login.html')
@app.route('/save', methods=['POST'])
def save():
import pdb; pdb.set_trace()
return redirect(url_for('index'))
if __name__ == '__main__':
app.run(debug=True) | import json
from user import User
from flask import (Flask, render_template, url_for,
redirect, request, make_response, jsonify)
app = Flask(__name__)
def get_saved_data():
try:
data = json.loads(request.cookies.get('user'))
except TypeError:
data = {}
return data
@app.route('/')
def index():
data = get_saved_data()
return render_template('login.html', user=data)
@app.route('/save', methods=['POST', 'GET'])
def save():
if request.method == 'POST':
response = make_response(redirect(url_for('manage')))
data = get_saved_data()
data.update(dict(request.form.items()))
response.set_cookie('user', json.dumps(data))#dict(request.form.items())['username'])
return response
else:
return render_template('managelists.html', data=data)
@app.route('/manage', methods=['POST', 'GET'])
def manage():
if request.method == 'POST':
name = request.form['title']
description = request.form['description']
data = {"name":name, "description": description}
#return render_template('managelists.html', activity=data)
#return jsonify({"name":name, "description": description})
return render_template('managelists.html', data=data)
else:
data = get_saved_data()
return render_template('managelists.html', data=data)
if __name__ == '__main__':
app.run(debug=True) | Add add bucket list feature | Add add bucket list feature
| Python | mit | mkiterian/bucket-list-app,mkiterian/bucket-list-app,mkiterian/bucket-list-app |
60e92f0a085bf7f4cb9f326085e3d4aba11f3594 | bot.py | bot.py | from flask import Flask
from flow import Flow
from config import ORG_ID, CHANNEL_ID
flow = Flow('botbotbot')
app = Flask(__name__)
@app.route('/')
def index():
flow.send_message(ORG_ID, CHANNEL_ID, 'botbotbot')
return 'foo'
if __name__ == "__main__":
app.run()
| import json
import requests
from flask import Flask, request
from flow import Flow
from config import ORG_ID, CHANNEL_ID
flow = Flow('botbotbot')
app = Flask(__name__)
@app.route('/')
def index():
flow.send_message(ORG_ID, CHANNEL_ID, 'botbotbot')
return 'foo'
@app.route('/deployments/', methods=['POST'])
def failures():
data = json.loads(request.data.decode('utf-8'))
message_type = data['Type']
if message_type == 'SubscriptionConfirmation':
confirmation = requests.get(data['SubscribeURL'])
elif message_type == 'Notification':
message_data = json.loads(data['Message'])
message = '{applicationName} ({deploymentGroupName}) deployment has the status {status}'.format(**message_data)
flow.send_message(ORG_ID, CHANNEL_ID, message)
return 'foop'
if __name__ == "__main__":
import sys
port = int(sys.argv[1])
app.run(port=port, debug=True)
| Add actual things that do real stuff | Add actual things that do real stuff
| Python | mit | datamade/semabot,datamade/semabot |
7a17facf68a90d246b4bee55491c9495a8c5ca50 | tg/dottednames/jinja_lookup.py | tg/dottednames/jinja_lookup.py | """Genshi template loader that supports dotted names."""
from os.path import exists, getmtime
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import FileSystemLoader
from tg import config
class JinjaTemplateLoader(FileSystemLoader):
"""Jinja template loader supporting dotted filenames. Based on Genshi Loader
"""
template_extension = '.html'
def get_source(self, environment, template):
# Check if dottedname
if not template.endswith(self.template_extension):
# Get the actual filename from dotted finder
finder = config['pylons.app_globals'].dotted_filename_finder
template = finder.get_dotted_filename(
template_name=template,
template_extension=self.template_extension)
else:
return FileSystemLoader.get_source(self, environment, template)
# Check if the template exists
if not exists(template):
raise TemplateNotFound(template)
# Get modification time
mtime = getmtime(template)
# Read the source
with file(template, "r") as fd:
source = fd.read().decode('utf-8')
return source, template, lambda: mtime == getmtime(template)
| """Genshi template loader that supports dotted names."""
from os.path import exists, getmtime
from jinja2.exceptions import TemplateNotFound
from jinja2.loaders import FileSystemLoader
from tg import config
class JinjaTemplateLoader(FileSystemLoader):
"""Jinja template loader supporting dotted filenames. Based on Genshi Loader
"""
template_extension = '.html'
def get_source(self, environment, template):
# Check if dottedname
if not template.endswith(self.template_extension):
# Get the actual filename from dotted finder
finder = config['pylons.app_globals'].dotted_filename_finder
template = finder.get_dotted_filename(
template_name=template,
template_extension=self.template_extension)
else:
return FileSystemLoader.get_source(self, environment, template)
# Check if the template exists
if not exists(template):
raise TemplateNotFound(template)
# Get modification time
mtime = getmtime(template)
# Read the source
fd = file(template)
try:
source = fd.read().decode('utf-8')
finally:
fd.close()
return source, template, lambda: mtime == getmtime(template)
| Make JinjaTemplateLoader work with Python 2.4. | Make JinjaTemplateLoader work with Python 2.4.
| Python | mit | lucius-feng/tg2,lucius-feng/tg2 |
2147df557bfb922fd640e2da1b105a60644dece0 | src/main.py | src/main.py | import webapp2
import settings
class SampleIndex(webapp2.RequestHandler):
"""Stub request handler"""
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.out.write("helloworld")
application = webapp2.WSGIApplication([
('/', SampleIndex),
], debug=settings.DEBUG)
| import webapp2
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
class SampleIndex(webapp2.RequestHandler):
"""Stub request handler"""
def get(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.out.write("helloworld")
application = webapp2.WSGIApplication([
('/', SampleIndex),
], debug=DEBUG)
| Determine DEBUG flag at runtime - if we are under the SDK, we are debugging | Determine DEBUG flag at runtime - if we are under the SDK, we are debugging
| Python | apache-2.0 | rbanffy/testable_appengine,rbanffy/testable_appengine |
ddb64a0b7a09203c8367c47d34ac29a82af012c0 | produceEports.py | produceEports.py | #!/usr/bin/env python
from app.views.export import write_all_measurements_csv
import tempfile
import os
f = open("{0}/app/static/exports/AllMeasurements_inprogress.csv".format(os.path.dirname(os.path.realpath(__file__))), "w")
try:
write_all_measurements_csv(f)
finally:
f.close
os.rename("app/static/exports/AllMeasurements_inprogress.csv", "app/static/exports/AllMeasurements.csv")
| #!/usr/bin/env python
from app.views.export import write_all_measurements_csv
import tempfile
import os
exportDirectory = "{0}/app/static/exports".format(os.path.dirname(os.path.realpath(__file__))
workingFile = "{0}/AllMeasurements_inprogress.csv".format(exportDirectory)
finalFile = "{0}/AllMeasurements.csv".format(exportDirectory)
f = open(workingFile, "w")
try:
write_all_measurements_csv(f)
finally:
f.close
os.rename(workingFile, finalFile)
| Use directory for all interaction - duh! | Use directory for all interaction - duh!
| Python | mit | rabramley/telomere,rabramley/telomere,rabramley/telomere |
8a544ac2db71d4041c77fdb0ddfe27b84b565bb5 | salt/utils/saltminionservice.py | salt/utils/saltminionservice.py | # Import salt libs
from salt.utils.winservice import Service, instart
import salt
# Import third party libs
import win32serviceutil
import win32service
import winerror
import win32api
# Import python libs
import sys
class MinionService(Service):
def start(self):
self.runflag = True
self.log("Starting the Salt Minion")
minion = salt.Minion()
minion.start()
while self.runflag:
pass
#self.sleep(10)
#self.log("I'm alive ...")
def stop(self):
self.runflag = False
self.log("Shutting down the Salt Minion")
def console_event_handler(event):
if event == 5:
# Do nothing on CTRL_LOGOFF_EVENT
return True
return False
def _main():
win32api.SetConsoleCtrlHandler(console_event_handler, 1)
servicename = 'salt-minion'
try:
status = win32serviceutil.QueryServiceStatus(servicename)
except win32service.error as details:
if details[0] == winerror.ERROR_SERVICE_DOES_NOT_EXIST:
instart(MinionService, servicename, 'Salt Minion')
sys.exit(0)
if status[1] == win32service.SERVICE_RUNNING:
win32serviceutil.StopServiceWithDeps(servicename)
win32serviceutil.StartService(servicename)
else:
win32serviceutil.StartService(servicename)
if __name__ == '__main__':
_main()
| # Import salt libs
from salt.utils.winservice import Service, instart
import salt
# Import third party libs
import win32serviceutil
import win32service
import winerror
# Import python libs
import sys
class MinionService(Service):
def start(self):
self.runflag = True
self.log("Starting the Salt Minion")
minion = salt.Minion()
minion.start()
while self.runflag:
pass
#self.sleep(10)
#self.log("I'm alive ...")
def stop(self):
self.runflag = False
self.log("Shutting down the Salt Minion")
def _main():
servicename = 'salt-minion'
try:
status = win32serviceutil.QueryServiceStatus(servicename)
except win32service.error as details:
if details[0] == winerror.ERROR_SERVICE_DOES_NOT_EXIST:
instart(MinionService, servicename, 'Salt Minion')
sys.exit(0)
if status[1] == win32service.SERVICE_RUNNING:
win32serviceutil.StopServiceWithDeps(servicename)
win32serviceutil.StartService(servicename)
else:
win32serviceutil.StartService(servicename)
if __name__ == '__main__':
_main()
| Revert "Catch and ignore CTRL_LOGOFF_EVENT when run as a windows service" | Revert "Catch and ignore CTRL_LOGOFF_EVENT when run as a windows service"
This reverts commit a7ddf81b37b578b1448f83b0efb4f7116de0c3fb.
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt |
eeb8057fb5ff65eb89e3b5a8ff94bf58adc511ee | utils/lit/tests/test-output.py | utils/lit/tests/test-output.py | # RUN: %{lit} -j 1 -v %{inputs}/test-data --output %t.results.out > %t.out
# RUN: FileCheck < %t.results.out %s
# CHECK: {
# CHECK: "__version__"
# CHECK: "elapsed"
# CHECK-NEXT: "tests": [
# CHECK-NEXT: {
# CHECK-NEXT: "code": "PASS",
# CHECK-NEXT: "elapsed": {{[0-9.]+}},
# CHECK-NEXT: "metrics": {
# CHECK-NEXT: "value0": 1,
# CHECK-NEXT: "value1": 2.3456
# CHECK-NEXT: }
# CHECK-NEXT: "name": "test-data :: bad&name.ini",
# CHECK-NEXT: "output": "& < > \""
# CHECK-NEXT: },
# CHECK-NEXT: {
# CHECK-NEXT: "code": "PASS",
# CHECK-NEXT: "elapsed": {{[0-9.]+}},
# CHECK-NEXT: "metrics": {
# CHECK-NEXT: "value0": 1,
# CHECK-NEXT: "value1": 2.3456
# CHECK-NEXT: }
# CHECK-NEXT: "name": "test-data :: metrics.ini",
# CHECK-NEXT: "output": "Test passed."
# CHECK-NEXT: }
# CHECK-NEXT: ]
# CHECK-NEXT: }
| # RUN: %{lit} -j 1 -v %{inputs}/test-data --output %t.results.out > %t.out
# RUN: FileCheck < %t.results.out %s
# CHECK: {
# CHECK: "__version__"
# CHECK: "elapsed"
# CHECK-NEXT: "tests": [
# CHECK-NEXT: {
# CHECK-NEXT: "code": "PASS",
# CHECK-NEXT: "elapsed": {{[0-9.]+}},
# CHECK-NEXT: "metrics": {
# CHECK-NEXT: "value0": 1,
# CHECK-NEXT: "value1": 2.3456
# CHECK-NEXT: }
# CHECK: "name": "test-data :: bad&name.ini",
# CHECK: "output": "& < > \""
# CHECK: ]
# CHECK-NEXT: }
| Refactor test incase results are backwards | Refactor test incase results are backwards
Looks like results can come in either way in this file. Loosen the ordering constraints.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@331945 91177308-0d34-0410-b5e6-96231b3b80d8
| Python | apache-2.0 | llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,apple/swift-llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm,apple/swift-llvm,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm |
356c56d7ebb2cc8e837308536c085b8dd399b01f | run.py | run.py | #!/usr/bin/env python
"""
TODO: Modify module doc.
"""
from __future__ import division
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Virtual Lab"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "7/30/14"
import os
os.environ["FLAMYNGO"] = os.path.join(os.environ["HOME"], ".flamyngo.yaml")
from flamyngo import app
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(debug=True, host='0.0.0.0', port=port)
| #!/usr/bin/env python
"""
TODO: Modify module doc.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Virtual Lab"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "7/30/14"
import os
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="""flamyngo is a basic Flask frontend for querying MongoDB collections""",
epilog="Author: Shyue Ping Ong")
parser.add_argument(
"-c", "--config", dest="config", type=str, nargs="?",
default=os.path.join(os.environ["HOME"], ".flamyngo.yaml"),
help="YAML file where the config is stored")
args = parser.parse_args()
port = int(os.environ.get("PORT", 5000))
os.environ["FLAMYNGO"] = args.config
from flamyngo import app
app.run(debug=True, host='0.0.0.0', port=port)
| Use argparse for more flexible usage. | Use argparse for more flexible usage.
| Python | bsd-3-clause | materialsvirtuallab/flamyngo,materialsvirtuallab/flamyngo,materialsvirtuallab/flamyngo |
b22b8c2249dc64d99e297dfe2ca24abbf30ec00d | st2common/st2common/models/api/stormbase.py | st2common/st2common/models/api/stormbase.py | from wsme import types as wtypes
from mirantas.resource import Resource
class BaseAPI(Resource):
# TODO: Does URI need a custom type?
uri = wtypes.text
name = wtypes.text
description = wtypes.text
id = wtypes.text
| from wsme import types as wtypes
from mirantis.resource import Resource
class BaseAPI(Resource):
# TODO: Does URI need a custom type?
uri = wtypes.text
name = wtypes.text
description = wtypes.text
id = wtypes.text
| Implement Staction Controller * Fixing mis-typed name. | [STORM-1] Implement Staction Controller
* Fixing mis-typed name.
| Python | apache-2.0 | punalpatel/st2,lakshmi-kannan/st2,armab/st2,StackStorm/st2,jtopjian/st2,emedvedev/st2,pixelrebel/st2,alfasin/st2,Plexxi/st2,pinterb/st2,Itxaka/st2,pixelrebel/st2,nzlosh/st2,nzlosh/st2,peak6/st2,grengojbo/st2,pinterb/st2,emedvedev/st2,lakshmi-kannan/st2,alfasin/st2,Itxaka/st2,punalpatel/st2,StackStorm/st2,StackStorm/st2,jtopjian/st2,dennybaa/st2,pixelrebel/st2,Plexxi/st2,dennybaa/st2,dennybaa/st2,tonybaloney/st2,Plexxi/st2,Plexxi/st2,emedvedev/st2,alfasin/st2,Itxaka/st2,peak6/st2,grengojbo/st2,nzlosh/st2,tonybaloney/st2,peak6/st2,StackStorm/st2,pinterb/st2,lakshmi-kannan/st2,armab/st2,armab/st2,jtopjian/st2,grengojbo/st2,tonybaloney/st2,punalpatel/st2,nzlosh/st2 |
b367e2919c0de02f3514dfac5c890ffd70603918 | src/nodeconductor_assembly_waldur/experts/filters.py | src/nodeconductor_assembly_waldur/experts/filters.py | import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
'name',
'type',
'state',
'customer_name',
'project_name',
'created',
'modified',
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| import django_filters
from nodeconductor.core import filters as core_filters
from . import models
class ExpertProviderFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(view_name='customer-detail', name='customer__uuid')
customer_uuid = django_filters.UUIDFilter(name='customer__uuid')
class Meta(object):
model = models.ExpertProvider
fields = []
class ExpertRequestFilter(django_filters.FilterSet):
name = django_filters.CharFilter(lookup_expr='icontains')
project = core_filters.URLFilter(view_name='project-detail', name='project__uuid')
project_uuid = django_filters.UUIDFilter(name='project__uuid')
o = django_filters.OrderingFilter(fields=(
('name', 'name'),
('type', 'type'),
('state', 'state'),
('project__customer__name', 'customer_name'),
('project__name', 'project_name'),
('created', 'created'),
('modified', 'modified'),
))
class Meta(object):
model = models.ExpertRequest
fields = ['state']
class ExpertBidFilter(django_filters.FilterSet):
request = core_filters.URLFilter(view_name='expert-request-detail', name='request__uuid')
request_uuid = django_filters.UUIDFilter(name='request__uuid')
class Meta(object):
model = models.ExpertBid
fields = []
| Fix expert request filter by customer and project name. | Fix expert request filter by customer and project name.
| Python | mit | opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur |
2f8c3ab7ecd0606069d524192c551e7be77ca461 | zhihudaily/views/with_image.py | zhihudaily/views/with_image.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import datetime
from flask import render_template, Blueprint
from zhihudaily.utils import make_request
from zhihudaily.cache import cache
image_ui = Blueprint('image_ui', __name__, template_folder='templates')
@image_ui.route('/withimage')
@cache.cached(timeout=900)
def with_image():
"""The page for 图片 UI."""
r = make_request('http://news.at.zhihu.com/api/1.2/news/latest')
(display_date, date, news_list) = get_news_info(r)
news_list = handle_image(news_list)
day_before = (
datetime.datetime.strptime(date, '%Y%m%d') - datetime.timedelta(1)
).strftime('%Y%m%d')
return render_template('with_image.html', lists=news_list,
display_date=display_date,
day_before=day_before,
is_today=True)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from flask import render_template, Blueprint, json
from zhihudaily.cache import cache
from zhihudaily.models import Zhihudaily
from zhihudaily.utils import Date
image_ui = Blueprint('image_ui', __name__, template_folder='templates')
@image_ui.route('/withimage')
@cache.cached(timeout=900)
def with_image():
"""The page for 图片 UI."""
day = Date()
news = Zhihudaily.select().where(Zhihudaily.date == int(day.today)).get()
return render_template('with_image.html',
lists=json.loads(news.json_news),
display_date=news.display_date,
day_before=day.day_before,
is_today=True)
| Switch to use database for image ui | Switch to use database for image ui
| Python | mit | lord63/zhihudaily,lord63/zhihudaily,lord63/zhihudaily |
5c405745c954c2aa6121ddd82fb13ffef11b3150 | pyp2rpm/utils.py | pyp2rpm/utils.py | import functools
from pyp2rpm import settings
def memoize_by_args(func):
"""Memoizes return value of a func based on args."""
memory = {}
@functools.wraps(func)
def memoized(*args):
if not args in memory.keys():
value = func(*args)
memory[args] = value
return memory[args]
return memoized
def license_from_trove(trove):
"""Finds out license from list of trove classifiers.
Args:
trove: list of trove classifiers
Returns:
Fedora name of the package license or empty string, if no licensing information is found in trove classifiers.
"""
license = []
for classifier in trove:
if classifier is None: continue
if 'License' in classifier != -1:
stripped = classifier.strip()
# if taken from EGG-INFO, begins with Classifier:
stripped = stripped[stripped.find('License'):]
if stripped in settings.TROVE_LICENSES:
license.append(settings.TROVE_LICENSES[stripped])
else:
license.append("Unknown License")
return ' and '.join(license)
| import functools
from pyp2rpm import settings
def memoize_by_args(func):
"""Memoizes return value of a func based on args."""
memory = {}
@functools.wraps(func)
def memoized(*args):
if not args in memory.keys():
value = func(*args)
memory[args] = value
return memory[args]
return memoized
def license_from_trove(trove):
"""Finds out license from list of trove classifiers.
Args:
trove: list of trove classifiers
Returns:
Fedora name of the package license or empty string, if no licensing information is found in trove classifiers.
"""
license = []
for classifier in trove:
if classifier is None: continue
if 'License' in classifier != -1:
stripped = classifier.strip()
# if taken from EGG-INFO, begins with Classifier:
stripped = stripped[stripped.find('License'):]
if stripped in settings.TROVE_LICENSES:
license.append(settings.TROVE_LICENSES[stripped])
return ' and '.join(license)
| Revert the commit "bc85b4e" to keep the current solution | Revert the commit "bc85b4e" to keep the current solution
| Python | mit | henrysher/spec4pypi |
ab81837b707280b960ca02675a85da7918d17fec | setuptools/command/bdist_rpm.py | setuptools/command/bdist_rpm.py | # This is just a kludge so that bdist_rpm doesn't guess wrong about the
# distribution name and version, if the egg_info command is going to alter
# them, another kludge to allow you to build old-style non-egg RPMs.
from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
class bdist_rpm(_bdist_rpm):
def initialize_options(self):
_bdist_rpm.initialize_options(self)
self.no_egg = None
def run(self):
self.run_command('egg_info') # ensure distro name is up-to-date
_bdist_rpm.run(self)
def _make_spec_file(self):
version = self.distribution.get_version()
rpmversion = version.replace('-','_')
spec = _bdist_rpm._make_spec_file(self)
line23 = '%define version '+version
line24 = '%define version '+rpmversion
spec = [
line.replace(
"Source0: %{name}-%{version}.tar",
"Source0: %{name}-%{unmangled_version}.tar"
).replace(
"setup.py install ",
"setup.py install --single-version-externally-managed "
).replace(
"%setup",
"%setup -n %{name}-%{unmangled_version}"
).replace(line23,line24)
for line in spec
]
spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
return spec
| # This is just a kludge so that bdist_rpm doesn't guess wrong about the
# distribution name and version, if the egg_info command is going to alter
# them, another kludge to allow you to build old-style non-egg RPMs.
from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
class bdist_rpm(_bdist_rpm):
def initialize_options(self):
_bdist_rpm.initialize_options(self)
self.no_egg = None
def run(self):
# ensure distro name is up-to-date
self.run_command('egg_info')
_bdist_rpm.run(self)
def _make_spec_file(self):
version = self.distribution.get_version()
rpmversion = version.replace('-','_')
spec = _bdist_rpm._make_spec_file(self)
line23 = '%define version ' + version
line24 = '%define version ' + rpmversion
spec = [
line.replace(
"Source0: %{name}-%{version}.tar",
"Source0: %{name}-%{unmangled_version}.tar"
).replace(
"setup.py install ",
"setup.py install --single-version-externally-managed "
).replace(
"%setup",
"%setup -n %{name}-%{unmangled_version}"
).replace(line23, line24)
for line in spec
]
insert_loc = spec.index(line24) + 1
unmangled_version = "%define unmangled_version " + version
spec.insert(insert_loc, unmangled_version)
return spec
| Adjust to match modern style conventions. | Adjust to match modern style conventions.
| Python | mit | pypa/setuptools,pypa/setuptools,pypa/setuptools |
58eb4b2b034d90f45b3daa12900f24a390bb4782 | setuptools/command/bdist_rpm.py | setuptools/command/bdist_rpm.py | # This is just a kludge so that bdist_rpm doesn't guess wrong about the
# distribution name and version, if the egg_info command is going to alter
# them, another kludge to allow you to build old-style non-egg RPMs.
from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
class bdist_rpm(_bdist_rpm):
def run(self):
# ensure distro name is up-to-date
self.run_command('egg_info')
_bdist_rpm.run(self)
def _make_spec_file(self):
version = self.distribution.get_version()
rpmversion = version.replace('-','_')
spec = _bdist_rpm._make_spec_file(self)
line23 = '%define version ' + version
line24 = '%define version ' + rpmversion
spec = [
line.replace(
"Source0: %{name}-%{version}.tar",
"Source0: %{name}-%{unmangled_version}.tar"
).replace(
"setup.py install ",
"setup.py install --single-version-externally-managed "
).replace(
"%setup",
"%setup -n %{name}-%{unmangled_version}"
).replace(line23, line24)
for line in spec
]
insert_loc = spec.index(line24) + 1
unmangled_version = "%define unmangled_version " + version
spec.insert(insert_loc, unmangled_version)
return spec
| from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
class bdist_rpm(_bdist_rpm):
"""
Override the default bdist_rpm behavior to do the following:
1. Run egg_info to ensure the name and version are properly calculated.
2. Always run 'install' using --single-version-externally-managed to
disable eggs in RPM distributions.
3. Replace dash with underscore in the version numbers for better RPM
compatibility.
"""
def run(self):
# ensure distro name is up-to-date
self.run_command('egg_info')
_bdist_rpm.run(self)
def _make_spec_file(self):
version = self.distribution.get_version()
rpmversion = version.replace('-','_')
spec = _bdist_rpm._make_spec_file(self)
line23 = '%define version ' + version
line24 = '%define version ' + rpmversion
spec = [
line.replace(
"Source0: %{name}-%{version}.tar",
"Source0: %{name}-%{unmangled_version}.tar"
).replace(
"setup.py install ",
"setup.py install --single-version-externally-managed "
).replace(
"%setup",
"%setup -n %{name}-%{unmangled_version}"
).replace(line23, line24)
for line in spec
]
insert_loc = spec.index(line24) + 1
unmangled_version = "%define unmangled_version " + version
spec.insert(insert_loc, unmangled_version)
return spec
| Replace outdated deprecating comments with a proper doc string. | Replace outdated deprecating comments with a proper doc string.
| Python | mit | pypa/setuptools,pypa/setuptools,pypa/setuptools |
678532961cbc676fb3b82fa58185b281a8a4a7b3 | rex/preconstrained_file_stream.py | rex/preconstrained_file_stream.py |
from angr.state_plugins.plugin import SimStatePlugin
from angr.storage.file import SimFileStream
class SimPreconstrainedFileStream(SimFileStream):
def __init__(self, name, preconstraining_handler=None, **kwargs):
super().__init__(name, **kwargs)
self.preconstraining_handler = preconstraining_handler
self._attempted_preconstraining = False
def read(self, pos, size, **kwargs):
if not self._attempted_preconstraining:
self._attempted_preconstraining = True
self.preconstraining_handler(self)
return super().read(pos, size, **kwargs)
@SimStatePlugin.memo
def copy(self, memo):
copied = super().copy(memo)
copied.preconstraining_handler = self.preconstraining_handler
copied._attempted_preconstraining = self._attempted_preconstraining
return copied
|
from angr.state_plugins.plugin import SimStatePlugin
from angr.storage.file import SimFileStream
class SimPreconstrainedFileStream(SimFileStream):
def __init__(self, name, preconstraining_handler=None, **kwargs):
super().__init__(name, **kwargs)
self.preconstraining_handler = preconstraining_handler
self._attempted_preconstraining = False
def read(self, pos, size, **kwargs):
if not self._attempted_preconstraining:
self._attempted_preconstraining = True
self.preconstraining_handler(self)
return super().read(pos, size, **kwargs)
@SimStatePlugin.memo
def copy(self, memo):
copied = super().copy(memo)
copied.preconstraining_handler = self.preconstraining_handler
copied._attempted_preconstraining = self._attempted_preconstraining
return copied
def __setstate__(self, state):
for attr, value in state.items():
setattr(self, attr, value)
def __getstate__(self):
d = super().__getstate__()
d['preconstraining_handler'] = None
return d
| Fix a bug that leads to failures in pickling. | SimPreconstrainedFileStream: Fix a bug that leads to failures in pickling.
| Python | bsd-2-clause | shellphish/rex,shellphish/rex |
91f503cd99dfa6fc6562afc1b627b6f8b0f1d91b | addons/l10n_ar/models/res_partner_bank.py | addons/l10n_ar/models/res_partner_bank.py | # Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, api, _
import stdnum.ar.cbu
def validate_cbu(cbu):
return stdnum.ar.cbu.validate(cbu)
class ResPartnerBank(models.Model):
_inherit = 'res.partner.bank'
@api.model
def _get_supported_account_types(self):
""" Add new account type named cbu used in Argentina """
res = super()._get_supported_account_types()
res.append(('cbu', _('CBU')))
return res
@api.model
def retrieve_acc_type(self, acc_number):
try:
validate_cbu(acc_number)
except Exception:
return super().retrieve_acc_type(acc_number)
return 'cbu'
| # Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, api, _
from odoo.exceptions import ValidationError
import stdnum.ar
import logging
_logger = logging.getLogger(__name__)
def validate_cbu(cbu):
try:
return stdnum.ar.cbu.validate(cbu)
except Exception as error:
msg = _("Argentinian CBU was not validated: %s" % repr(error))
_logger.log(25, msg)
raise ValidationError(msg)
class ResPartnerBank(models.Model):
_inherit = 'res.partner.bank'
@api.model
def _get_supported_account_types(self):
""" Add new account type named cbu used in Argentina """
res = super()._get_supported_account_types()
res.append(('cbu', _('CBU')))
return res
@api.model
def retrieve_acc_type(self, acc_number):
try:
validate_cbu(acc_number)
except Exception:
return super().retrieve_acc_type(acc_number)
return 'cbu'
| Fix ImportError: No module named 'stdnum.ar.cbu' | [FIX] l10n_ar: Fix ImportError: No module named 'stdnum.ar.cbu'
Since stdnum.ar.cbu is not available in odoo saas enviroment because is
using an old version of stdnum package, we add a try exept in order to
catch this and manage the error properly which is raise an exception and
leave a message in the log telling the user that the cbu was not able to
validate.
closes odoo/odoo#40383
X-original-commit: 25d483fc3fc05fd47c72c3d96c02fed12b998b0d
Signed-off-by: Josse Colpaert <[email protected]>
| Python | agpl-3.0 | ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo |
5cd0507e99d8f78597d225266ec09f6588308396 | tests/app/public_contracts/test_POST_notification.py | tests/app/public_contracts/test_POST_notification.py | from flask import json
from . import return_json_from_response, validate_v0
from tests import create_authorization_header
def _post_notification(client, template, url, to):
data = {
'to': to,
'template': str(template.id)
}
auth_header = create_authorization_header(service_id=template.service_id)
return client.post(
path=url,
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header]
)
def test_post_sms_contract(client, mocker, sample_template):
mocker.patch('app.celery.tasks.send_sms.apply_async')
mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
response_json = return_json_from_response(_post_notification(
client, sample_template, url='/notifications/sms', to='07700 900 855'
))
validate_v0(response_json, 'POST_notification_return_sms.json')
def test_post_email_contract(client, mocker, sample_email_template):
mocker.patch('app.celery.tasks.send_email.apply_async')
mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
response_json = return_json_from_response(_post_notification(
client, sample_email_template, url='/notifications/email', to='[email protected]'
))
validate_v0(response_json, 'POST_notification_return_email.json')
| from flask import json
from . import return_json_from_response, validate_v0
from tests import create_authorization_header
def _post_notification(client, template, url, to):
data = {
'to': to,
'template': str(template.id)
}
auth_header = create_authorization_header(service_id=template.service_id)
return client.post(
path=url,
data=json.dumps(data),
headers=[('Content-Type', 'application/json'), auth_header]
)
def test_post_sms_contract(client, mocker, sample_template):
mocker.patch('app.celery.provider_tasks.deliver_sms.apply_async')
mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
response_json = return_json_from_response(_post_notification(
client, sample_template, url='/notifications/sms', to='07700 900 855'
))
validate_v0(response_json, 'POST_notification_return_sms.json')
def test_post_email_contract(client, mocker, sample_email_template):
mocker.patch('app.celery.provider_tasks.deliver_email.apply_async')
mocker.patch('app.encryption.encrypt', return_value="something_encrypted")
response_json = return_json_from_response(_post_notification(
client, sample_email_template, url='/notifications/email', to='[email protected]'
))
validate_v0(response_json, 'POST_notification_return_email.json')
| Revert "Fixed faoiling jenkins tests. Mocked the required functions" | Revert "Fixed faoiling jenkins tests. Mocked the required functions"
This reverts commit 4b60c8dadaa413581cd373c9059ff95ecf751159.
| Python | mit | alphagov/notifications-api,alphagov/notifications-api |
4467ffe669eec09bab16f4e5a3256ed333c5d3d5 | rcamp/lib/ldap_utils.py | rcamp/lib/ldap_utils.py | from django.conf import settings
from ldapdb import escape_ldap_filter
import ldap
def authenticate(dn,pwd,ldap_conf_key):
# Setup connection
ldap_conf = settings.LDAPCONFS[ldap_conf_key]
server = ldap_conf['server']
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_ALLOW)
conn = ldap.initialize(server)
# Authenticate
try:
conn.simple_bind_s(dn, pwd)
return True
except ldap.INVALID_CREDENTIALS:
return False
def get_suffixed_username(username,organization):
try:
suffix = settings.ORGANIZATION_INFO[organization]['suffix']
except KeyError:
suffix = None
suffixed_username = username
if suffix:
suffixed_username = '{0}@{1}'.format(username,suffix)
return suffixed_username
def get_ldap_username_and_org(suffixed_username):
username = suffixed_username
org = 'ucb'
if '@' in suffixed_username:
username, suffix = suffixed_username.rsplit('@',1)
for k,v in settings.ORGANIZATION_INFO.iteritems():
if v['suffix'] == suffix:
org = k
break
return username, org
| from django.conf import settings
from ldapdb import escape_ldap_filter
import ldap
def authenticate(dn,pwd,ldap_conf_key):
# Setup connection
ldap_conf = settings.LDAPCONFS[ldap_conf_key]
server = ldap_conf['server']
ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_ALLOW)
conn = ldap.initialize(server, bytes_mode=False)
# Authenticate
try:
conn.simple_bind_s(dn, pwd)
return True
except ldap.INVALID_CREDENTIALS:
return False
def get_suffixed_username(username,organization):
try:
suffix = settings.ORGANIZATION_INFO[organization]['suffix']
except KeyError:
suffix = None
suffixed_username = username
if suffix:
suffixed_username = '{0}@{1}'.format(username,suffix)
return suffixed_username
def get_ldap_username_and_org(suffixed_username):
username = suffixed_username
org = 'ucb'
if '@' in suffixed_username:
username, suffix = suffixed_username.rsplit('@',1)
for k,v in settings.ORGANIZATION_INFO.iteritems():
if v['suffix'] == suffix:
org = k
break
return username, org
| Set bytes_mode=False for future compatability with Python3 | Set bytes_mode=False for future compatability with Python3
| Python | mit | ResearchComputing/RCAMP,ResearchComputing/RCAMP,ResearchComputing/RCAMP,ResearchComputing/RCAMP |
c872b9991ec1a80d03906cebfb43e71335ba9c26 | tests/run/generator_frame_cycle.py | tests/run/generator_frame_cycle.py | # mode: run
# tag: generator
import cython
import sys
def test_generator_frame_cycle():
"""
>>> test_generator_frame_cycle()
("I'm done",)
"""
testit = []
def whoo():
try:
yield
except:
yield
finally:
testit.append("I'm done")
g = whoo()
next(g)
# Frame object cycle
eval('g.throw(ValueError)', {'g': g})
del g
if cython.compiled:
# FIXME: this should not be necessary, but I can't see how to do it...
import gc; gc.collect()
return tuple(testit)
def test_generator_frame_cycle_with_outer_exc():
"""
>>> test_generator_frame_cycle_with_outer_exc()
("I'm done",)
"""
testit = []
def whoo():
try:
yield
except:
yield
finally:
testit.append("I'm done")
g = whoo()
next(g)
try:
raise ValueError()
except ValueError as exc:
assert sys.exc_info()[1] is exc, sys.exc_info()
# Frame object cycle
eval('g.throw(ValueError)', {'g': g})
assert sys.exc_info()[1] is exc, sys.exc_info()
del g
assert sys.exc_info()[1] is exc, sys.exc_info()
if cython.compiled:
# FIXME: this should not be necessary, but I can't see how to do it...
import gc; gc.collect()
return tuple(testit)
| # mode: run
# tag: generator
import cython
import sys
def test_generator_frame_cycle():
"""
>>> test_generator_frame_cycle()
("I'm done",)
"""
testit = []
def whoo():
try:
yield
except:
yield
finally:
testit.append("I'm done")
g = whoo()
next(g)
# Frame object cycle
eval('g.throw(ValueError)', {'g': g})
del g
return tuple(testit)
def test_generator_frame_cycle_with_outer_exc():
"""
>>> test_generator_frame_cycle_with_outer_exc()
("I'm done",)
"""
testit = []
def whoo():
try:
yield
except:
yield
finally:
testit.append("I'm done")
g = whoo()
next(g)
try:
raise ValueError()
except ValueError as exc:
assert sys.exc_info()[1] is exc, sys.exc_info()
# Frame object cycle
eval('g.throw(ValueError)', {'g': g})
# CPython 3.3 handles this incorrectly itself :)
if cython.compiled or sys.version_info[:2] not in [(3, 2), (3, 3)]:
assert sys.exc_info()[1] is exc, sys.exc_info()
del g
if cython.compiled or sys.version_info[:2] not in [(3, 2), (3, 3)]:
assert sys.exc_info()[1] is exc, sys.exc_info()
return tuple(testit)
| Fix a CPython comparison test in CPython 3.3 which was apparently fixed only in 3.4 and later. | Fix a CPython comparison test in CPython 3.3 which was apparently fixed only in 3.4 and later.
| Python | apache-2.0 | cython/cython,cython/cython,da-woods/cython,scoder/cython,cython/cython,scoder/cython,scoder/cython,cython/cython,da-woods/cython,da-woods/cython,scoder/cython,da-woods/cython |
e07db6a58217baf555b424d66f8996ec4bc7a02f | edgedb/lang/common/doc/sphinx/default_conf.py | edgedb/lang/common/doc/sphinx/default_conf.py | ##
# Copyright (c) 2011 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
"""Default Sphinx configuration file for metamagic projects"""
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo',
'sphinx.ext.coverage', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
html_static_path = ['_static']
intersphinx_mapping = {'python': ('http://docs.python.org/3.2', None)}
autoclass_content = 'both'
| ##
# Copyright (c) 2011 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
"""Default Sphinx configuration file for metamagic projects"""
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo',
'sphinx.ext.coverage', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
html_static_path = ['_static']
intersphinx_mapping = {'python': ('http://docs.python.org/3.3', None)}
autoclass_content = 'both'
| Drop json, bump copyright and Python version for intersphinx | doc: Drop json, bump copyright and Python version for intersphinx
| Python | apache-2.0 | edgedb/edgedb,edgedb/edgedb,edgedb/edgedb |
88f699690a48bc9e204c561443a53ca03dcf1ae6 | test/python_api/default-constructor/sb_type.py | test/python_api/default-constructor/sb_type.py | """
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetName()
obj.GetByteSize()
#obj.GetEncoding(5)
obj.GetNumberChildren(True)
member = lldb.SBTypeMember()
obj.GetChildAtIndex(True, 0, member)
obj.GetChildIndexForName(True, "_member_field")
obj.IsAPointerType()
obj.GetPointeeType()
obj.GetDescription(lldb.SBStream())
| """
Fuzz tests an object after the default construction to make sure it does not crash lldb.
"""
import sys
import lldb
def fuzz_obj(obj):
obj.GetName()
obj.GetByteSize()
#obj.GetEncoding(5)
obj.GetNumberChildren(True)
member = lldb.SBTypeMember()
obj.GetChildAtIndex(True, 0, member)
obj.GetChildIndexForName(True, "_member_field")
obj.IsAPointerType()
obj.GetPointeeType()
obj.GetDescription(lldb.SBStream())
obj.IsPointerType(None)
lldb.SBType.IsPointerType(None)
| Add fuzz calls for SBType::IsPointerType(void *opaque_type). | Add fuzz calls for SBType::IsPointerType(void *opaque_type).
git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@134551 91177308-0d34-0410-b5e6-96231b3b80d8
| Python | apache-2.0 | llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb |
4636c9394138534fc39cc5bdac373b97919ffd01 | server/info/services.py | server/info/services.py | """info services."""
from info.models import Article, News, Column
def get_column_object(uid):
"""Get column object."""
try:
obj = Column.objects.get(uid=uid)
except Column.DoesNotExist:
obj = None
return obj
def get_articles_by_column(uid):
"""Get_articles_by_column."""
queryset = Article.objects.filter(column__uid=uid).order_by('id')
return queryset
def get_columns_queryset():
"""Get_columns_queryset."""
queryset = Column.objects.all().order_by('-id')
return queryset
def get_article_queryset():
"""Get article queryset."""
queryset = Article.objects.all().order_by('-id')
return queryset
def get_article_object(uid):
"""Get article object."""
return Article.objects.get(uid=uid)
def get_news_queryset():
"""Get news queryset."""
return News.objects.all().order_by('-id')
| """info services."""
from info.models import Article, News, Column
def get_column_object(uid):
"""Get column object."""
try:
obj = Column.objects.get(uid=uid)
except Column.DoesNotExist:
obj = None
return obj
def get_articles_by_column(uid):
"""Get_articles_by_column."""
queryset = Article.objects.filter(
column__uid=uid
).order_by('id')
return queryset
def get_columns_queryset():
"""Get_columns_queryset."""
queryset = Column.objects.all().only('uid', 'name').order_by('-id')
return queryset
def get_article_queryset():
"""Get article queryset."""
queryset = Article.objects.all().order_by('-id')
return queryset
def get_article_object(uid):
"""Get article object."""
return Article.objects.get(uid=uid)
def get_news_queryset():
"""Get news queryset."""
return News.objects.all().order_by('-id')
| Modify django orm filter, add only | Modify django orm filter, add only
| Python | mit | istommao/codingcatweb,istommao/codingcatweb,istommao/codingcatweb |
b46727a6bf8c1d85e0f9f8828954440bc489f247 | panoptes_client/user.py | panoptes_client/user.py | from __future__ import absolute_import, division, print_function
from panoptes_client.panoptes import PanoptesObject, LinkResolver
class User(PanoptesObject):
_api_slug = 'users'
_link_slug = 'users'
_edit_attributes = ()
def avatar(self):
return User.http_get('{}/avatar'.format(self.id))[0]
LinkResolver.register(User)
LinkResolver.register(User, 'owner')
| from __future__ import absolute_import, division, print_function
from panoptes_client.panoptes import PanoptesObject, LinkResolver
class User(PanoptesObject):
_api_slug = 'users'
_link_slug = 'users'
_edit_attributes = ()
@property
def avatar(self):
return User.http_get('{}/avatar'.format(self.id))[0]
LinkResolver.register(User)
LinkResolver.register(User, 'owner')
| Change User.avatar to be a property | Change User.avatar to be a property
| Python | apache-2.0 | zooniverse/panoptes-python-client |
22230205402f7de77049da9c0f716d4fdc3099c3 | vdt/versionplugin/wheel/package.py | vdt/versionplugin/wheel/package.py | from glob import glob
import imp
import logging
import os
import subprocess
import mock
from setuptools import setup as _setup
from vdt.versionplugin.wheel.shared import parse_version_extra_args
from vdt.versionplugin.wheel.utils import WheelRunningDistribution
logger = logging.getLogger(__name__)
def build_package(version):
"""
In here should go code that runs you package building scripts.
"""
def fixed_version_setup(*args, **kwargs):
old_version = kwargs.pop('version')
base_version = ".".join(map(str, version.version))
python_version = "%src%s" % (base_version, version.build_number)
logging.info(
"Version in file is %s, using %s" % (
old_version, python_version))
_setup(
version=python_version,
distclass=WheelRunningDistribution, *args, **kwargs)
args, extra_args = parse_version_extra_args(version.extra_args)
with version.checkout_tag:
with mock.patch('setuptools.setup', fixed_version_setup):
imp.load_source('packagesetup', 'setup.py')
if args.build_dependencies:
build_dir = "%s/dist/" % os.getcwd()
wheels = glob("%s/*.whl" % build_dir)
cmd = ['pip', 'wheel'] + wheels
logger.debug("Running command {0}".format(" ".join(cmd)))
logger.debug(subprocess.check_output(cmd, cwd=build_dir))
return 0
| from glob import glob
import imp
import logging
import os
import subprocess
import mock
from setuptools import setup as _setup
from vdt.versionplugin.wheel.shared import parse_version_extra_args
from vdt.versionplugin.wheel.utils import WheelRunningDistribution
logger = logging.getLogger(__name__)
def build_package(version):
"""
In here should go code that runs you package building scripts.
"""
def fixed_version_setup(*args, **kwargs):
old_version = kwargs.pop('version')
base_version = ".".join(map(str, version.version))
python_version = base_version
if version.build_number is not None:
python_version = "%src%s" % (base_version, version.build_number)
logging.info(
"Version in file is %s, using %s" % (
old_version, python_version))
_setup(
version=python_version,
distclass=WheelRunningDistribution, *args, **kwargs)
args, extra_args = parse_version_extra_args(version.extra_args)
with version.checkout_tag:
with mock.patch('setuptools.setup', fixed_version_setup):
imp.load_source('packagesetup', 'setup.py')
if args.build_dependencies:
build_dir = "%s/dist/" % os.getcwd()
wheels = glob("%s/*.whl" % build_dir)
cmd = ['pip', 'wheel'] + wheels
logger.debug("Running command {0}".format(" ".join(cmd)))
logger.debug(subprocess.check_output(cmd, cwd=build_dir))
return 0
| Check if build number exists | Check if build number exists
(so we won't create a package with 'None' in it's name)
| Python | bsd-3-clause | devopsconsulting/vdt.versionplugin.wheel |
dd0ba5d4486983bd2c498efc46e7b3aa244935e8 | playserver/webserver.py | playserver/webserver.py | import flask
import track
app = flask.flask(__name__)
@app.route("/")
def root():
return "{} by {} - {}"
| import flask
from . import track
app = flask.flask(__name__)
@app.route("/")
def root():
return "{} by {} - {}"
| Fix track import for package | Fix track import for package
| Python | mit | ollien/playserver,ollien/playserver,ollien/playserver |
437ed5ee5e919186eabd1d71b0c1949adc1cf378 | src/orca/gnome-terminal.py | src/orca/gnome-terminal.py | # gnome-terminal script
import a11y
import speech
def onTextInserted (e):
if e.source.role != "terminal":
return
speech.say ("default", e.any_data)
def onTextDeleted (event):
"""Called whenever text is deleted from an object.
Arguments:
- event: the Event
"""
# Ignore text deletions from non-focused objects, unless the
# currently focused object is the parent of the object from which
# text was deleted
#
if (event.source != a11y.focusedObject) \
and (event.source.parent != a11y.focusedObject):
pass
else:
brlUpdateText (event.source)
| # gnome-terminal script
import a11y
import speech
import default
def onTextInserted (e):
if e.source.role != "terminal":
return
speech.say ("default", e.any_data)
def onTextDeleted (event):
"""Called whenever text is deleted from an object.
Arguments:
- event: the Event
"""
# Ignore text deletions from non-focused objects, unless the
# currently focused object is the parent of the object from which
# text was deleted
#
if (event.source != a11y.focusedObject) \
and (event.source.parent != a11y.focusedObject):
pass
else:
default.brlUpdateText (event.source)
| Call default.brlUpdateText instead of brlUpdateText (which was undefined) | Call default.brlUpdateText instead of brlUpdateText (which was undefined)
| Python | lgpl-2.1 | GNOME/orca,h4ck3rm1k3/orca-sonar,pvagner/orca,h4ck3rm1k3/orca-sonar,GNOME/orca,pvagner/orca,h4ck3rm1k3/orca-sonar,chrys87/orca-beep,chrys87/orca-beep,pvagner/orca,pvagner/orca,chrys87/orca-beep,GNOME/orca,chrys87/orca-beep,GNOME/orca |
45b3fc7babfbd922bdb174e5156f54c567a66de4 | plotly/tests/test_core/test_graph_objs/test_graph_objs_tools.py | plotly/tests/test_core/test_graph_objs/test_graph_objs_tools.py | from __future__ import absolute_import
from unittest import TestCase
| from __future__ import absolute_import
from unittest import TestCase
from plotly.graph_objs import graph_objs as go
from plotly.graph_objs import graph_objs_tools as got
class TestGetRole(TestCase):
def test_get_role_no_value(self):
# this is a bit fragile, but we pick a few stable values
# the location in the figure matters for this test!
fig = go.Figure(data=[{}])
fig.data[0].marker.color = 'red'
fig.layout.title = 'some-title'
parent_key_role_tuples = [
(fig.data[0], 'x', 'data'),
(fig.data[0], 'marker', 'object'),
(fig.data[0].marker, 'color', 'style'),
(fig.layout, 'title', 'info'),
(fig, 'data', 'object'),
]
for parent, key, role in parent_key_role_tuples:
self.assertEqual(got.get_role(parent, key), role, msg=key)
def test_get_role_with_value(self):
# some attributes are conditionally considered data if they're arrays
# the location in the figure matters for this test!
fig = go.Figure(data=[{}])
fig.data[0].marker.color = 'red'
parent_key_value_role_tuples = [
(fig.data[0], 'x', 'wh0cares', 'data'),
(fig.data[0], 'marker', 'wh0cares', 'object'),
(fig.data[0].marker, 'color', 'red', 'style'),
(fig.data[0].marker, 'color', ['red'], 'data')
]
for parent, key, value, role in parent_key_value_role_tuples:
self.assertEqual(got.get_role(parent, key, value), role,
msg=(key, value))
| Add some :tiger2:s for `graph_objs_tools.py`. | Add some :tiger2:s for `graph_objs_tools.py`. | Python | mit | plotly/plotly.py,plotly/python-api,plotly/plotly.py,plotly/python-api,plotly/plotly.py,plotly/python-api |
8cdd7a89ad6115b80ae57ed6cbb0d41abce09816 | src/tests/base/__init__.py | src/tests/base/__init__.py | import os
import sys
import time
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.conf import settings
from selenium import webdriver
# could use Chrome, Firefox, etc... here
BROWSER = os.environ.get('TEST_BROWSER', 'PhantomJS')
class BrowserTest(StaticLiveServerTestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
settings.DEBUG = ('--debug' in sys.argv)
def setUp(self):
self.driver = getattr(webdriver, BROWSER)()
self.driver.set_window_size(1920, 1080)
self.driver.implicitly_wait(10)
def tearDown(self):
self.driver.quit()
def scroll_into_view(self, element):
"""Scroll element into view"""
y = element.location['y']
self.driver.execute_script('window.scrollTo(0, {0})'.format(y))
def scroll_and_click(self, element):
self.scroll_into_view(element)
time.sleep(0.5)
element.click()
| import os
import sys
import time
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.conf import settings
from selenium import webdriver
# could use Chrome, Firefox, etc... here
BROWSER = os.environ.get('TEST_BROWSER', 'PhantomJS')
class BrowserTest(StaticLiveServerTestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
settings.DEBUG = ('--debug' in sys.argv)
def setUp(self):
if hasattr(webdriver, BROWSER):
self.driver = getattr(webdriver, BROWSER)()
else:
self.driver = webdriver.Remote(
desired_capabilities=webdriver.DesiredCapabilities.CHROME,
command_executor=BROWSER
)
self.driver.set_window_size(1920, 1080)
self.driver.implicitly_wait(10)
def tearDown(self):
self.driver.quit()
def scroll_into_view(self, element):
"""Scroll element into view"""
y = element.location['y']
self.driver.execute_script('window.scrollTo(0, {0})'.format(y))
def scroll_and_click(self, element):
self.scroll_into_view(element)
time.sleep(0.5)
element.click()
| Improve handling of remote test drivers | Improve handling of remote test drivers
| Python | apache-2.0 | Flamacue/pretix,Flamacue/pretix,lab2112/pretix,Unicorn-rzl/pretix,Unicorn-rzl/pretix,Flamacue/pretix,akuks/pretix,akuks/pretix,awg24/pretix,awg24/pretix,akuks/pretix,awg24/pretix,lab2112/pretix,lab2112/pretix,awg24/pretix,akuks/pretix,Unicorn-rzl/pretix,Flamacue/pretix,Unicorn-rzl/pretix,lab2112/pretix |
770781d3ce55a91926b91579e11d79ebb3edf47e | lms/djangoapps/api_manager/management/commands/migrate_orgdata.py | lms/djangoapps/api_manager/management/commands/migrate_orgdata.py | import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
migrated_org = Organization.objects.create(
name=data['name'],
display_name=data['display_name'],
contact_name=data['contact_name'],
contact_email=data['contact_email'],
contact_phone=data['contact_phone']
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
| import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
name = org.name
display_name = data.get('display_name', name)
contact_name = data.get('contact_name', None)
contact_email = data.get('email', None)
if contact_email is None:
contact_email = data.get('contact_email', None)
contact_phone = data.get('phone', None)
if contact_phone is None:
contact_phone = data.get('contact_phone', None)
migrated_org = Organization.objects.create(
name=name,
display_name=display_name,
contact_name=contact_name,
contact_email=contact_email,
contact_phone=contact_phone
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
| Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present | Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present
| Python | agpl-3.0 | edx-solutions/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform |
cc7f93d93cb2d7e4aed0329ce41785e419b07a92 | salt/__init__.py | salt/__init__.py | '''
Make me some salt!
'''
# Import python libs
import os
import optparse
# Import salt libs
import salt.master
import salt.minion
import salt.utils
class Master(object):
'''
Creates a master server
'''
class Minion(object):
'''
Create a minion server
'''
def __init__(self):
self.cli = self.__parse_cli()
self.opts = salt.utils.minion_config(self.cli)
def __parse_cli(self):
'''
Parse the cli input
'''
parser = optparse.OptionParser()
parser.add_option('-f',
'--foreground',
dest='foreground',
default=False,
action='store_true',
help='Run the minion in the foreground')
parser.add_option('-c',
'--config',
dest='config',
default='/etc/salt/minion',
help='Pass in an alternative configuration file')
options, args = parser.parse_args()
cli = {'foreground': options.foreground,
'config': options.config}
return cli
def start(self):
'''
Execute this method to start up a minion.
'''
minion = salt.Minion(opts)
minion.tune_in()
| '''
Make me some salt!
'''
# Import python libs
import os
import optparse
# Import salt libs
import salt.master
import salt.minion
import salt.utils
class Master(object):
'''
Creates a master server
'''
class Minion(object):
'''
Create a minion server
'''
def __init__(self):
self.cli = self.__parse_cli()
self.opts = salt.utils.minion_config(self.cli)
def __parse_cli(self):
'''
Parse the cli input
'''
parser = optparse.OptionParser()
parser.add_option('-f',
'--foreground',
dest='foreground',
default=False,
action='store_true',
help='Run the minion in the foreground')
parser.add_option('-c',
'--config',
dest='config',
default='/etc/salt/minion',
help='Pass in an alternative configuration file')
options, args = parser.parse_args()
cli = {'foreground': options.foreground,
'config': options.config}
return cli
def start(self):
'''
Execute this method to start up a minion.
'''
minion = salt.Minion(self.opts)
minion.tune_in()
| Fix incorrect reference to opts dict | Fix incorrect reference to opts dict
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt |
668a5240c29047d86fe9451f3078bb163bea0db9 | skan/__init__.py | skan/__init__.py | from .csr import skeleton_to_csgraph, branch_statistics, summarise
__all__ = ['skeleton_to_csgraph',
'branch_statistics',
'summarise'] | from .csr import skeleton_to_csgraph, branch_statistics, summarise
__version__ = '0.1-dev'
__all__ = ['skeleton_to_csgraph',
'branch_statistics',
'summarise']
| Add version info to package init | Add version info to package init
| Python | bsd-3-clause | jni/skan |
8ad4850941e299d9dad02cac0e300dc2021b81be | streak-podium/render.py | streak-podium/render.py | import pygal
def horizontal_bar(sorted_streaks, sort_attrib):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort_attrib.
"""
users = [user for user, _ in sorted_streaks][::-1]
streaks = [getattr(streak, sort_attrib) for _, streak in sorted_streaks][::-1]
chart = pygal.HorizontalStackedBar(show_y_labels=False,
show_x_labels=False,
show_legend=False,
print_values=True,
print_zeroes=False,
print_labels=True)
chart.title = 'Top contributors by {} streak'.format(sort_attrib)
chart.x_labels = users
values = []
for value, user in zip(streaks, users):
if value > 0:
values.append({
'value': value,
'label': user,
'xlink': 'https://github.com/{}'.format(user)
})
else:
values.append(0) # Let zeroes be boring
chart.add('Streaks', values)
chart.render_to_file('top.svg')
| import pygal
def horizontal_bar(sorted_streaks, sort):
"""
Render a horizontal bar chart of streaks.
Values have already been sorted by sort.
"""
users = [user for user, _ in sorted_streaks][::-1]
streaks = [getattr(streak, sort) for _, streak in sorted_streaks][::-1]
chart = pygal.HorizontalStackedBar(show_y_labels=False,
show_x_labels=False,
show_legend=False,
print_values=True,
print_zeroes=False,
print_labels=True)
chart.title = 'Top contributors by {} streak'.format(sort)
chart.x_labels = users
values = []
for value, user in zip(streaks, users):
if value > 0:
values.append({
'value': value,
'label': user,
'xlink': 'https://github.com/{}'.format(user)
})
else:
values.append(0) # Let zeroes be boring
chart.add('Streaks', values)
chart.render_to_file('top_{}.svg'.format(sort))
| Rename svg output based on sort attribute | Rename svg output based on sort attribute
| Python | mit | jollyra/hubot-streak-podium,jollyra/hubot-commit-streak,jollyra/hubot-commit-streak,supermitch/streak-podium,supermitch/streak-podium,jollyra/hubot-streak-podium |
2d9fce5715b2d7d5b920d2e77212f076e9ebd1be | staticgen_demo/staticgen_views.py | staticgen_demo/staticgen_views.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
class StaicgenDemoStaticViews(StaticgenView):
def items(self):
return (
'sitemap.xml',
'robots.txt',
'page_not_found',
'server_error',
)
staticgen_pool.register(StaicgenDemoStaticViews)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.utils import translation
from staticgen.staticgen_pool import staticgen_pool
from staticgen.staticgen_views import StaticgenView
class StaicgenDemoStaticViews(StaticgenView):
def items(self):
return (
'sitemap.xml',
'robots.txt',
'page_not_found',
'server_error',
)
staticgen_pool.register(StaicgenDemoStaticViews)
class StaticgenCMSView(StaticgenView):
def items(self):
try:
from cms.models import Title
except ImportError: # pragma: no cover
# django-cms is not installed.
return super(StaticgenCMSView, self).items()
items = Title.objects.public().filter(
page__login_required=False,
page__site_id=settings.SITE_ID,
).order_by('page__path')
return items
def url(self, obj):
translation.activate(obj.language)
url = obj.page.get_absolute_url(obj.language)
translation.deactivate()
return url
staticgen_pool.register(StaticgenCMSView)
| Add CMS Pages to staticgen registry. | Add CMS Pages to staticgen registry.
| Python | bsd-3-clause | mishbahr/staticgen-demo,mishbahr/staticgen-demo,mishbahr/staticgen-demo |
4d73eb2a7e06e1e2607a2abfae1063b9969e70a0 | strichliste/strichliste/models.py | strichliste/strichliste/models.py | from django.db import models
from django.db.models import Sum
class User(models.Model):
name = models.CharField(max_length=254, unique=True)
create_date = models.DateTimeField(auto_now_add=True)
active = models.BooleanField(default=True)
mail_address = models.EmailField(null=True)
@property
def last_transaction(self):
try:
return self.transactions.last().create_date
except AttributeError:
return None
@property
def balance(self):
return self.transactions.aggregate(sum=Sum('value'))['sum'] or 0
def to_full_dict(self):
return {'id': self.id, 'name': self.name, 'mail_address': self.mail_address,
'balance': self.balance, 'last_transaction': self.last_transaction}
def to_dict(self):
return {'id': self.id, 'name': self.name, 'balance': self.balance, 'last_transaction': self.last_transaction}
def __str__(self):
return self.name
class Transaction(models.Model):
user = models.ForeignKey('User', related_name='transactions',
on_delete=models.PROTECT, db_index=True)
create_date = models.DateTimeField(auto_now_add=True)
value = models.IntegerField()
def to_dict(self):
return {'id': self.id,
'create_date': self.create_date,
'value': self.value}
class Meta:
ordering = ('create_date',)
| from django.db import models
from django.db.models import Sum
class User(models.Model):
name = models.CharField(max_length=254, unique=True)
create_date = models.DateTimeField(auto_now_add=True)
active = models.BooleanField(default=True)
mail_address = models.EmailField(null=True)
@property
def last_transaction(self):
try:
return self.transactions.last().create_date
except AttributeError:
return None
@property
def balance(self):
return self.transactions.aggregate(sum=Sum('value'))['sum'] or 0
def to_full_dict(self):
return {'id': self.id, 'name': self.name, 'mail_address': self.mail_address,
'balance': self.balance, 'last_transaction': self.last_transaction}
def to_dict(self):
return {'id': self.id, 'name': self.name, 'balance': self.balance, 'last_transaction': self.last_transaction}
def __str__(self):
return self.name
class Transaction(models.Model):
user = models.ForeignKey('User', related_name='transactions',
on_delete=models.PROTECT, db_index=True)
create_date = models.DateTimeField(auto_now_add=True)
value = models.IntegerField()
def to_dict(self):
return {'id': self.id,
'create_date': self.create_date,
'value': self.value,
'user': self.user_id}
class Meta:
ordering = ('create_date',)
| Add user_id to returned transactions | Add user_id to returned transactions
| Python | mit | Don42/strichliste-django,hackerspace-bootstrap/strichliste-django |
0f1cb413503034cbc1e2deddd8327ad1946201fe | numba2/compiler/optimizations/throwing.py | numba2/compiler/optimizations/throwing.py | # -*- coding: utf-8 -*-
"""
Rewrite exceptions that are thrown and caught locally to jumps.
"""
from numba2.compiler import excmodel
from pykit.optimizations import local_exceptions
def rewrite_local_exceptions(func, env):
local_exceptions.run(func, env, exc_model=excmodel.ExcModel(env))
def rewrite_exceptions(func, env):
for op in func.ops:
if op.opcode == 'exc_throw':
raise NotImplementedError("Exception throwing", op, func)
if op.opcode in ('exc_catch', 'exc_setup'):
op.delete()
| # -*- coding: utf-8 -*-
"""
Rewrite exceptions that are thrown and caught locally to jumps.
"""
from numba2.compiler import excmodel
from pykit.analysis import cfa
from pykit.optimizations import local_exceptions
def rewrite_local_exceptions(func, env):
local_exceptions.run(func, env, exc_model=excmodel.ExcModel(env))
def rewrite_exceptions(func, env):
blocks = set()
for op in func.ops:
if op.opcode == 'exc_throw':
raise NotImplementedError("Exception throwing", op, func)
if op.opcode in ('exc_catch', 'exc_setup'):
blocks.add(op.block)
op.delete()
update_outdated_incoming_blocks(func, blocks)
def update_outdated_incoming_blocks(func, candidates):
"""
Update phi nodes in blocks previously containing 'exc_catch'. 'exc_setup'
may span many blocks, and none, or only a subset of those blocks may be
actual predecessors.
"""
cfg = cfa.cfg(func)
for block in candidates:
preds = cfg.predecessors(block)
for op in block.leaders:
if op.opcode == 'phi':
blocks, values = op.args
newblocks = [block for block in blocks if block in preds]
newvalues = [val for block, val in zip(blocks, values)
if block in preds]
op.set_args([newblocks, newvalues]) | Rewrite phis from outdated incoming exception blocks | Rewrite phis from outdated incoming exception blocks
| Python | bsd-2-clause | flypy/flypy,flypy/flypy |
dcf8622f6b40ba41f67638614cf3754b17005d4d | pombola/south_africa/templatetags/za_speeches.py | pombola/south_africa/templatetags/za_speeches.py | from django import template
register = template.Library()
@register.inclusion_tag('speeches/_section_prev_next_links.html')
def section_prev_next_links(section):
next_section = section.get_next_node()
prev_section = section.get_previous_node()
return {
"next": next_section,
"previous": prev_section,
}
| import datetime
from django import template
from speeches.models import Section
register = template.Library()
# NOTE: this code is far from ideal. Sharing it with others in a pull request
# to get opinions about how to improve.
# TODO:
# - cache results of min_speech_datetime and section_prev_next_links (both of
# which will be called multiple times with same input)
@register.inclusion_tag('speeches/_section_prev_next_links.html')
def section_prev_next_links(section):
return {
"next": get_neighboring_section(section, +1),
"previous": get_neighboring_section(section, -1),
}
def get_neighboring_section(section, direction):
"""
This code is specific to the section hierarchy that is used for the
questions and hansard in the SayIt for ZA.
This is essentially:
hansard
2012
March
13
Some section (has speeches)
and
Questions
Minister of Foo
16 Oct 2009 (has speeches)
"""
# These lines lightly modified from https://github.com/mysociety/sayit/blob/master/speeches/models.py#L356-L369
# 'root' is set to be the section's parent, and s/self/section/, some
# formatting changes
if not section.parent:
return None
tree = section.parent.get_descendants
idx = tree.index(section)
lvl = tree[idx].level
same_level = [ s for s in tree if s.level == lvl ]
idx = same_level.index(section)
if direction == -1 and idx == 0:
return None
try:
return same_level[idx+direction]
except:
return None
| Change next/prev finding logic to stay in same section | [1119] Change next/prev finding logic to stay in same section
This uses code from speeches.models._get_next_previous_node.
Thanks to Matthew Somerville for that suggestion.
| Python | agpl-3.0 | patricmutwiri/pombola,hzj123/56th,mysociety/pombola,geoffkilpin/pombola,patricmutwiri/pombola,mysociety/pombola,patricmutwiri/pombola,mysociety/pombola,hzj123/56th,patricmutwiri/pombola,hzj123/56th,patricmutwiri/pombola,ken-muturi/pombola,ken-muturi/pombola,patricmutwiri/pombola,ken-muturi/pombola,hzj123/56th,geoffkilpin/pombola,ken-muturi/pombola,geoffkilpin/pombola,ken-muturi/pombola,mysociety/pombola,hzj123/56th,ken-muturi/pombola,geoffkilpin/pombola,mysociety/pombola,hzj123/56th,geoffkilpin/pombola,geoffkilpin/pombola,mysociety/pombola |
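The rewritten get_neighboring_section above reduces to picking the previous or next element among same-level siblings; a standalone sketch of that core step, with invented sample data:

def neighbor(same_level, current, direction):
    # direction is +1 for the next sibling, -1 for the previous one.
    idx = same_level.index(current)
    new_idx = idx + direction
    if 0 <= new_idx < len(same_level):
        return same_level[new_idx]
    return None

sections = ['2012-03-12', '2012-03-13', '2012-03-14']
assert neighbor(sections, '2012-03-13', +1) == '2012-03-14'
assert neighbor(sections, '2012-03-12', -1) is None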
211b7b28e2d8c7ed0e0f67bea1a1a68b520a53b1 | pagerduty_events_api/pagerduty_service.py | pagerduty_events_api/pagerduty_service.py | from pagerduty_events_api.pagerduty_incident import PagerdutyIncident
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyService:
def __init__(self, key):
self.__service_key = key
def get_service_key(self):
return self.__service_key
def trigger(self, description, additional_params={}):
payload = {'service_key': self.__service_key,
'event_type': 'trigger',
'description': description}
incident_data = PagerdutyRestClient().post(
self.__append_additional_info_to_payload(payload, additional_params)
)
return PagerdutyIncident(self.__service_key, incident_data['incident_key'])
@staticmethod
def __append_additional_info_to_payload(mandatory_data, additional_data):
return {**additional_data, **mandatory_data}
| from pagerduty_events_api.pagerduty_incident import PagerdutyIncident
from pagerduty_events_api.pagerduty_rest_client import PagerdutyRestClient
class PagerdutyService:
def __init__(self, key):
self.__service_key = key
def get_service_key(self):
return self.__service_key
def trigger(self, description, additional_params={}):
incident = PagerdutyIncident(self.__service_key)
incident.trigger(description, additional_params)
return incident
| Use "blank" PD incident instance for triggering through PD service. | Use "blank" PD incident instance for triggering through PD service.
| Python | mit | BlasiusVonSzerencsi/pagerduty-events-api |
f90fac30454537ec0727371ffc54bde4a1e2f78d | 5_control_statements_and_exceptions_hierarchy/guess-a-number-ex.py | 5_control_statements_and_exceptions_hierarchy/guess-a-number-ex.py | """
This is an example of the control structures.
"""
result = ""
our_number = 21
def test_number(answer):
answer = int(answer)
if answer == our_number:
return "got it right"
elif answer > our_number:
return "nope, lower"
else:
return "nope, higher"
while result != "got it right":
result = test_number(raw_input("Choose a number:"))
print result
| """
This is an example of the control structures.
"""
if __name__ == "__main__":
result = ""
our_number = 21
def test_number(answer):
answer = int(answer)
if answer == our_number:
return "got it right"
elif answer > our_number:
return "nope, lower"
else:
return "nope, higher"
while result != "got it right":
result = test_number(raw_input("Choose a number:"))
print result
| Put the code in __main__ for lesson 5 guess-a-number example. | Put the code in __main__ for lesson 5 guess-a-number example.
| Python | mit | razzius/PyClassLessons,razzius/PyClassLessons,razzius/PyClassLessons,razzius/PyClassLessons,PyClass/PyClassLessons,noisebridge/PythonClass,noisebridge/PythonClass,noisebridge/PythonClass,PyClass/PyClassLessons,noisebridge/PythonClass,PyClass/PyClassLessons |
524d5427d54342f26008a5b527140d4158f70edf | tests/test_extension.py | tests/test_extension.py | from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
| from __future__ import unicode_literals
import json
from test_helpers import MockTrack, get_websocket, make_frontend, patched_bot
from mopidy_tachikoma import Extension
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[tachikoma]' in config
assert 'enabled = true' in config
assert 'slack_token = ' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'slack_token' in schema
@patched_bot
def test_can_connect():
make_frontend()
@patched_bot
def test_gets_events():
frontend = make_frontend()
frontend.doSlackLoop(
None, MockTrack(),
[{"type": "message", "channel": "mock_channel"}])
data = json.loads(get_websocket().data)
assert {
'channel': 'mock_channel',
'text': 'Now playing *foo* from *bar*',
'type': 'message'} == data
@patched_bot
def test_says_one_thing_per_channel():
frontend = make_frontend()
song = MockTrack()
get_websocket().data = None # make sure it's cleared
frontend.doSlackLoop(
song, song, [{"type": "message", "channel": "mock_channel"}])
assert get_websocket().data is None # same song, no info
| Clear websocket data to try and fix Travis | Clear websocket data to try and fix Travis
| Python | agpl-3.0 | palfrey/mopidy-tachikoma,palfrey/mopidy-tachikoma |
87d2e511b0fedd2a09610c35337336d443a756a4 | tests/unit/cli/filewatch/test_stat.py | tests/unit/cli/filewatch/test_stat.py | import os
from chalice.cli.filewatch import stat
class FakeOSUtils(object):
def __init__(self):
self.initial_scan = True
def walk(self, rootdir):
yield 'rootdir', [], ['bad-file', 'baz']
if self.initial_scan:
self.initial_scan = False
def joinpath(self, *parts):
return os.path.join(*parts)
def mtime(self, path):
if self.initial_scan:
return 1
if path.endswith('bad-file'):
raise OSError("Bad file")
return 2
def test_can_ignore_stat_errors():
calls = []
def callback(*args, **kwargs):
calls.append((args, kwargs))
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
assert len(calls) == 1
| import os
import time
from chalice.cli.filewatch import stat
class FakeOSUtils(object):
def __init__(self):
self.initial_scan = True
def walk(self, rootdir):
yield 'rootdir', [], ['bad-file', 'baz']
if self.initial_scan:
self.initial_scan = False
def joinpath(self, *parts):
return os.path.join(*parts)
def mtime(self, path):
if self.initial_scan:
return 1
if path.endswith('bad-file'):
raise OSError("Bad file")
return 2
def test_can_ignore_stat_errors():
calls = []
def callback(*args, **kwargs):
calls.append((args, kwargs))
watcher = stat.StatFileWatcher(FakeOSUtils())
watcher.watch_for_file_changes('rootdir', callback)
for _ in range(10):
if len(calls) == 1:
break
time.sleep(0.2)
else:
raise AssertionError("Expected callback to be invoked but was not.")
| Add polling loop to allow time for callback to be invoked | Add polling loop to allow time for callback to be invoked
| Python | apache-2.0 | awslabs/chalice |
6d60adad1caffdf35d0285a4d765a1f000efa12a | ckanext/latvian_theme/plugin.py | ckanext/latvian_theme/plugin.py | import ckan.plugins as plugins
import ckan.plugins.toolkit as toolkit
class Latvian_ThemePlugin(plugins.SingletonPlugin):
plugins.implements(plugins.IConfigurer)
# IConfigurer
def update_config(self, config_):
toolkit.add_template_directory(config_, 'templates')
toolkit.add_public_directory(config_, 'public')
toolkit.add_resource('fanstatic', 'latvian_theme')
class Latvian_AuthPlugin(plugins.SingletonPlugin):
plugins.implements(plugins.IAuthFunctions)
def auth_user_list(self, context, data_dict):
user = context["model"].User.get(context.get('user'))
#ignore_auth is used in email sending code
if context.ignore_auth is True:
return {'success': True}
#sysadmin can see anything
elif user.sysadmin:
return {'success': True}
#all else fails
else:
return {'success': False}
#IAuthFunctions
def get_auth_functions(self):
return {"user_list": self.auth_user_list}
| import ckan.plugins as plugins
import ckan.plugins.toolkit as toolkit
class Latvian_ThemePlugin(plugins.SingletonPlugin):
plugins.implements(plugins.IConfigurer)
# IConfigurer
def update_config(self, config_):
toolkit.add_template_directory(config_, 'templates')
toolkit.add_public_directory(config_, 'public')
toolkit.add_resource('fanstatic', 'latvian_theme')
class Latvian_AuthPlugin(plugins.SingletonPlugin):
plugins.implements(plugins.IAuthFunctions)
def auth_user_list(self, context, data_dict):
user = context["model"].User.get(context.get('user'))
#ignore_auth is used in email sending code
if context.get('ignore_auth') is True:
return {'success': True}
#sysadmin can see anything
elif user.sysadmin:
return {'success': True}
#all else fails
else:
return {'success': False}
#IAuthFunctions
def get_auth_functions(self):
return {"user_list": self.auth_user_list}
| Fix for a small problem | Fix for a small problem
| Python | agpl-3.0 | dpp-dev/ckanext-latvian-theme,dpp-dev/ckanext-latvian-theme,dpp-dev/ckanext-latvian-theme,dpp-dev/ckanext-latvian-theme |
ce12cd0f56997dc6d33a9e4e7c13df27d05a133b | Python/Tests/TestData/DebuggerProject/ThreadJoin.py | Python/Tests/TestData/DebuggerProject/ThreadJoin.py | from threading import Thread
global exit_flag
exit_flag = False
def g():
i = 1
while not exit_flag:
i = (i + 1) % 100000000
if i % 100000 == 0: print("f making progress: {0}".format(i))
def f():
g()
from threading import Thread
def n():
t1 = Thread(target=f,name="F_thread")
t1.start()
t1.join()
def m():
n()
if __name__ == '__main__':
m()
| from threading import Thread
global exit_flag
exit_flag = False
def g():
i = 1
while not exit_flag:
i = (i + 1) % 100000000
if i % 100000 == 0: print("f making progress: {0}".format(i))
def f():
g()
def n():
t1 = Thread(target=f,name="F_thread")
t1.start()
t1.join()
def m():
n()
if __name__ == '__main__':
m()
| Remove redundant import from test script. | Remove redundant import from test script.
| Python | apache-2.0 | zooba/PTVS,zooba/PTVS,huguesv/PTVS,int19h/PTVS,huguesv/PTVS,huguesv/PTVS,Microsoft/PTVS,int19h/PTVS,zooba/PTVS,int19h/PTVS,int19h/PTVS,huguesv/PTVS,Microsoft/PTVS,int19h/PTVS,Microsoft/PTVS,zooba/PTVS,Microsoft/PTVS,int19h/PTVS,Microsoft/PTVS,zooba/PTVS,huguesv/PTVS,zooba/PTVS,Microsoft/PTVS,huguesv/PTVS |
d40fa3554847a239f90a7f7edec8efbf30c753f0 | scripts/lib/check_for_course_revisions.py | scripts/lib/check_for_course_revisions.py | import json
from .load_data_from_file import load_data_from_file
from .get_old_dict_values import get_old_dict_values
from .log import log
from .paths import make_course_path
def load_previous(course_path):
try:
prior_data = load_data_from_file(course_path)
prior = json.loads(prior_data)
except FileNotFoundError:
prior = None
revisions = []
# print(course_path, revisions)
if prior and ('revisions' in prior):
revisions = prior['revisions']
del prior['revisions']
return (prior, revisions or [])
def check_for_revisions(course):
prior, revisions = load_previous(make_course_path(course['clbid']))
if not prior:
return None
diff = get_old_dict_values(prior, course)
if diff:
revisions.append(diff)
log('revision in %d:' % (course['clbid']), diff)
if revisions and (('revisions' not in course) or (revisions != course.get('revisions'))):
return revisions
return None
| from collections import OrderedDict
import json
from .load_data_from_file import load_data_from_file
from .get_old_dict_values import get_old_dict_values
from .log import log
from .paths import make_course_path
def load_previous(course_path):
try:
prior_data = load_data_from_file(course_path)
prior = json.loads(prior_data)
except FileNotFoundError:
prior = None
revisions = []
# print(course_path, revisions)
if prior and ('revisions' in prior):
revisions = prior['revisions']
del prior['revisions']
return (prior, revisions or [])
def check_for_revisions(course):
prior, revisions = load_previous(make_course_path(course['clbid']))
if not prior:
return None
diff = get_old_dict_values(prior, course)
ordered_diff = OrderedDict()
for key in sorted(diff.keys()):
ordered_diff[key] = diff[key]
if ordered_diff:
revisions.append(ordered_diff)
log('revision in %d:' % (course['clbid']), ordered_diff)
if revisions and (('revisions' not in course) or (revisions != course.get('revisions'))):
return revisions
return None
| Use an ordereddict for sorting revisions | Use an ordereddict for sorting revisions
| Python | mit | StoDevX/course-data-tools,StoDevX/course-data-tools |
e7942afdc1e93aec57e4e02d862a91eab9b5c0cb | trackingtermites/termite.py | trackingtermites/termite.py | from collections import namedtuple
class Termite:
def __init__(self, label, color):
self.label = label
self.color = color
self.trail = []
self.tracker = None
def to_csv(self):
with open('data/{}-trail.csv'.format(self.label), mode='w') as trail_out:
trail_out.write('label,frame,time,x,y\n')
for record in self.trail:
trail_out.write('{},{},{},{},{},{},{}\n'.format(self.label,
record.frame, record.time, record.x, record.y,
record.xoffset, record.yoffset))
| from collections import namedtuple
class Termite:
def __init__(self, label, color):
self.label = label
self.color = color
self.trail = []
self.tracker = None
def to_csv(self):
with open('data/{}-trail.csv'.format(self.label), mode='w') as trail_out:
trail_out.write('label,frame,time,x,y,xoffset,yoffset\n')
for record in self.trail:
trail_out.write('{},{},{},{},{},{},{}\n'.format(self.label,
record.frame, record.time, record.x, record.y,
record.xoffset, record.yoffset))
| Include missing columns in output | Include missing columns in output
| Python | mit | dmrib/trackingtermites |
b0814b95ea854f7b3f0b9db48ae9beee078c2a30 | versions/software/openjdk.py | versions/software/openjdk.py | import re
from versions.software.utils import get_command_stderr, get_soup, \
get_text_between
def name():
"""Return the precise name for the software."""
return 'Zulu OpenJDK'
def installed_version():
"""Return the installed version of the jdk, or None if not installed."""
try:
version_string = get_command_stderr(('java', '-version'))
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
def downloadable_version(url):
"""Strip the version out of the Zulu OpenJDK manual download link."""
# example: http://cdn.azul.com/.../zulu8.23.0.3-jdk8.0.144-win_x64.zip
filename = url[url.rfind('/') + 1:]
jdk_version = get_text_between(filename, '-jdk', '-')
version, update = jdk_version.rsplit('.', 1)
return f'1.{version}_{update}'
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
div = soup.find('div', class_='latest_area')
if div:
zip_filename = re.compile('\.zip$')
tag = div.find('a', class_='r-download', href=zip_filename)
if tag:
return downloadable_version(tag.attrs['href'])
return 'Unknown'
| import re
from versions.software.utils import get_command_stderr, get_soup, \
get_text_between
def name():
"""Return the precise name for the software."""
return 'Zulu OpenJDK'
def installed_version():
"""Return the installed version of the jdk, or None if not installed."""
try:
version_string = get_command_stderr(('java', '-version'))
# "1.8.0_162" or "9.0.4.1" for example
return get_text_between(version_string, '"', '"')
except FileNotFoundError:
pass
def latest_version():
"""Return the latest version of Zulu OpenJDK available for download."""
installed = installed_version()
soup = get_soup('http://www.azul.com/downloads/zulu/zulu-windows/')
if soup:
zip_filename = re.compile('\.zip$')
for tag in soup.find_all('a', class_='r-download', href=zip_filename):
filename = tag.attrs['href']
zulu = get_text_between(filename, 'bin/zulu', '-')
jdk = get_text_between(filename, 'jdk', '-')
if (installed is None) or (installed[0] == '9' and zulu[0] == '9'):
return zulu
elif installed[0] == '1' and jdk[0] == installed[2]:
version, update = jdk.rsplit('.', 1)
return f'1.{version}_{update}'
return 'Unknown'
| Update OpenJDK version to support both 8 and 9. | Update OpenJDK version to support both 8 and 9.
| Python | mit | mchung94/latest-versions |
daa5de8071bc0694115dce3d8cc1a3733e269910 | py/ops/itests/test_deps.py | py/ops/itests/test_deps.py | import unittest
from subprocess import call, check_call, check_output
import os.path
from .fixtures import Fixture
@Fixture.inside_container
class DepsTest(Fixture, unittest.TestCase):
def test_install_deps(self):
# Ensure rkt is not installed
self.assertEqual(1, call(['which', 'rkt']))
# The current latest version is 1.25.0
cmd = ('python3 -m ops.onboard --verbose deps install rkt:latest'
.split())
# Save test time if we have a local tarball
if os.path.exists('/tmp/tarballs/rkt-v1.25.0.tar.gz'):
cmd.extend(['--tarball', '/tmp/tarballs/rkt-v1.25.0.tar.gz'])
check_call(cmd)
output = check_output(['rkt', 'version'])
self.assertTrue(b'rkt Version: 1.25.0' in output, repr(output))
output = check_output(['rkt', 'image', 'list'])
self.assertTrue(
b'coreos.com/rkt/stage1-coreos:1.25.0' in output,
repr(output),
)
if __name__ == '__main__':
unittest.main()
| import unittest
from subprocess import call, check_call, check_output
import os.path
from .fixtures import Fixture
@Fixture.inside_container
class DepsTest(Fixture, unittest.TestCase):
def test_install_deps(self):
# Ensure rkt is not installed
self.assertEqual(1, call(['which', 'rkt']))
# The current latest version is 1.29.0
cmd = ('python3 -m ops.onboard --verbose deps install rkt:latest'
.split())
# Save test time if we have a local tarball
if os.path.exists('/tmp/tarballs/rkt-v1.29.0.tar.gz'):
cmd.extend(['--tarball', '/tmp/tarballs/rkt-v1.29.0.tar.gz'])
check_call(cmd)
output = check_output(['rkt', 'version'])
self.assertTrue(b'rkt Version: 1.29.0' in output, repr(output))
output = check_output(['rkt', 'image', 'list'])
self.assertTrue(
b'coreos.com/rkt/stage1-coreos:1.29.0' in output,
repr(output),
)
if __name__ == '__main__':
unittest.main()
| Update ops integration test rkt version | Update ops integration test rkt version
| Python | mit | clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage |
79a2671c32c558aeb429c590c255f3092dba7e0b | zeus/api/resources/user_builds.py | zeus/api/resources/user_builds.py | from sqlalchemy.orm import contains_eager, joinedload, subqueryload_all
from zeus import auth
from zeus.config import db
from zeus.models import Author, Build, Email, Source, User
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class UserBuildsResource(Resource):
def get(self, user_id):
"""
Return a list of builds for the given user.
"""
if user_id == 'me':
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
else:
user = User.query.get(user_id)
query = Build.query.options(
joinedload('repository'),
contains_eager('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).join(
Source,
Build.source_id == Source.id,
).filter(
Source.author_id.in_(db.session.query(Author.id).filter(Author.email.in_(
db.session.query(Email.email).filter(
Email.user_id == user.id
)
)))
).order_by(Build.number.desc())
return self.paginate_with_schema(builds_schema, query)
| from sqlalchemy.orm import contains_eager, joinedload, subqueryload_all
from zeus import auth
from zeus.config import db
from zeus.models import Author, Build, Email, Source, User
from .base import Resource
from ..schemas import BuildSchema
builds_schema = BuildSchema(many=True, strict=True)
class UserBuildsResource(Resource):
def get(self, user_id):
"""
Return a list of builds for the given user.
"""
if user_id == 'me':
user = auth.get_current_user()
if not user:
return self.error('not authenticated', 401)
else:
user = User.query.get(user_id)
query = Build.query.options(
joinedload('repository'),
contains_eager('source'),
joinedload('source').joinedload('author'),
joinedload('source').joinedload('revision'),
joinedload('source').joinedload('patch'),
subqueryload_all('stats'),
).join(
Source,
Build.source_id == Source.id,
).filter(
Source.author_id.in_(db.session.query(Author.id).filter(Author.email.in_(
db.session.query(Email.email).filter(
Email.user_id == user.id
)
)))
).order_by(Build.number.date_created())
return self.paginate_with_schema(builds_schema, query)
| Use date_created for "My Builds" sort | fix: Use date_created for "My Builds" sort
| Python | apache-2.0 | getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus |
3b4c645792c1a58cdce3dc25171723e7139d66da | workflows/api/permissions.py | workflows/api/permissions.py | from rest_framework import permissions
from workflows.models import *
class IsAdminOrSelf(permissions.BasePermission):
def has_permission(self, request, view):
if request.user and request.user.is_authenticated():
# Don't allow adding widgets to workflows not owned by the user
if view.model == Widget and 'workflow' in request.data:
serializer = view.serializer_class(data=request.data)
serializer.is_valid()
workflow = serializer.validated_data['workflow']
return workflow.user == request.user
if view.model == Workflow and 'staff_pick' in request.data:
return request.user.is_staff
else:
return True
def has_object_permission(self, request, view, obj):
if request.user and request.user.is_authenticated():
if request.user.is_superuser:
return True
# Allow only editing of the user's workflow objects
if isinstance(obj, Workflow):
return obj.user == request.user
if isinstance(obj, Widget):
return obj.workflow.user == request.user
if isinstance(obj, Connection):
return obj.workflow.user == request.user
if isinstance(obj, Input):
return obj.widget.workflow.user == request.user
if isinstance(obj, Output):
return obj.widget.workflow.user == request.user
return False
| from rest_framework import permissions
from workflows.models import *
class IsAdminOrSelf(permissions.BasePermission):
def has_permission(self, request, view):
if request.user and request.user.is_authenticated():
# Don't allow adding widgets to workflows not owned by the user
if view.model == Widget and 'workflow' in request.data:
serializer = view.serializer_class(data=request.data)
serializer.is_valid()
workflow = serializer.validated_data['workflow']
if request.GET.get('preview', '0') == '1':
if workflow.public:
return True
return workflow.user == request.user
if view.model == Workflow and 'staff_pick' in request.data:
return request.user.is_staff
else:
return True
def has_object_permission(self, request, view, obj):
if request.user and request.user.is_authenticated():
if request.user.is_superuser:
return True
# Allow only editing of the user's workflow objects
if isinstance(obj, Workflow):
return obj.user == request.user
if isinstance(obj, Widget):
return obj.workflow.user == request.user
if isinstance(obj, Connection):
return obj.workflow.user == request.user
if isinstance(obj, Input):
return obj.widget.workflow.user == request.user
if isinstance(obj, Output):
return obj.widget.workflow.user == request.user
return False
| Return True for preview if workflow public | Return True for preview if workflow public
| Python | mit | xflows/clowdflows-backend,xflows/clowdflows-backend,xflows/clowdflows-backend,xflows/clowdflows-backend |
452ad6f3de797285a50094a4a145714e75204d95 | bake/cmdline.py | bake/cmdline.py | #!/usr/bin/env python
# encoding: utf-8
# This is the command line interface for bake. For people who want to take
# bake.py and extend it for their own circumstances, modifying the main routine
# in this module is probably the best place to start.
import api as bake
import sys
# This
def main(args=sys.argv[1:]):
# Set up command line argument options
optparser = bake.make_optparser()
options, arguments = optparser.parse_args()
bake.process_options(options)
## Configuration is stored in the bake.cfg file in the current directory
config = bake.load_config()
## End processing of command line parameters
## Prepare for big loop
# The overwrite command pushes lines onto the top of the bake parameter file
if options.overwrite:
lines = options.overwrite
else:
lines = []
# Load bake parameter file
hin = open(options.file,'r')
lines += hin.readlines()
hin.close()
# This mixIterator object is kind of the core of bake.
(label, tokens,
mixIterator) = bake.make_iterator(config['label']['label_tag'],
config['label']['pattern'],
lines, options.slice_start,
options.slice_end)
## This is the main loop, iterating over each set of values
bake.default_loop(label, tokens, mixIterator, config, options)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# encoding: utf-8
# This is the command line interface for bake. For people who want to take
# bake.py and extend it for their own circumstances, modifying the main routine
# in this module is probably the best place to start.
import api as bake
import sys
def main(args=sys.argv[1:]):
# Set up command line argument options
optparser = bake.make_optparser()
options, arguments = optparser.parse_args()
bake.process_options(options)
## Configuration is stored in the bake.cfg file in the current directory
config = bake.load_config()
## End processing of command line parameters
## Prepare for big loop
# The overwrite command pushes lines onto the top of the bake parameter file
if options.overwrite:
lines = options.overwrite
else:
lines = []
# Load bake parameter file
hin = open(options.file, 'r')
lines += hin.readlines()
hin.close()
# This mixIterator object is kind of the core of bake.
(label, tokens,
mixIterator) = bake.make_iterator(config['label']['label_tag'],
config['label']['pattern'],
lines, options.slice_start,
options.slice_end)
## This is the main loop, iterating over each set of values
bake.default_loop(label, tokens, mixIterator, config, options)
if __name__ == '__main__':
main()
| Make pep8 run mostly cleanly | Make pep8 run mostly cleanly
| Python | mit | AlexSzatmary/bake |
d5cf661b2658d7f9a0f5436444373202e514bf37 | src/psd_tools2/__init__.py | src/psd_tools2/__init__.py | from __future__ import absolute_import, unicode_literals
from .api.psd_image import PSDImage
| from __future__ import absolute_import, unicode_literals
from .api.psd_image import PSDImage
from .api.composer import compose
| Include compose in the top level | Include compose in the top level
| Python | mit | kmike/psd-tools,psd-tools/psd-tools,kmike/psd-tools |
2fea7b008336e1960efb375c63a4cc14053bc590 | src/wikicurses/__init__.py | src/wikicurses/__init__.py | import pkgutil
from enum import IntEnum
_data = pkgutil.get_data('wikicurses', 'interwiki.list').decode()
wikis = dict([i.split('|')[0:2] for i in _data.splitlines() if i[0]!='#'])
class formats(IntEnum):
i, b, blockquote = (1<<i for i in range(3))
| import pkgutil
from enum import Enum
_data = pkgutil.get_data('wikicurses', 'interwiki.list').decode()
wikis = dict([i.split('|')[0:2] for i in _data.splitlines() if i[0]!='#'])
class BitEnum(int, Enum):
def __new__(cls, *args):
value = 1 << len(cls.__members__)
return int.__new__(cls, value)
formats = BitEnum("formats", "i b blockquote")
| Create BitEnum class for bitfields | Create BitEnum class for bitfields
| Python | mit | ids1024/wikicurses |
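The BitEnum above gives each member the next power of two, so members behave as combinable bit flags; a short usage sketch that mirrors the committed class (the member names here are hypothetical, and the behavior assumes the same Python 3 enum semantics the original relied on):

from enum import Enum

class BitEnum(int, Enum):
    def __new__(cls, *args):
        value = 1 << len(cls.__members__)
        return int.__new__(cls, value)

styles = BitEnum("styles", "i b blockquote")
assert (styles.i, styles.b, styles.blockquote) == (1, 2, 4)
mask = styles.i | styles.b
assert mask & styles.b  # bitwise tests work because members are ints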
e3a1d4998494143491b49312673ceb84ea98b7f8 | RatS/tmdb/tmdb_ratings_inserter.py | RatS/tmdb/tmdb_ratings_inserter.py | import time
from RatS.base.base_ratings_uploader import RatingsUploader
from RatS.tmdb.tmdb_site import TMDB
class TMDBRatingsInserter(RatingsUploader):
def __init__(self, args):
super(TMDBRatingsInserter, self).__init__(TMDB(args), args)
self.url_for_csv_file_upload = self._get_url_for_csv_upload()
self.css_id_of_file_input_element = 'csv_file'
self.xpath_selector_for_submit_button = "//form[@name='import_csv']//input[@type='submit']"
def _get_url_for_csv_upload(self):
return 'https://www.themoviedb.org/account/{username}/import'.format(
username=self.site.USERNAME
)
def pre_upload_action(self):
cookie_accept_button = self.site.browser.find_element_by_id('cookie_notice')\
.find_elements_by_class_name('accept')
if cookie_accept_button is not None and len(cookie_accept_button) > 0:
cookie_accept_button[0].click()
time.sleep(1)
| import time
from RatS.base.base_ratings_uploader import RatingsUploader
from RatS.tmdb.tmdb_site import TMDB
class TMDBRatingsInserter(RatingsUploader):
def __init__(self, args):
super(TMDBRatingsInserter, self).__init__(TMDB(args), args)
self.url_for_csv_file_upload = self._get_url_for_csv_upload()
self.css_id_of_file_input_element = 'csv_file'
self.xpath_selector_for_submit_button = "//form[@name='import_csv']//input[@type='submit']"
def _get_url_for_csv_upload(self):
return 'https://www.themoviedb.org/settings/import-list'
def pre_upload_action(self):
cookie_accept_button = self.site.browser.find_element_by_id('cookie_notice')\
.find_elements_by_class_name('accept')
if cookie_accept_button is not None and len(cookie_accept_button) > 0:
cookie_accept_button[0].click()
time.sleep(1)
| Adjust TMDB import page URL | Adjust TMDB import page URL
| Python | agpl-3.0 | StegSchreck/RatS,StegSchreck/RatS,StegSchreck/RatS |
989966444e63336b59da04265dbeb901258f75c1 | us_ignite/snippets/management/commands/snippets_load_fixtures.py | us_ignite/snippets/management/commands/snippets_load_fixtures.py | from django.core.management.base import BaseCommand
from us_ignite.snippets.models import Snippet
FIXTURES = [
{
'slug': 'home-box',
'name': 'UP NEXT: LOREM IPSUM',
'body': '',
'url_text': 'GET INVOLVED',
'url': '',
},
{
'slug': 'featured',
'name': 'FEATURED CONTENT',
'body': '',
'url_text': 'FEATURED',
'url': '',
},
{
'slug': 'welcome-email',
'name': 'Welcome to US Ignite',
'body': '',
'url_text': '',
'url': '',
},
{
'slug': 'blog-sidebar',
'name': 'Dynamic content',
'body': '',
'url_text': '',
'url': '',
},
{
'slug': 'profile-welcome',
'name': 'Welcome message in the profile',
'body': 'Lorem ipsum',
'url_text': '',
'url': '',
},
]
class Command(BaseCommand):
def handle(self, *args, **options):
for data in FIXTURES:
try:
# Ignore existing snippets:
Snippet.objects.get(slug=data['slug'])
continue
except Snippet.DoesNotExist:
pass
data.update({
'status': Snippet.PUBLISHED,
})
Snippet.objects.create(**data)
print u'Importing %s' % data['slug']
print "Done!"
| from django.core.management.base import BaseCommand
from us_ignite.snippets.models import Snippet
FIXTURES = [
{
'slug': 'home-box',
'name': 'UP NEXT: LOREM IPSUM',
'body': '',
'url_text': 'GET INVOLVED',
'url': '',
},
{
'slug': 'featured',
'name': 'FEATURED CONTENT',
'body': '',
'url_text': 'FEATURED',
'url': '',
},
{
'slug': 'welcome-email',
'name': 'Welcome to US Ignite',
'body': '',
'url_text': '',
'url': '',
},
{
'slug': 'blog-sidebar',
'name': 'Blog sidebar featured content.',
'body': '',
'url_text': '',
'url': '',
},
{
'slug': 'profile-welcome',
'name': 'Welcome message in the profile',
'body': 'Lorem ipsum',
'url_text': '',
'url': '',
},
]
class Command(BaseCommand):
def handle(self, *args, **options):
for data in FIXTURES:
try:
# Ignore existing snippets:
Snippet.objects.get(slug=data['slug'])
continue
except Snippet.DoesNotExist:
pass
data.update({
'status': Snippet.PUBLISHED,
})
Snippet.objects.create(**data)
print u'Importing %s' % data['slug']
print "Done!"
| Update description of the blog sidebar snippet. | Update description of the blog sidebar snippet.
| Python | bsd-3-clause | us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite |
7718608741b7126e9239af71d8b2e140dce81303 | common/djangoapps/microsite_configuration/templatetags/microsite.py | common/djangoapps/microsite_configuration/templatetags/microsite.py | """
Template tags and helper functions for displaying breadcrumbs in page titles
based on the current micro site.
"""
from django import template
from django.conf import settings
from microsite_configuration.middleware import MicrositeConfiguration
register = template.Library()
def page_title_breadcrumbs(*crumbs, **kwargs):
"""
This function creates a suitable page title in the form:
Specific | Less Specific | General | edX
It will output the correct platform name for the request.
Pass in a `separator` kwarg to override the default of " | "
"""
separator = kwargs.get("separator", " | ")
if crumbs:
return '{}{}{}'.format(separator.join(crumbs), separator, platform_name())
else:
return platform_name()
@register.simple_tag(name="page_title_breadcrumbs", takes_context=True)
def page_title_breadcrumbs_tag(context, *crumbs):
"""
Django template that creates breadcrumbs for page titles:
{% page_title_breadcrumbs "Specific" "Less Specific" General %}
"""
return page_title_breadcrumbs(*crumbs)
@register.simple_tag(name="platform_name")
def platform_name():
"""
Django template tag that outputs the current platform name:
{% platform_name %}
"""
return MicrositeConfiguration.get_microsite_configuration_value('platform_name', settings.PLATFORM_NAME) | """
Template tags and helper functions for displaying breadcrumbs in page titles
based on the current micro site.
"""
from django import template
from django.conf import settings
from microsite_configuration.middleware import MicrositeConfiguration
register = template.Library()
def page_title_breadcrumbs(*crumbs, **kwargs):
"""
This function creates a suitable page title in the form:
Specific | Less Specific | General | edX
It will output the correct platform name for the request.
Pass in a `separator` kwarg to override the default of " | "
"""
separator = kwargs.get("separator", " | ")
if crumbs:
return u'{}{}{}'.format(separator.join(crumbs), separator, platform_name())
else:
return platform_name()
@register.simple_tag(name="page_title_breadcrumbs", takes_context=True)
def page_title_breadcrumbs_tag(context, *crumbs):
"""
Django template that creates breadcrumbs for page titles:
{% page_title_breadcrumbs "Specific" "Less Specific" General %}
"""
return page_title_breadcrumbs(*crumbs)
@register.simple_tag(name="platform_name")
def platform_name():
"""
Django template tag that outputs the current platform name:
{% platform_name %}
"""
return MicrositeConfiguration.get_microsite_configuration_value('platform_name', settings.PLATFORM_NAME)
| Fix unicode error in subsection | Fix unicode error in subsection
| Python | agpl-3.0 | kxliugang/edx-platform,ZLLab-Mooc/edx-platform,rhndg/openedx,beni55/edx-platform,chudaol/edx-platform,beni55/edx-platform,openfun/edx-platform,atsolakid/edx-platform,romain-li/edx-platform,jonathan-beard/edx-platform,torchingloom/edx-platform,deepsrijit1105/edx-platform,stvstnfrd/edx-platform,ubc/edx-platform,nttks/jenkins-test,xingyepei/edx-platform,CourseTalk/edx-platform,teltek/edx-platform,philanthropy-u/edx-platform,naresh21/synergetics-edx-platform,zhenzhai/edx-platform,bigdatauniversity/edx-platform,hkawasaki/kawasaki-aio8-1,don-github/edx-platform,mushtaqak/edx-platform,hkawasaki/kawasaki-aio8-1,bitifirefly/edx-platform,nanolearning/edx-platform,jamiefolsom/edx-platform,kxliugang/edx-platform,abdoosh00/edraak,JioEducation/edx-platform,auferack08/edx-platform,msegado/edx-platform,J861449197/edx-platform,appliedx/edx-platform,cselis86/edx-platform,torchingloom/edx-platform,hastexo/edx-platform,pepeportela/edx-platform,stvstnfrd/edx-platform,olexiim/edx-platform,leansoft/edx-platform,edry/edx-platform,ovnicraft/edx-platform,IndonesiaX/edx-platform,cpennington/edx-platform,hamzehd/edx-platform,mitocw/edx-platform,shabab12/edx-platform,chudaol/edx-platform,teltek/edx-platform,lduarte1991/edx-platform,pomegranited/edx-platform,ovnicraft/edx-platform,atsolakid/edx-platform,zerobatu/edx-platform,nanolearningllc/edx-platform-cypress-2,unicri/edx-platform,franosincic/edx-platform,jamiefolsom/edx-platform,eduNEXT/edx-platform,eduNEXT/edunext-platform,wwj718/edx-platform,B-MOOC/edx-platform,nttks/edx-platform,ahmadiga/min_edx,OmarIthawi/edx-platform,SravanthiSinha/edx-platform,hmcmooc/muddx-platform,shubhdev/openedx,Edraak/circleci-edx-platform,chrisndodge/edx-platform,ubc/edx-platform,pomegranited/edx-platform,xuxiao19910803/edx-platform,xinjiguaike/edx-platform,solashirai/edx-platform,nikolas/edx-platform,msegado/edx-platform,pabloborrego93/edx-platform,procangroup/edx-platform,shurihell/testasia,edx-solutions/edx-platform,Semi-global/edx-platform,ESOedX/edx-platform,vasyarv/edx-platform,J861449197/edx-platform,martynovp/edx-platform,rue89-tech/edx-platform,AkA84/edx-platform,etzhou/edx-platform,hkawasaki/kawasaki-aio8-0,rue89-tech/edx-platform,valtech-mooc/edx-platform,chrisndodge/edx-platform,shubhdev/edx-platform,UXE/local-edx,arifsetiawan/edx-platform,mbareta/edx-platform-ft,MakeHer/edx-platform,romain-li/edx-platform,nanolearning/edx-platform,playm2mboy/edx-platform,RPI-OPENEDX/edx-platform,halvertoluke/edx-platform,halvertoluke/edx-platform,kmoocdev2/edx-platform,msegado/edx-platform,yokose-ks/edx-platform,prarthitm/edxplatform,devs1991/test_edx_docmode,rismalrv/edx-platform,jelugbo/tundex,Livit/Livit.Learn.EdX,xuxiao19910803/edx,rhndg/openedx,jelugbo/tundex,AkA84/edx-platform,solashirai/edx-platform,beacloudgenius/edx-platform,knehez/edx-platform,xuxiao19910803/edx,ampax/edx-platform,chauhanhardik/populo,knehez/edx-platform,nanolearningllc/edx-platform-cypress,DefyVentures/edx-platform,ovnicraft/edx-platform,solashirai/edx-platform,fintech-circle/edx-platform,nanolearningllc/edx-platform-cypress-2,hkawasaki/kawasaki-aio8-2,Lektorium-LLC/edx-platform,olexiim/edx-platform,mjirayu/sit_academy,Kalyzee/edx-platform,louyihua/edx-platform,vismartltd/edx-platform,DefyVentures/edx-platform,kmoocdev/edx-platform,jamiefolsom/edx-platform,nanolearningllc/edx-platform-cypress,chauhanhardik/populo,waheedahmed/edx-platform,pepeportela/edx-platform,MSOpenTech/edx-platform,Ayub-Khan/edx-platform,don-github/edx-platform,ahmadio/edx-platform,jazkarta/edx-platform,gymnasium/edx-platf
orm,Edraak/edx-platform,eestay/edx-platform,nanolearning/edx-platform,kursitet/edx-platform,JCBarahona/edX,jamesblunt/edx-platform,mahendra-r/edx-platform,doismellburning/edx-platform,jelugbo/tundex,alexthered/kienhoc-platform,openfun/edx-platform,motion2015/a3,IndonesiaX/edx-platform,proversity-org/edx-platform,kxliugang/edx-platform,prarthitm/edxplatform,rhndg/openedx,jswope00/griffinx,ak2703/edx-platform,dkarakats/edx-platform,pomegranited/edx-platform,naresh21/synergetics-edx-platform,sudheerchintala/LearnEraPlatForm,simbs/edx-platform,nikolas/edx-platform,motion2015/edx-platform,CourseTalk/edx-platform,mtlchun/edx,mbareta/edx-platform-ft,morenopc/edx-platform,analyseuc3m/ANALYSE-v1,hkawasaki/kawasaki-aio8-2,UXE/local-edx,longmen21/edx-platform,shurihell/testasia,JioEducation/edx-platform,jazztpt/edx-platform,EDUlib/edx-platform,carsongee/edx-platform,hkawasaki/kawasaki-aio8-1,kmoocdev/edx-platform,chrisndodge/edx-platform,unicri/edx-platform,utecuy/edx-platform,inares/edx-platform,motion2015/a3,OmarIthawi/edx-platform,don-github/edx-platform,deepsrijit1105/edx-platform,defance/edx-platform,chand3040/cloud_that,amir-qayyum-khan/edx-platform,zofuthan/edx-platform,xingyepei/edx-platform,jamesblunt/edx-platform,miptliot/edx-platform,y12uc231/edx-platform,cecep-edu/edx-platform,shubhdev/edx-platform,nttks/edx-platform,mahendra-r/edx-platform,jjmiranda/edx-platform,waheedahmed/edx-platform,doganov/edx-platform,sudheerchintala/LearnEraPlatForm,J861449197/edx-platform,defance/edx-platform,synergeticsedx/deployment-wipro,deepsrijit1105/edx-platform,y12uc231/edx-platform,Unow/edx-platform,jswope00/GAI,DefyVentures/edx-platform,unicri/edx-platform,kmoocdev/edx-platform,morenopc/edx-platform,tanmaykm/edx-platform,shubhdev/openedx,B-MOOC/edx-platform,Shrhawk/edx-platform,philanthropy-u/edx-platform,JioEducation/edx-platform,dsajkl/reqiop,jolyonb/edx-platform,xuxiao19910803/edx-platform,jazztpt/edx-platform,kmoocdev2/edx-platform,zofuthan/edx-platform,xuxiao19910803/edx,zadgroup/edx-platform,xinjiguaike/edx-platform,bitifirefly/edx-platform,yokose-ks/edx-platform,procangroup/edx-platform,ahmadio/edx-platform,beacloudgenius/edx-platform,miptliot/edx-platform,polimediaupv/edx-platform,pabloborrego93/edx-platform,vikas1885/test1,dcosentino/edx-platform,ZLLab-Mooc/edx-platform,zerobatu/edx-platform,a-parhom/edx-platform,DefyVentures/edx-platform,msegado/edx-platform,marcore/edx-platform,hkawasaki/kawasaki-aio8-0,mitocw/edx-platform,sameetb-cuelogic/edx-platform-test,andyzsf/edx,pku9104038/edx-platform,antonve/s4-project-mooc,jbzdak/edx-platform,UXE/local-edx,LearnEra/LearnEraPlaftform,teltek/edx-platform,dsajkl/reqiop,bigdatauniversity/edx-platform,shubhdev/edx-platform,mjirayu/sit_academy,morenopc/edx-platform,xinjiguaike/edx-platform,andyzsf/edx,doganov/edx-platform,peterm-itr/edx-platform,beni55/edx-platform,mahendra-r/edx-platform,Edraak/circleci-edx-platform,eemirtekin/edx-platform,bdero/edx-platform,benpatterson/edx-platform,antonve/s4-project-mooc,gsehub/edx-platform,unicri/edx-platform,appsembler/edx-platform,devs1991/test_edx_docmode,shubhdev/edxOnBaadal,cyanna/edx-platform,bigdatauniversity/edx-platform,TeachAtTUM/edx-platform,tiagochiavericosta/edx-platform,zhenzhai/edx-platform,cecep-edu/edx-platform,longmen21/edx-platform,ESOedX/edx-platform,cecep-edu/edx-platform,nikolas/edx-platform,torchingloom/edx-platform,LICEF/edx-platform,franosincic/edx-platform,jswope00/griffinx,adoosii/edx-platform,nttks/edx-platform,JCBarahona/edX,xuxiao19910803/edx-platform,4eek/edx-platform,arifsetiawan/edx-pla
tform,msegado/edx-platform,ampax/edx-platform,benpatterson/edx-platform,ferabra/edx-platform,fintech-circle/edx-platform,JioEducation/edx-platform,caesar2164/edx-platform,UOMx/edx-platform,Ayub-Khan/edx-platform,longmen21/edx-platform,mitocw/edx-platform,jswope00/GAI,cpennington/edx-platform,4eek/edx-platform,jbassen/edx-platform,Endika/edx-platform,zadgroup/edx-platform,benpatterson/edx-platform,nanolearning/edx-platform,jonathan-beard/edx-platform,mbareta/edx-platform-ft,shubhdev/openedx,LICEF/edx-platform,antoviaque/edx-platform,IONISx/edx-platform,jelugbo/tundex,EDUlib/edx-platform,franosincic/edx-platform,Shrhawk/edx-platform,raccoongang/edx-platform,TeachAtTUM/edx-platform,ZLLab-Mooc/edx-platform,romain-li/edx-platform,valtech-mooc/edx-platform,ahmadiga/min_edx,alu042/edx-platform,sameetb-cuelogic/edx-platform-test,nanolearningllc/edx-platform-cypress-2,jamesblunt/edx-platform,jazkarta/edx-platform,jazkarta/edx-platform-for-isc,gsehub/edx-platform,WatanabeYasumasa/edx-platform,marcore/edx-platform,DNFcode/edx-platform,ubc/edx-platform,JCBarahona/edX,edry/edx-platform,romain-li/edx-platform,nttks/jenkins-test,zadgroup/edx-platform,ovnicraft/edx-platform,Kalyzee/edx-platform,amir-qayyum-khan/edx-platform,lduarte1991/edx-platform,ferabra/edx-platform,ampax/edx-platform-backup,cecep-edu/edx-platform,kmoocdev/edx-platform,jruiperezv/ANALYSE,ampax/edx-platform-backup,torchingloom/edx-platform,antoviaque/edx-platform,defance/edx-platform,devs1991/test_edx_docmode,nanolearningllc/edx-platform-cypress-2,Unow/edx-platform,atsolakid/edx-platform,wwj718/ANALYSE,shubhdev/edxOnBaadal,Lektorium-LLC/edx-platform,martynovp/edx-platform,edx/edx-platform,CredoReference/edx-platform,alexthered/kienhoc-platform,motion2015/edx-platform,zubair-arbi/edx-platform,LearnEra/LearnEraPlaftform,ESOedX/edx-platform,fly19890211/edx-platform,ahmadiga/min_edx,xingyepei/edx-platform,JCBarahona/edX,don-github/edx-platform,olexiim/edx-platform,jruiperezv/ANALYSE,abdoosh00/edraak,hkawasaki/kawasaki-aio8-2,zhenzhai/edx-platform,vismartltd/edx-platform,simbs/edx-platform,chand3040/cloud_that,Lektorium-LLC/edx-platform,ferabra/edx-platform,BehavioralInsightsTeam/edx-platform,shashank971/edx-platform,leansoft/edx-platform,etzhou/edx-platform,MakeHer/edx-platform,caesar2164/edx-platform,stvstnfrd/edx-platform,antoviaque/edx-platform,dcosentino/edx-platform,sudheerchintala/LearnEraPlatForm,jazkarta/edx-platform-for-isc,longmen21/edx-platform,dkarakats/edx-platform,nanolearningllc/edx-platform-cypress,kmoocdev2/edx-platform,edx-solutions/edx-platform,chudaol/edx-platform,jbassen/edx-platform,AkA84/edx-platform,chauhanhardik/populo_2,Softmotions/edx-platform,simbs/edx-platform,mjirayu/sit_academy,Edraak/circleci-edx-platform,hkawasaki/kawasaki-aio8-0,arifsetiawan/edx-platform,dkarakats/edx-platform,alexthered/kienhoc-platform,nttks/jenkins-test,amir-qayyum-khan/edx-platform,ahmadio/edx-platform,motion2015/edx-platform,cognitiveclass/edx-platform,auferack08/edx-platform,sameetb-cuelogic/edx-platform-test,prarthitm/edxplatform,CredoReference/edx-platform,nttks/edx-platform,BehavioralInsightsTeam/edx-platform,raccoongang/edx-platform,eduNEXT/edx-platform,leansoft/edx-platform,proversity-org/edx-platform,jswope00/GAI,utecuy/edx-platform,fly19890211/edx-platform,alu042/edx-platform,cselis86/edx-platform,abdoosh00/edraak,TeachAtTUM/edx-platform,jolyonb/edx-platform,10clouds/edx-platform,mtlchun/edx,shabab12/edx-platform,jazztpt/edx-platform,chand3040/cloud_that,Ayub-Khan/edx-platform,chudaol/edx-platform,cpennington/edx-platform,Edraak
/edraak-platform,cecep-edu/edx-platform,devs1991/test_edx_docmode,jzoldak/edx-platform,DefyVentures/edx-platform,cognitiveclass/edx-platform,antonve/s4-project-mooc,UOMx/edx-platform,louyihua/edx-platform,nikolas/edx-platform,hamzehd/edx-platform,ZLLab-Mooc/edx-platform,hamzehd/edx-platform,ahmadiga/min_edx,jazkarta/edx-platform-for-isc,analyseuc3m/ANALYSE-v1,eduNEXT/edunext-platform,knehez/edx-platform,miptliot/edx-platform,Softmotions/edx-platform,xuxiao19910803/edx-platform,angelapper/edx-platform,caesar2164/edx-platform,zerobatu/edx-platform,longmen21/edx-platform,xinjiguaike/edx-platform,devs1991/test_edx_docmode,rue89-tech/edx-platform,Livit/Livit.Learn.EdX,playm2mboy/edx-platform,chauhanhardik/populo_2,pomegranited/edx-platform,chauhanhardik/populo,rue89-tech/edx-platform,jazkarta/edx-platform,iivic/BoiseStateX,nagyistoce/edx-platform,waheedahmed/edx-platform,andyzsf/edx,Livit/Livit.Learn.EdX,pabloborrego93/edx-platform,mtlchun/edx,tiagochiavericosta/edx-platform,doganov/edx-platform,etzhou/edx-platform,IONISx/edx-platform,Kalyzee/edx-platform,cyanna/edx-platform,jolyonb/edx-platform,CourseTalk/edx-platform,jzoldak/edx-platform,jazkarta/edx-platform-for-isc,ferabra/edx-platform,tiagochiavericosta/edx-platform,bdero/edx-platform,WatanabeYasumasa/edx-platform,jonathan-beard/edx-platform,xinjiguaike/edx-platform,ahmedaljazzar/edx-platform,polimediaupv/edx-platform,Stanford-Online/edx-platform,caesar2164/edx-platform,halvertoluke/edx-platform,mjirayu/sit_academy,zerobatu/edx-platform,devs1991/test_edx_docmode,valtech-mooc/edx-platform,arbrandes/edx-platform,zofuthan/edx-platform,antoviaque/edx-platform,Ayub-Khan/edx-platform,cyanna/edx-platform,jonathan-beard/edx-platform,pku9104038/edx-platform,Edraak/edx-platform,ampax/edx-platform,gymnasium/edx-platform,SravanthiSinha/edx-platform,kursitet/edx-platform,chauhanhardik/populo_2,chauhanhardik/populo_2,jbzdak/edx-platform,chauhanhardik/populo,ahmadio/edx-platform,mcgachey/edx-platform,zofuthan/edx-platform,adoosii/edx-platform,fly19890211/edx-platform,SivilTaram/edx-platform,gsehub/edx-platform,Lektorium-LLC/edx-platform,cselis86/edx-platform,edry/edx-platform,a-parhom/edx-platform,amir-qayyum-khan/edx-platform,fintech-circle/edx-platform,edry/edx-platform,eemirtekin/edx-platform,dsajkl/123,CourseTalk/edx-platform,vasyarv/edx-platform,inares/edx-platform,mbareta/edx-platform-ft,cselis86/edx-platform,waheedahmed/edx-platform,MakeHer/edx-platform,fly19890211/edx-platform,valtech-mooc/edx-platform,naresh21/synergetics-edx-platform,jazztpt/edx-platform,B-MOOC/edx-platform,andyzsf/edx,hamzehd/edx-platform,adoosii/edx-platform,solashirai/edx-platform,Stanford-Online/edx-platform,morenopc/edx-platform,openfun/edx-platform,beni55/edx-platform,angelapper/edx-platform,ak2703/edx-platform,zubair-arbi/edx-platform,doganov/edx-platform,playm2mboy/edx-platform,ESOedX/edx-platform,kmoocdev2/edx-platform,bigdatauniversity/edx-platform,dcosentino/edx-platform,doismellburning/edx-platform,Softmotions/edx-platform,mushtaqak/edx-platform,kamalx/edx-platform,jruiperezv/ANALYSE,peterm-itr/edx-platform,dcosentino/edx-platform,arbrandes/edx-platform,SravanthiSinha/edx-platform,zofuthan/edx-platform,ampax/edx-platform-backup,kamalx/edx-platform,jjmiranda/edx-platform,UXE/local-edx,RPI-OPENEDX/edx-platform,Unow/edx-platform,Edraak/edx-platform,eestay/edx-platform,dsajkl/reqiop,UOMx/edx-platform,mushtaqak/edx-platform,synergeticsedx/deployment-wipro,dsajkl/123,kamalx/edx-platform,alu042/edx-platform,openfun/edx-platform,wwj718/edx-platform,rhndg/openedx,SivilTaram/e
dx-platform,raccoongang/edx-platform,philanthropy-u/edx-platform,romain-li/edx-platform,dsajkl/123,vasyarv/edx-platform,alexthered/kienhoc-platform,Shrhawk/edx-platform,motion2015/a3,vismartltd/edx-platform,peterm-itr/edx-platform,edx-solutions/edx-platform,vikas1885/test1,appliedx/edx-platform,peterm-itr/edx-platform,kxliugang/edx-platform,eduNEXT/edx-platform,Softmotions/edx-platform,shurihell/testasia,leansoft/edx-platform,tanmaykm/edx-platform,xuxiao19910803/edx,inares/edx-platform,inares/edx-platform,iivic/BoiseStateX,atsolakid/edx-platform,analyseuc3m/ANALYSE-v1,hmcmooc/muddx-platform,angelapper/edx-platform,louyihua/edx-platform,JCBarahona/edX,auferack08/edx-platform,kmoocdev2/edx-platform,utecuy/edx-platform,MSOpenTech/edx-platform,morenopc/edx-platform,a-parhom/edx-platform,nagyistoce/edx-platform,IndonesiaX/edx-platform,jamesblunt/edx-platform,appliedx/edx-platform,prarthitm/edxplatform,eduNEXT/edunext-platform,Edraak/edraak-platform,Unow/edx-platform,dcosentino/edx-platform,proversity-org/edx-platform,nikolas/edx-platform,Ayub-Khan/edx-platform,jamiefolsom/edx-platform,EDUlib/edx-platform,edx-solutions/edx-platform,edx/edx-platform,jruiperezv/ANALYSE,appsembler/edx-platform,wwj718/edx-platform,y12uc231/edx-platform,nanolearningllc/edx-platform-cypress,utecuy/edx-platform,SivilTaram/edx-platform,itsjeyd/edx-platform,IONISx/edx-platform,mcgachey/edx-platform,mahendra-r/edx-platform,alexthered/kienhoc-platform,antonve/s4-project-mooc,DNFcode/edx-platform,pepeportela/edx-platform,Shrhawk/edx-platform,jelugbo/tundex,ampax/edx-platform-backup,rismalrv/edx-platform,angelapper/edx-platform,rismalrv/edx-platform,vismartltd/edx-platform,B-MOOC/edx-platform,kmoocdev/edx-platform,Semi-global/edx-platform,IndonesiaX/edx-platform,ampax/edx-platform-backup,deepsrijit1105/edx-platform,hastexo/edx-platform,xuxiao19910803/edx,iivic/BoiseStateX,procangroup/edx-platform,doismellburning/edx-platform,auferack08/edx-platform,zadgroup/edx-platform,Semi-global/edx-platform,analyseuc3m/ANALYSE-v1,pepeportela/edx-platform,doganov/edx-platform,kamalx/edx-platform,dsajkl/reqiop,simbs/edx-platform,pku9104038/edx-platform,nttks/jenkins-test,zubair-arbi/edx-platform,vikas1885/test1,jazztpt/edx-platform,doismellburning/edx-platform,shabab12/edx-platform,yokose-ks/edx-platform,shubhdev/edxOnBaadal,sameetb-cuelogic/edx-platform-test,rismalrv/edx-platform,appsembler/edx-platform,etzhou/edx-platform,wwj718/ANALYSE,adoosii/edx-platform,10clouds/edx-platform,eestay/edx-platform,solashirai/edx-platform,arbrandes/edx-platform,shubhdev/edxOnBaadal,cyanna/edx-platform,WatanabeYasumasa/edx-platform,eemirtekin/edx-platform,MakeHer/edx-platform,jjmiranda/edx-platform,polimediaupv/edx-platform,shabab12/edx-platform,halvertoluke/edx-platform,OmarIthawi/edx-platform,rismalrv/edx-platform,CredoReference/edx-platform,eemirtekin/edx-platform,xingyepei/edx-platform,AkA84/edx-platform,kursitet/edx-platform,dsajkl/123,fly19890211/edx-platform,itsjeyd/edx-platform,mcgachey/edx-platform,nagyistoce/edx-platform,antonve/s4-project-mooc,iivic/BoiseStateX,WatanabeYasumasa/edx-platform,hmcmooc/muddx-platform,devs1991/test_edx_docmode,dsajkl/123,BehavioralInsightsTeam/edx-platform,motion2015/a3,zadgroup/edx-platform,motion2015/edx-platform,edx/edx-platform,playm2mboy/edx-platform,SravanthiSinha/edx-platform,defance/edx-platform,bitifirefly/edx-platform,Semi-global/edx-platform,IONISx/edx-platform,Endika/edx-platform,Stanford-Online/edx-platform,bdero/edx-platform,xuxiao19910803/edx-platform,Edraak/edraak-platform,franosincic/edx-platform,ahm
edaljazzar/edx-platform,stvstnfrd/edx-platform,shashank971/edx-platform,Shrhawk/edx-platform,edry/edx-platform,chauhanhardik/populo,cognitiveclass/edx-platform,vasyarv/edx-platform,jswope00/GAI,mcgachey/edx-platform,hkawasaki/kawasaki-aio8-0,alu042/edx-platform,LearnEra/LearnEraPlaftform,chudaol/edx-platform,y12uc231/edx-platform,Edraak/circleci-edx-platform,motion2015/edx-platform,4eek/edx-platform,playm2mboy/edx-platform,CredoReference/edx-platform,SivilTaram/edx-platform,philanthropy-u/edx-platform,jruiperezv/ANALYSE,mushtaqak/edx-platform,utecuy/edx-platform,Kalyzee/edx-platform,naresh21/synergetics-edx-platform,LICEF/edx-platform,inares/edx-platform,B-MOOC/edx-platform,dkarakats/edx-platform,10clouds/edx-platform,appsembler/edx-platform,vikas1885/test1,jamesblunt/edx-platform,TeachAtTUM/edx-platform,yokose-ks/edx-platform,ahmadiga/min_edx,shurihell/testasia,vikas1885/test1,tanmaykm/edx-platform,olexiim/edx-platform,bdero/edx-platform,Edraak/edx-platform,Kalyzee/edx-platform,jzoldak/edx-platform,eestay/edx-platform,valtech-mooc/edx-platform,martynovp/edx-platform,Semi-global/edx-platform,MSOpenTech/edx-platform,synergeticsedx/deployment-wipro,mtlchun/edx,rhndg/openedx,OmarIthawi/edx-platform,polimediaupv/edx-platform,shurihell/testasia,proversity-org/edx-platform,Stanford-Online/edx-platform,wwj718/ANALYSE,cpennington/edx-platform,zhenzhai/edx-platform,carsongee/edx-platform,martynovp/edx-platform,marcore/edx-platform,eduNEXT/edx-platform,ahmedaljazzar/edx-platform,jswope00/griffinx,arifsetiawan/edx-platform,simbs/edx-platform,IONISx/edx-platform,shubhdev/openedx,abdoosh00/edraak,nagyistoce/edx-platform,dkarakats/edx-platform,tiagochiavericosta/edx-platform,synergeticsedx/deployment-wipro,tanmaykm/edx-platform,appliedx/edx-platform,martynovp/edx-platform,xingyepei/edx-platform,jbzdak/edx-platform,jolyonb/edx-platform,mushtaqak/edx-platform,benpatterson/edx-platform,benpatterson/edx-platform,eestay/edx-platform,Endika/edx-platform,unicri/edx-platform,torchingloom/edx-platform,hastexo/edx-platform,arbrandes/edx-platform,Endika/edx-platform,jbassen/edx-platform,MSOpenTech/edx-platform,pomegranited/edx-platform,shashank971/edx-platform,vismartltd/edx-platform,fintech-circle/edx-platform,wwj718/edx-platform,Softmotions/edx-platform,a-parhom/edx-platform,y12uc231/edx-platform,UOMx/edx-platform,nttks/edx-platform,4eek/edx-platform,DNFcode/edx-platform,jbassen/edx-platform,ZLLab-Mooc/edx-platform,edx/edx-platform,chand3040/cloud_that,ak2703/edx-platform,jamiefolsom/edx-platform,hkawasaki/kawasaki-aio8-1,MakeHer/edx-platform,motion2015/a3,pku9104038/edx-platform,zubair-arbi/edx-platform,arifsetiawan/edx-platform,adoosii/edx-platform,jbzdak/edx-platform,ubc/edx-platform,chauhanhardik/populo_2,hkawasaki/kawasaki-aio8-2,jjmiranda/edx-platform,nanolearning/edx-platform,procangroup/edx-platform,beacloudgenius/edx-platform,nanolearningllc/edx-platform-cypress,don-github/edx-platform,mtlchun/edx,shashank971/edx-platform,mcgachey/edx-platform,hmcmooc/muddx-platform,ak2703/edx-platform,etzhou/edx-platform,franosincic/edx-platform,openfun/edx-platform,zhenzhai/edx-platform,SivilTaram/edx-platform,jzoldak/edx-platform,LearnEra/LearnEraPlaftform,RPI-OPENEDX/edx-platform,raccoongang/edx-platform,halvertoluke/edx-platform,SravanthiSinha/edx-platform,kxliugang/edx-platform,gymnasium/edx-platform,LICEF/edx-platform,hamzehd/edx-platform,olexiim/edx-platform,devs1991/test_edx_docmode,nanolearningllc/edx-platform-cypress-2,beacloudgenius/edx-platform,gsehub/edx-platform,AkA84/edx-platform,eemirtekin/edx-platform,E
DUlib/edx-platform,sudheerchintala/LearnEraPlatForm,jazkarta/edx-platform,knehez/edx-platform,ahmadio/edx-platform,eduNEXT/edunext-platform,Edraak/circleci-edx-platform,lduarte1991/edx-platform,sameetb-cuelogic/edx-platform-test,bigdatauniversity/edx-platform,teltek/edx-platform,Livit/Livit.Learn.EdX,jazkarta/edx-platform,chrisndodge/edx-platform,shubhdev/edx-platform,kamalx/edx-platform,itsjeyd/edx-platform,ampax/edx-platform,J861449197/edx-platform,ak2703/edx-platform,appliedx/edx-platform,doismellburning/edx-platform,yokose-ks/edx-platform,iivic/BoiseStateX,polimediaupv/edx-platform,kursitet/edx-platform,mjirayu/sit_academy,BehavioralInsightsTeam/edx-platform,jswope00/griffinx,RPI-OPENEDX/edx-platform,tiagochiavericosta/edx-platform,leansoft/edx-platform,jswope00/griffinx,bitifirefly/edx-platform,kursitet/edx-platform,jbassen/edx-platform,pabloborrego93/edx-platform,cognitiveclass/edx-platform,LICEF/edx-platform,vasyarv/edx-platform,MSOpenTech/edx-platform,marcore/edx-platform,miptliot/edx-platform,zubair-arbi/edx-platform,10clouds/edx-platform,IndonesiaX/edx-platform,ahmedaljazzar/edx-platform,jonathan-beard/edx-platform,atsolakid/edx-platform,itsjeyd/edx-platform,ovnicraft/edx-platform,shashank971/edx-platform,gymnasium/edx-platform,J861449197/edx-platform,DNFcode/edx-platform,nttks/jenkins-test,beni55/edx-platform,mahendra-r/edx-platform,louyihua/edx-platform,mitocw/edx-platform,DNFcode/edx-platform,beacloudgenius/edx-platform,carsongee/edx-platform,Edraak/edx-platform,shubhdev/openedx,cyanna/edx-platform,hastexo/edx-platform,Edraak/edraak-platform,4eek/edx-platform,knehez/edx-platform,wwj718/ANALYSE,cselis86/edx-platform,jbzdak/edx-platform,cognitiveclass/edx-platform,wwj718/edx-platform,RPI-OPENEDX/edx-platform,wwj718/ANALYSE,rue89-tech/edx-platform,bitifirefly/edx-platform,nagyistoce/edx-platform,ubc/edx-platform,chand3040/cloud_that,jazkarta/edx-platform-for-isc,carsongee/edx-platform,shubhdev/edxOnBaadal,lduarte1991/edx-platform,shubhdev/edx-platform,ferabra/edx-platform,waheedahmed/edx-platform,zerobatu/edx-platform |
2afd2467c16969b10496ae96e17b9dce7911f232 | db.py | db.py | import sqlite3
connection = sqlite3.connect('data.db')
class SavedRoll:
@staticmethod
def save(user, name, args):
pass
@staticmethod
def get(user, name):
pass
@staticmethod
def delete(user, name):
pass
| class SavedRollManager:
"""
Class for managing saved rolls.
Attributes:
connection (sqlite3.Connection): Database connection used by manager
"""
def __init__(self, connection):
"""
Create a SavedRollManager instance.
Args:
connection (sqlite3.Connection): Database connection to use
"""
self.conn = connection
def save(self, user, chat, name, args):
"""
Save a roll to the database.
Args:
user (int): User ID to save roll for
chat (int): Chat ID to save roll for
name: Name of saved roll
args: Arguments to save for roll
"""
pass
def get(self, user, chat, name):
"""
Get a saved roll from the database.
Args:
user (int): User ID to get roll for
chat (int): Chat ID to get roll for
name: Name of saved roll
Returns:
list: List of arguments of saved roll
"""
pass
def delete(self, user, chat, name):
"""
Delete a saved roll from the database.
Args:
user (int): User ID to delete roll from
chat (int): Chat ID to delete roll from
name: Name of saved roll
"""
pass
| Make SavedRollManager less static, also docstrings | Make SavedRollManager less static, also docstrings
| Python | mit | foxscotch/foxrollbot |
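The rewritten class above still leaves every method body as pass; the commit only reshapes the interface around an injected sqlite3.Connection. A minimal sketch of how those stubs could be filled in follows; the table name, schema and JSON encoding are assumptions for illustration, not code from foxrollbot:

    import json
    import sqlite3

    class SavedRollManager:
        def __init__(self, connection):
            self.conn = connection
            # Composite key: one saved roll per (user, chat, name).
            self.conn.execute(
                'CREATE TABLE IF NOT EXISTS saved_rolls '
                '(user INTEGER, chat INTEGER, name TEXT, args TEXT, '
                'PRIMARY KEY (user, chat, name))')

        def save(self, user, chat, name, args):
            # Parameterized queries keep user-supplied roll names safe.
            self.conn.execute(
                'INSERT OR REPLACE INTO saved_rolls VALUES (?, ?, ?, ?)',
                (user, chat, name, json.dumps(args)))
            self.conn.commit()

        def get(self, user, chat, name):
            row = self.conn.execute(
                'SELECT args FROM saved_rolls WHERE user=? AND chat=? AND name=?',
                (user, chat, name)).fetchone()
            return json.loads(row[0]) if row else None

        def delete(self, user, chat, name):
            self.conn.execute(
                'DELETE FROM saved_rolls WHERE user=? AND chat=? AND name=?',
                (user, chat, name))
            self.conn.commit()

Passing the connection in (for example SavedRollManager(sqlite3.connect('data.db'))) also makes the class easy to exercise against an in-memory ':memory:' database in tests.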
b54507e05475dfc11e04678ee358476f571323b2 | plugins/Tools/PerObjectSettingsTool/__init__.py | plugins/Tools/PerObjectSettingsTool/__init__.py | # Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import PerObjectSettingsTool
from UM.i18n import i18nCatalog
i18n_catalog = i18nCatalog("uranium")
def getMetaData():
return {
"plugin": {
"name": i18n_catalog.i18nc("@label", "Settings Per Object Tool"),
"author": "Ultimaker",
"version": "1.0",
"description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Per Object Settings."),
"api": 2
},
"tool": {
"name": i18n_catalog.i18nc("@label", "Per Object Settings"),
"description": i18n_catalog.i18nc("@info:tooltip", "Configure Settings Per Object"),
"icon": "setting_per_object",
"tool_panel": "PerObjectSettingsPanel.qml"
},
}
def register(app):
return { "tool": PerObjectSettingsTool.PerObjectSettingsTool() }
| # Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from . import PerObjectSettingsTool
from UM.i18n import i18nCatalog
i18n_catalog = i18nCatalog("uranium")
def getMetaData():
return {
"plugin": {
"name": i18n_catalog.i18nc("@label", "Per Object Settings Tool"),
"author": "Ultimaker",
"version": "1.0",
"description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Per Object Settings."),
"api": 2
},
"tool": {
"name": i18n_catalog.i18nc("@label", "Per Object Settings"),
"description": i18n_catalog.i18nc("@info:tooltip", "Configure Per Object Settings"),
"icon": "setting_per_object",
"tool_panel": "PerObjectSettingsPanel.qml"
},
}
def register(app):
return { "tool": PerObjectSettingsTool.PerObjectSettingsTool() }
| Normalize strings for per object settings | Normalize strings for per object settings
| Python | agpl-3.0 | onitake/Uranium,onitake/Uranium |
ec61fec1ae60a565110876101dabad352e3ea46b | core/management/commands/delete_old_sessions.py | core/management/commands/delete_old_sessions.py | from datetime import datetime
from django.core.management.base import BaseCommand
from django.contrib.sessions.models import Session
class Command(BaseCommand):
args = '<count count ...>'
help = "Delete old sessions"
def handle(self, *args, **options):
old_sessions = Session.objects.filter(expire_date__lt=datetime.now())
self.stdout.write("Deleting {0} expired sessions".format(
old_sessions.count()
)
)
for index, session in enumerate(old_sessions):
session.delete()
if str(index).endswith('000'):
self.stdout.write("{0} records deleted".format(index))
self.stdout.write("{0} expired sessions remaining".format(
Session.objects.filter(expire_date__lt=datetime.now())
)
)
| from datetime import datetime
from django.core.management.base import NoArgsCommand
from django.contrib.sessions.models import Session
class Command(NoArgsCommand):
help = "Delete old sessions"
def handle_noargs(self, **options):
old_sessions = Session.objects.filter(expire_date__lt=datetime.now())
self.stdout.write("Deleting {0} expired sessions".format(
old_sessions.count()
)
)
for index, session in enumerate(old_sessions)[:10000]:
session.delete()
if str(index).endswith('000'):
self.stdout.write("{0} records deleted".format(index))
self.stdout.write("{0} expired sessions remaining".format(
Session.objects.filter(expire_date__lt=datetime.now())
)
)
| Add delete old sessions command | Add delete old sessions command
| Python | mit | QLGu/djangopackages,pydanny/djangopackages,QLGu/djangopackages,nanuxbe/djangopackages,nanuxbe/djangopackages,QLGu/djangopackages,nanuxbe/djangopackages,pydanny/djangopackages,pydanny/djangopackages |
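One caveat about the new version: enumerate(old_sessions)[:10000] raises TypeError at runtime, because enumerate() returns an iterator and iterators cannot be sliced; the closing write also formats the queryset object itself rather than a number. A sketch of the apparent intent, slicing the queryset (which becomes a SQL LIMIT) and reporting counts, assuming the same Django 1.x NoArgsCommand base used above:

    from datetime import datetime

    from django.contrib.sessions.models import Session
    from django.core.management.base import NoArgsCommand

    class Command(NoArgsCommand):
        help = "Delete old sessions"

        def handle_noargs(self, **options):
            old_sessions = Session.objects.filter(expire_date__lt=datetime.now())
            self.stdout.write("Deleting {0} expired sessions".format(old_sessions.count()))
            # Slicing the queryset adds LIMIT 10000 to the SQL query;
            # enumerate() then runs over the already-limited result.
            for index, session in enumerate(old_sessions[:10000]):
                session.delete()
                if str(index).endswith('000'):
                    self.stdout.write("{0} records deleted".format(index))
            remaining = Session.objects.filter(expire_date__lt=datetime.now()).count()
            self.stdout.write("{0} expired sessions remaining".format(remaining))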
648c7fb94f92e8ef722af8c9462c9ff65bf643fc | intelmq/bots/collectors/mail/collector_mail_body.py | intelmq/bots/collectors/mail/collector_mail_body.py | # -*- coding: utf-8 -*-
"""
Uses the common mail iteration method from the lib file.
"""
from .lib import MailCollectorBot
class MailBodyCollectorBot(MailCollectorBot):
def init(self):
super().init()
self.content_types = getattr(self.parameters, 'content_types', ('plain', 'html'))
if isinstance(self.content_types, str):
self.content_types = [x.strip() for x in self.content_types.split(',')]
elif not self.content_types or self.content_types is True: # empty string, null, false, true
self.content_types = ('plain', 'html')
def process_message(self, uid, message):
seen = False
for content_type in self.content_types:
for body in message.body[content_type]:
if not body:
continue
report = self.new_report()
report["raw"] = body
report["extra.email_subject"] = message.subject
report["extra.email_from"] = ','.join(x['email'] for x in message.sent_from)
report["extra.email_message_id"] = message.message_id
self.send_message(report)
# at least one body has successfully been processed
seen = True
return seen
BOT = MailBodyCollectorBot
| # -*- coding: utf-8 -*-
"""
Uses the common mail iteration method from the lib file.
"""
from .lib import MailCollectorBot
class MailBodyCollectorBot(MailCollectorBot):
def init(self):
super().init()
self.content_types = getattr(self.parameters, 'content_types', ('plain', 'html'))
if isinstance(self.content_types, str):
self.content_types = [x.strip() for x in self.content_types.split(',')]
elif not self.content_types or self.content_types is True: # empty string, null, false, true
self.content_types = ('plain', 'html')
def process_message(self, uid, message):
seen = False
for content_type in self.content_types:
for body in message.body[content_type]:
if not body:
continue
report = self.new_report()
report["raw"] = body
report["extra.email_subject"] = message.subject
report["extra.email_from"] = ','.join(x['email'] for x in message.sent_from)
report["extra.email_message_id"] = message.message_id
report["extra.email_received"] = message.date
self.send_message(report)
# at least one body has successfully been processed
seen = True
return seen
BOT = MailBodyCollectorBot
| Insert date when email was received | Insert date when email was received
Sometimes we receive email reports like "this is happening right now" and there is no date/time included. So if we process emails once per hour - we don't have info about event time. Additional field `extra.email_received` in the mail body collector would help. | Python | agpl-3.0 | aaronkaplan/intelmq,aaronkaplan/intelmq,certtools/intelmq,certtools/intelmq,certtools/intelmq,aaronkaplan/intelmq |
8286aee8eca008e2e469d49e7a426828e4f6c2bf | bin/s3imageresize.py | bin/s3imageresize.py | #!/usr/bin/env python
import argparse
from s3imageresize import resize_image_folder
parser = argparse.ArgumentParser(description='Upload a file to Amazon S3 and rotate old backups.')
parser.add_argument('bucket', help="Name of the Amazon S3 bucket to save the backup file to.")
parser.add_argument('prefix', help="The prefix to add before the filename for the key.")
parser.add_argument('psize', help="Path to the file to upload.")
args = parser.parse_args()
resize_image_folder(args.bucket, args.prefix, args.psize)
| #!/usr/bin/env python
import argparse
from s3imageresize import resize_image_folder
parser = argparse.ArgumentParser(description='Resize all images stored in a folder on Amazon S3.')
parser.add_argument('bucket', help="Name of the Amazon S3 bucket to save the backup file to.")
parser.add_argument('prefix', help="The prefix to add before the filename for the key.")
parser.add_argument('width', help="Maximum width of the image.")
parser.add_argument('height', help="Maximum height of the image.")
args = parser.parse_args()
resize_image_folder(args.bucket, args.prefix, (args.width,args.height))
| Fix parameter descriptions and change size to individual width and height parameters | Fix parameter descriptions and change size to individual width and height parameters
| Python | mit | dirkcuys/s3imageresize |
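A small hardening worth considering, resting on an assumption about what resize_image_folder expects: argparse passes positional arguments through as strings, so (args.width, args.height) is a tuple of str unless a type is declared. If the resize code does arithmetic on the size, type=int keeps it numeric:

    import argparse

    parser = argparse.ArgumentParser(
        description='Resize all images stored in a folder on Amazon S3.')
    parser.add_argument('bucket', help="Name of the Amazon S3 bucket.")
    parser.add_argument('prefix', help="The prefix to add before the filename for the key.")
    # Without type=int these arrive as strings such as '800'.
    parser.add_argument('width', type=int, help="Maximum width of the image in pixels.")
    parser.add_argument('height', type=int, help="Maximum height of the image in pixels.")
    args = parser.parse_args()
    size = (args.width, args.height)  # e.g. (800, 600)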
8e2e08621ca6adea23bc4da2f7b674216bf643f5 | yolk/__init__.py | yolk/__init__.py | """yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.5a0'
| """yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.5'
| Increment patch version to 0.8.5 | Increment patch version to 0.8.5
| Python | bsd-3-clause | myint/yolk,myint/yolk |
7fea0e2fb875a655898915f9f0f8375684d9e6bd | juriscraper/oral_args/united_states/state/__init__.py | juriscraper/oral_args/united_states/state/__init__.py | __all__ = [
'ill', 'illappct_1st_dist' #'md',
]
| __all__ = [
'ill',
'illappct_1st_dist',
#'md',
]
| Clean up to ill imports. | Clean up to ill imports.
Style issue, but it's better to put these vertically. It makes it harder to forget a comma.
| Python | bsd-2-clause | freelawproject/juriscraper,freelawproject/juriscraper |
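The commit message's point about commas is worth spelling out: Python concatenates adjacent string literals, so a missing comma in a one-line list silently merges two module names instead of raising an error. A quick illustration:

    # A forgotten comma does not fail; it produces one merged entry:
    mods = ['ill', 'illappct_1st_dist' 'md']
    assert mods == ['ill', 'illappct_1st_distmd']

    # One name per line with a trailing comma keeps each entry separate
    # and makes the diff for the next addition a single line:
    __all__ = [
        'ill',
        'illappct_1st_dist',
        # 'md',
    ]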
71339b8f92c9057fe029c5db81db7acce7596607 | app/mod_budget/controller.py | app/mod_budget/controller.py | from flask import Blueprint
budget = Blueprint('budget', __name__, template_folder = 'templates')
@budget.route('/')
def default:
return "Hello World!"
| from flask import Blueprint
budget = Blueprint('budget', __name__, template_folder = 'templates')
@budget.route('/')
def default():
return "Hello World!"
| Fix missing parenthesis for default route in budget module. | Fix missing parenthesis for default route in budget module.
| Python | mit | Zillolo/mana-vault,Zillolo/mana-vault,Zillolo/mana-vault |
945e2def0a106541583907101060a234e6846d27 | sources/bioformats/large_image_source_bioformats/girder_source.py | sources/bioformats/large_image_source_bioformats/girder_source.py | # -*- coding: utf-8 -*-
##############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
import cherrypy
from girder_large_image.girder_tilesource import GirderTileSource
from . import BioformatsFileTileSource, _stopJavabridge
cherrypy.engine.subscribe('stop', _stopJavabridge)
class BioformatsGirderTileSource(BioformatsFileTileSource, GirderTileSource):
"""
Provides tile access to Girder items that can be read with bioformats.
"""
cacheName = 'tilesource'
name = 'bioformats'
| # -*- coding: utf-8 -*-
##############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
import cherrypy
from girder_large_image.girder_tilesource import GirderTileSource
from . import BioformatsFileTileSource, _stopJavabridge
cherrypy.engine.subscribe('stop', _stopJavabridge)
class BioformatsGirderTileSource(BioformatsFileTileSource, GirderTileSource):
"""
Provides tile access to Girder items that can be read with bioformats.
"""
cacheName = 'tilesource'
name = 'bioformats'
def mayHaveAdjacentFiles(self, largeImageFile):
# bioformats uses extensions to determine how to open a file, so this
# needs to be set for all file formats.
return True
| Fix reading from hashed file names. | Fix reading from hashed file names.
Bioformats expects file extensions to exist, so flag that we should
always appear as actual, fully-pathed files.
| Python | apache-2.0 | girder/large_image,DigitalSlideArchive/large_image,girder/large_image,girder/large_image,DigitalSlideArchive/large_image,DigitalSlideArchive/large_image |
82f5a5cccb8a7a36adc6f880d3cc1e11b8e596ee | envelope/templatetags/envelope_tags.py | envelope/templatetags/envelope_tags.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""
Template tags related to the contact form.
"""
from django import template
try:
import honeypot
except ImportError: # pragma: no cover
honeypot = None
register = template.Library()
@register.inclusion_tag('envelope/contact_form.html', takes_context=True)
def render_contact_form(context):
"""
Renders the contact form which must be in the template context.
The most common use case for this template tag is to call it in the
template rendered by :class:`~envelope.views.ContactView`. The template
tag will then render a sub-template ``envelope/contact_form.html``.
.. versionadded:: 0.7.0
"""
form = context['form']
return {
'form': form,
}
@register.simple_tag
def antispam_fields():
"""
Returns the HTML for any spam filters available.
"""
content = ''
if honeypot:
t = template.Template('{% load honeypot %}{% render_honeypot_field %}')
content += t.render(template.Context({}))
return content
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""
Template tags related to the contact form.
"""
from django import template
try:
import honeypot
except ImportError: # pragma: no cover
honeypot = None
register = template.Library()
@register.inclusion_tag('envelope/contact_form.html', takes_context=True)
def render_contact_form(context):
"""
Renders the contact form which must be in the template context.
The most common use case for this template tag is to call it in the
template rendered by :class:`~envelope.views.ContactView`. The template
tag will then render a sub-template ``envelope/contact_form.html``.
.. versionadded:: 0.7.0
"""
try:
form = context['form']
except KeyError:
raise template.TemplateSyntaxError("There is no 'form' variable in the template context.")
return {
'form': form,
}
@register.simple_tag
def antispam_fields():
"""
Returns the HTML for any spam filters available.
"""
content = ''
if honeypot:
t = template.Template('{% load honeypot %}{% render_honeypot_field %}')
content += t.render(template.Context({}))
return content
| Raise a more specific error when form is not passed to the template. | Raise a more specific error when form is not passed to the template.
| Python | mit | r4ts0n/django-envelope,r4ts0n/django-envelope,affan2/django-envelope,affan2/django-envelope,zsiciarz/django-envelope,zsiciarz/django-envelope |
f1e2859f5535d7eddb13c10e71f9c0074c94c719 | axes_login_actions/signals.py | axes_login_actions/signals.py | # -*- coding: utf-8 -*-
from axes.models import AccessAttempt
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from importlib import import_module
DEFAULT_ACTION = 'axes_login_actions.actions.email.notify'
ACTIONS = getattr(settings, 'AXES_LOGIN_ACTIONS', [DEFAULT_ACTION])
#----------------------------------------------------------------------
def import_dotted_path(path):
"""
Takes a dotted path to a member name in a module, and returns
the member after importing it.
"""
# stolen from Mezzanine (mezzanine.utils.importing.import_dotted_path)
try:
module_path, member_name = path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, member_name)
except (ValueError, ImportError, AttributeError), e:
raise ImportError("Could not import the name: %s: %s" % (path, e))
#----------------------------------------------------------------------
@receiver(post_save, sender=AccessAttempt)
def access_attempt_handler(sender, instance, **kwargs):
for action_path in ACTIONS:
action = import_dotted_path(action_path)
action(instance, **kwargs)
| # -*- coding: utf-8 -*-
from axes.models import AccessAttempt
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from importlib import import_module
DEFAULT_ACTION = 'axes_login_actions.actions.email.notify'
ACTIONS = getattr(settings, 'AXES_LOGIN_ACTIONS', [DEFAULT_ACTION])
#----------------------------------------------------------------------
def import_dotted_path(path):
"""
Takes a dotted path to a member name in a module, and returns
the member after importing it.
"""
# stolen from Mezzanine (mezzanine.utils.importing.import_dotted_path)
try:
module_path, member_name = path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, member_name)
except (ValueError, ImportError, AttributeError), e:
raise ImportError("Could not import the name: %s: %s" % (path, e))
#----------------------------------------------------------------------
@receiver(post_save, sender=AccessAttempt, dispatch_uid='axes_login_actions_post_save')
def access_attempt_handler(sender, instance, **kwargs):
for action_path in ACTIONS:
action = import_dotted_path(action_path)
action(instance, **kwargs)
| Add "dispatch_uid" to ensure we connect the signal only once | Add "dispatch_uid" to ensure we connect the signal only once
| Python | bsd-3-clause | eht16/django-axes-login-actions |
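Some background on the fix: Django deduplicates signal connections by the receiver's id() unless a dispatch_uid is given, so the duplicate-handler problem typically appears when the signals module is imported under two different paths and two distinct function objects get registered. A string dispatch_uid keys the registration instead, making repeat connections a no-op. An illustrative sketch (two distinct function objects stand in for a double import; meant to run inside a configured Django project):

    from django.db.models.signals import post_save

    def make_handler():
        def handler(sender, instance, **kwargs):
            print("access attempt saved")
        return handler

    # Two different function objects: both register, so the handler fires twice.
    post_save.connect(make_handler(), weak=False)
    post_save.connect(make_handler(), weak=False)

    # With a shared dispatch_uid the second connect() is ignored.
    post_save.connect(make_handler(), weak=False, dispatch_uid='axes_login_actions_post_save')
    post_save.connect(make_handler(), weak=False, dispatch_uid='axes_login_actions_post_save')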
ea324a30823fbf18c72dd639b9c43d3ecb57b034 | txircd/modules/extra/services/account_extban.py | txircd/modules/extra/services/account_extban.py | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
from fnmatch import fnmatchcase
class AccountExtban(ModuleData):
implements(IPlugin, IModuleData)
name = "AccountExtban"
def actions(self):
return [ ("usermatchban-R", 1, self.matchBan),
("usermetadataupdate", 10, self.updateBansOnAccountChange) ]
def matchBan(self, user, matchNegated, mask):
if not user.metadataKeyExists("account"):
return matchNegated
userAccount = ircLower(user.metadataValue("account"))
if fnmatchcase(userAccount, mask):
return not matchNegated
return matchNegated
def updateBansOnAccountChange(self, user, key, oldValue, value, visibility, setByUser, fromServer = None):
if key != "account":
return
self.ircd.runActionStandard("updateuserbancache", user)
matchExtban = AccountExtban() | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
from fnmatch import fnmatchcase
class AccountExtban(ModuleData):
implements(IPlugin, IModuleData)
name = "AccountExtban"
def actions(self):
return [ ("usermatchban-R", 1, self.matchBan),
("usermetadataupdate", 10, self.updateBansOnAccountChange) ]
def matchBan(self, user, matchNegated, mask):
if not user.metadataKeyExists("account"):
return matchNegated
userAccount = ircLower(user.metadataValue("account"))
lowerMask = ircLower(mask)
if fnmatchcase(userAccount, lowerMask):
return not matchNegated
return matchNegated
def updateBansOnAccountChange(self, user, key, oldValue, value, visibility, setByUser, fromServer = None):
if key != "account":
return
self.ircd.runActionStandard("updateuserbancache", user)
matchExtban = AccountExtban() | Fix matching users against R: extbans | Fix matching users against R: extbans
| Python | bsd-3-clause | Heufneutje/txircd |
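For context, fnmatchcase() is the deliberately case-sensitive variant of fnmatch, so lowering only the account name meant any mask containing uppercase letters could never match. Lowering both sides restores the intended case-insensitive extban match; ircLower applies IRC casemapping rather than plain str.lower(), but the idea is the same:

    from fnmatch import fnmatchcase

    account = 'SomeUser'.lower()
    mask = 'SomeUser*'

    fnmatchcase(account, mask)          # False: the mask still contains uppercase
    fnmatchcase(account, mask.lower())  # True: both sides normalised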
d649e0ff501604d9b8b24bd69a7545528332c05c | polling_stations/apps/pollingstations/models.py | polling_stations/apps/pollingstations/models.py | from django.contrib.gis.db import models
from councils.models import Council
class PollingStation(models.Model):
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
postcode = models.CharField(blank=True, null=True, max_length=100)
address = models.TextField(blank=True, null=True)
location = models.PointField(null=True, blank=True)
objects = models.GeoManager()
class PollingDistrict(models.Model):
name = models.CharField(blank=True, null=True, max_length=255)
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
extra_id = models.CharField(blank=True, null=True, max_length=100)
area = models.MultiPolygonField(null=True, blank=True, geography=True)
objects = models.GeoManager()
def __unicode__(self):
return self.name
| from django.contrib.gis.db import models
from councils.models import Council
class PollingStation(models.Model):
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
postcode = models.CharField(blank=True, null=True, max_length=100)
address = models.TextField(blank=True, null=True)
location = models.PointField(null=True, blank=True)
objects = models.GeoManager()
class PollingDistrict(models.Model):
name = models.CharField(blank=True, null=True, max_length=255)
council = models.ForeignKey(Council, null=True)
internal_council_id = models.CharField(blank=True, max_length=100)
extra_id = models.CharField(blank=True, null=True, max_length=100)
area = models.MultiPolygonField(null=True, blank=True, geography=True)
objects = models.GeoManager()
def __unicode__(self):
name = self.name or "Unnamed"
return "%s (%s)" % (name, self.council)
| Fix unicode for unknown names | Fix unicode for unknown names
| Python | bsd-3-clause | andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations |
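The underlying issue: with name NULL the old __unicode__ returned None, and on Python 2 any attempt to coerce the object to text (admin lists, shell output, string formatting) fails with TypeError. The new version always returns a string. A stand-alone illustration of the two code paths, with a hypothetical council name:

    name = None
    council = 'Borsetshire Council'  # hypothetical value for illustration

    old_result = name  # returned as-is: unicode()/str() on it raises TypeError

    new_result = '%s (%s)' % (name or 'Unnamed', council)
    assert new_result == 'Unnamed (Borsetshire Council)'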
c5996b4a933f2d27251e8d85f3392b715e130759 | mapentity/templatetags/convert_tags.py | mapentity/templatetags/convert_tags.py | import urllib
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
| import urllib
from mimetypes import types_map
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
if '/' not in format:
extension = '.' + format if not format.startswith('.') else format
format = types_map[extension]
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
| Support conversion format as extension, instead of mimetype | Support conversion format as extension, instead of mimetype
| Python | bsd-3-clause | Anaethelion/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity,Anaethelion/django-mapentity,Anaethelion/django-mapentity |
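The new branch leans on the standard library's mimetypes.types_map, a dict mapping dotted extensions to MIME types; anything already containing a '/' is treated as a MIME type and passed through unchanged. Unknown extensions raise KeyError, so callers still need to pass something mimetypes knows about. For example:

    from mimetypes import types_map

    types_map['.pdf']   # 'application/pdf'
    types_map['.png']   # 'image/png'

    def to_mimetype(fmt):
        # Mirrors the template tag's normalisation: accepts 'pdf', '.pdf'
        # or a ready-made MIME type such as 'application/pdf'.
        if '/' in fmt:
            return fmt
        ext = fmt if fmt.startswith('.') else '.' + fmt
        return types_map[ext]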
5885c053e9bf20c7b91ebc2c8aebd1dfb9c78a46 | avalonstar/components/broadcasts/models.py | avalonstar/components/broadcasts/models.py | # -*- coding: utf-8 -*-
from django.db import models
from components.games.models import Game
class Broadcast(models.Model):
airdate = models.DateField()
status = models.CharField(max_length=200)
number = models.IntegerField(blank=True, null=True)
# ...
games = models.ManyToManyField(Game, related_name='appears_on')
def __unicode__(self):
return 'Episode %s' % self.number
| # -*- coding: utf-8 -*-
from django.db import models
from components.games.models import Game
class Series(models.Model):
name = models.CharField(max_length=200)
def __unicode__(self):
return '%s' % self.name
class Broadcast(models.Model):
airdate = models.DateField()
status = models.CharField(max_length=200)
number = models.IntegerField(blank=True, null=True)
# ...
games = models.ManyToManyField(Game, related_name='appears_on')
series = models.ForeignKey(Series, related_name='broadcasts')
# ...
def __unicode__(self):
return 'Episode %s' % self.number
| Add the concept of series (like Whatever Wednesday). | Add the concept of series (like Whatever Wednesday).
| Python | apache-2.0 | bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv |
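One practical note on this kind of change: series is added as a non-nullable ForeignKey, so if the broadcast table already holds rows, the schema migration needs a one-off default (or the field needs null=True) before it can be applied. A hedged variant that sidesteps that requirement, reusing the Game and Series models from the module above:

    from django.db import models

    class Broadcast(models.Model):
        airdate = models.DateField()
        status = models.CharField(max_length=200)
        number = models.IntegerField(blank=True, null=True)
        games = models.ManyToManyField(Game, related_name='appears_on')
        # null=True lets existing broadcasts migrate without inventing a Series.
        series = models.ForeignKey(Series, related_name='broadcasts',
                                   null=True, blank=True)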
0d176d6d40c5267a8672e2f8511eeec3ff7e4102 | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
def readreq(filename):
result = []
with open(filename) as f:
for req in f:
req = req.partition('#')[0].strip()
if not req:
continue
result.append(req)
return result
def readfile(filename):
with open(filename) as f:
return f.read()
setup(
name='cli_tools',
version='0.2.4',
author='Kevin L. Mitchell',
author_email='[email protected]',
url='https://github.com/klmitch/cli_utils',
description="Command Line Interface Tools",
long_description=readfile('README.rst'),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 or '
'later (GPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: User Interfaces',
],
py_modules=['cli_tools'],
requires=readreq('requirements.txt'),
tests_require=readreq('test-requirements.txt'),
)
| #!/usr/bin/env python
from setuptools import setup
def readreq(filename):
result = []
with open(filename) as f:
for req in f:
req = req.partition('#')[0].strip()
if not req:
continue
result.append(req)
return result
def readfile(filename):
with open(filename) as f:
return f.read()
setup(
name='cli_tools',
version='0.2.5',
author='Kevin L. Mitchell',
author_email='[email protected]',
url='https://github.com/klmitch/cli_utils',
description="Command Line Interface Tools",
long_description=readfile('README.rst'),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 or '
'later (GPLv3+)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: User Interfaces',
],
py_modules=['cli_tools'],
install_requires=readreq('requirements.txt'),
tests_require=readreq('test-requirements.txt'),
)
| Use install_requires= and bump version | Use install_requires= and bump version
It appears that the "requires" keyword argument to setup() doesn't do
the right thing. This may be a brain-o on my part. This switches
back to using "install_requires" and bumps the version for release.
| Python | apache-2.0 | klmitch/cli_tools |
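The distinction matters: requires= is an old distutils metadata field that setuptools ignores at install time, so the listed packages were never actually pulled in, while install_requires= is the setuptools keyword that pip resolves and installs alongside the package. A minimal sketch (the dependency names below are placeholders, not the project's real requirements):

    from setuptools import setup

    setup(
        name='cli_tools',
        version='0.2.5',
        py_modules=['cli_tools'],
        # distutils metadata only; has no effect on what pip installs:
        #   requires=['argparse'],
        # setuptools dependency list; pip installs these with the package:
        install_requires=['argparse'],
        tests_require=['nose', 'mock'],
    )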
0e3edd3be1748dd62323037760337b8819adaeea | features/steps/use_steplib_behave4cmd.py | features/steps/use_steplib_behave4cmd.py | # -*- coding: utf-8 -*-
"""
Use behave4cmd0 step library (predecessor of behave4cmd).
"""
# -- REGISTER-STEPS:
import behave4cmd0.command_steps
| # -*- coding: utf-8 -*-
"""
Use behave4cmd0 step library (predecessor of behave4cmd).
"""
# -- REGISTER-STEPS:
import behave4cmd0.__all_steps__
| Use all behave4cmd0 steps now. | Use all behave4cmd0 steps now.
| Python | bsd-2-clause | hugeinc/behave-parallel |
7bd4d126269e516f3a9a54721e3d710e19120eb4 | app.py | app.py | from flask import Flask, jsonify
from flask.helpers import make_response
import urls
import scrapy
app = Flask(__name__)
@app.route('/fuelprice/v1.0/petrol/', methods=['GET'])
def petrol_prices_all():
all_petrol_prices = scrapy.scrape_all_petrol_prices()
return make_response(jsonify(all_petrol_prices))
@app.route('/fuelprice/v1.0/petrol/<string:city_name>', methods=['GET'])
def petrol_price(city_name):
url = urls.petrol_url(city_name)
price = scrapy.scrape_latest_petrol_price(url)
return make_response(jsonify({city_name.title() : price}))
@app.errorhandler(404)
def not_found(error):
return make_response(jsonify({'error': 'Not found'}))
@app.route('/')
def index():
return "Hello, World!"
if __name__ == '__main__':
app.run(debug=True)
| from flask import Flask, jsonify
from flask.helpers import make_response
import urls
import scrapy
app = Flask(__name__)
@app.route('/fuelprice/v1.0/petrol/', methods=['GET'])
def petrol_prices_all():
all_petrol_prices = scrapy.scrape_all_petrol_prices()
return make_response(jsonify(all_petrol_prices))
@app.route('/fuelprice/v1.0/petrol/<string:city_name>', methods=['GET'])
def petrol_price(city_name):
url = urls.petrol_url(city_name.lower())
price = scrapy.scrape_latest_petrol_price(url)
return make_response(jsonify({city_name.title() : price}))
@app.errorhandler(404)
def not_found(error):
return make_response(jsonify({'error': 'Not found'}))
@app.route('/')
def index():
return "Hello, World!"
if __name__ == '__main__':
app.run(debug=True)
| Handle different cases of city names | Handle different cases of city names
| Python | apache-2.0 | phalgun/fuelprice-api |
c7f6e0c2e9c5be112a7576c3d2a1fc8a79eb9f18 | brasilcomvc/settings/staticfiles.py | brasilcomvc/settings/staticfiles.py | import os
import sys
# Disable django-pipeline when in test mode
PIPELINE_ENABLED = 'test' not in sys.argv
# Main project directory
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
STATIC_BASE_DIR = os.path.join(BASE_DIR, '../webroot')
# Static file dirs
STATIC_ROOT = os.path.join(STATIC_BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(STATIC_BASE_DIR, 'media')
# Static file URLs
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
# django-pipeline settings
STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
STATICFILES_FINDERS = (
'pipeline.finders.AppDirectoriesFinder',
'pipeline.finders.PipelineFinder',
)
PIPELINE_COMPILERS = (
'pipeline.compilers.stylus.StylusCompiler',
)
# Stylus configuration
PIPELINE_STYLUS_ARGUMENTS = ' '.join([
'--include {path}/common/static/styl', # Expose common styl lib dir
'--use kouto-swiss',
]).format(path=BASE_DIR)
# Packaging specs for CSS
PIPELINE_CSS = {
'app': {
'source_filenames': [
# ...
],
'output_filename': 'css/app.css',
}
}
# Packaging specs for JavaScript
PIPELINE_JS = {
}
| import os
import sys
# Main project directory
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
STATIC_BASE_DIR = os.path.join(BASE_DIR, '../webroot')
# Static file dirs
STATIC_ROOT = os.path.join(STATIC_BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(STATIC_BASE_DIR, 'media')
# Static file URLs
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
# django-pipeline settings
STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
if 'test' in sys.argv:
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
STATICFILES_FINDERS = (
'pipeline.finders.AppDirectoriesFinder',
'pipeline.finders.PipelineFinder',
)
PIPELINE_COMPILERS = (
'pipeline.compilers.stylus.StylusCompiler',
)
# Stylus configuration
PIPELINE_STYLUS_ARGUMENTS = ' '.join([
'--include {path}/common/static/styl', # Expose common styl lib dir
'--use kouto-swiss',
]).format(path=BASE_DIR)
# Packaging specs for CSS
PIPELINE_CSS = {
'app': {
'source_filenames': [
# ...
],
'output_filename': 'css/app.css',
}
}
# Packaging specs for JavaScript
PIPELINE_JS = {
}
| Fix django-pipeline configuration for development/test | fix(set): Fix django-pipeline configuration for development/test
| Python | apache-2.0 | brasilcomvc/brasilcomvc,brasilcomvc/brasilcomvc,brasilcomvc/brasilcomvc |
a5274f0628bec7a77fc2722ced723c4f35f3fb4b | microcosm_flask/fields/query_string_list.py | microcosm_flask/fields/query_string_list.py | """
A list field field that supports query string parameter parsing.
"""
from marshmallow.fields import List, ValidationError
class SelfSerializableList(list):
def __str__(self):
return ",".join(str(item) for item in self)
class QueryStringList(List):
def _deserialize(self, value, attr, obj):
"""
_deserialize handles multiple formats of query string parameter lists
including:
/foo?bars=1,2
/foo?bars[]=1&bars[]2
and returns a list of values
"""
if value is None:
return None
try:
attribute_elements = [attr_element.split(",") for attr_element in obj.getlist(attr)]
attribute_params = SelfSerializableList(param for attr_param in attribute_elements for param in attr_param)
return attribute_params
except ValueError:
raise ValidationError("Invalid query string list argument")
| """
A list field field that supports query string parameter parsing.
"""
from marshmallow.fields import List, ValidationError
class PrintableList(list):
def __str__(self):
return ",".join(str(item) for item in self)
class QueryStringList(List):
def _deserialize(self, value, attr, obj):
"""
_deserialize handles multiple formats of query string parameter lists
including:
/foo?bars=1,2
/foo?bars[]=1&bars[]2
and returns a list of values
"""
if value is None:
return None
try:
attribute_elements = [attr_element.split(",") for attr_element in obj.getlist(attr)]
attribute_params = PrintableList(param for attr_param in attribute_elements for param in attr_param)
return attribute_params
except ValueError:
raise ValidationError("Invalid query string list argument")
| Change the name of SelfSerializableList to PrintableList | Change the name of SelfSerializableList to PrintableList
| Python | apache-2.0 | globality-corp/microcosm-flask,globality-corp/microcosm-flask |
faa74af66ff0542c5a08d85caf2e2b897506b1d0 | custom/ewsghana/handlers/help.py | custom/ewsghana/handlers/help.py | from corehq.apps.products.models import SQLProduct
from custom.ewsghana.handlers import HELP_TEXT
from custom.ilsgateway.tanzania.handlers.keyword import KeywordHandler
class HelpHandler(KeywordHandler):
def help(self):
self.respond(HELP_TEXT)
def handle(self):
topic = self.args[0].lower()
if topic == 'stock':
self.respond("Please send your receipts in the format "
"' <Commodity code> <stock on hand > . <quantity received>'")
elif topic == 'stop':
self.respond("Text 'stop' to stop receiving text message reminders.")
elif topic == 'start':
self.respond("Text 'start' to get text message reminders every week to submit your stock reports.")
elif 'code' in topic:
codes = [c.code for c in SQLProduct.by_domain(self.domain).order_by('code')]
self.respond("Available commodity codes: %(codes)s", codes=", ".join(codes))
else:
try:
sql_product = SQLProduct.objects.get(domain=self.domain, code=topic)
msg = "%s is the commodity code for %s" % (topic, sql_product.name)
if sql_product.units:
msg += " (%s)" % sql_product.units
if sql_product.description and sql_product.description not in sql_product.name:
msg += " %s" % sql_product.description
self.respond(msg)
except SQLProduct.DoesNotExist:
self.help()
| from corehq.apps.products.models import SQLProduct
from custom.ewsghana.handlers import HELP_TEXT
from custom.ilsgateway.tanzania.handlers.keyword import KeywordHandler
class HelpHandler(KeywordHandler):
def help(self):
self.respond(HELP_TEXT)
def handle(self):
topic = self.args[0].lower()
if topic == 'stock':
self.respond("Please send your receipts in the format "
"' <Commodity code> <stock on hand > . <quantity received>'")
elif topic == 'stop':
self.respond("Text 'stop' to stop receiving text message reminders.")
elif topic == 'start':
self.respond("Text 'start' to get text message reminders every week to submit your stock reports.")
elif 'code' in topic:
codes = SQLProduct.by_domain(self.domain).order_by('code').values_list('code', flat=True)
self.respond("Available commodity codes: %(codes)s", codes=", ".join(codes))
else:
try:
sql_product = SQLProduct.objects.get(domain=self.domain, code=topic)
msg = "%s is the commodity code for %s" % (topic, sql_product.name)
if sql_product.units:
msg += " (%s)" % sql_product.units
if sql_product.description and sql_product.description not in sql_product.name:
msg += " %s" % sql_product.description
self.respond(msg)
except SQLProduct.DoesNotExist:
self.help()
| Use values_list instead of iterating over | Use values_list instead of iterating over
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq |
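The gain from values_list here: the list comprehension instantiates a full SQLProduct model object per row just to read one column, whereas values_list('code', flat=True) asks the database for only the code column and skips model construction entirely. Both shapes join the same way (sketched with a bare domain variable):

    # Before: one ORM object per product, then everything but .code is discarded.
    codes = [c.code for c in SQLProduct.by_domain(domain).order_by('code')]

    # After: SELECT only the code column; flat=True yields strings, not 1-tuples.
    codes = SQLProduct.by_domain(domain).order_by('code').values_list('code', flat=True)

    ", ".join(codes)  # identical result for both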
0a0d5e0c833c82a26697f049444bb6e3c359c3c7 | django_lti_tool_provider/urls.py | django_lti_tool_provider/urls.py | from django.conf.urls import url
from django_lti_tool_provider import views as lti_views
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='lti')
]
| from django.conf.urls import url
from django_lti_tool_provider import views as lti_views
app_name = 'django_lti_tool_provider'
urlpatterns = [
url(r'', lti_views.LTIView.as_view(), name='lti')
]
| Adjust URL configuration based on changes introduced in Django 1.9: | Adjust URL configuration based on changes introduced in Django 1.9:
- URL application namespace required if setting an instance namespace:
https://docs.djangoproject.com/en/2.1/releases/1.9/#url-application-namespace-required-if-setting-an-instance-namespace
| Python | agpl-3.0 | open-craft/django-lti-tool-provider |
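Background on the Django 1.9+ requirement: an instance namespace supplied via include(..., namespace=...) must now be backed by an application namespace, which a reusable app declares either with app_name in its urls.py (as done here) or by passing a 2-tuple to include(). With that in place, a host project can mount the app as below; the project-level pattern and namespace names are illustrative, not part of the app:

    # Host project's urls.py (sketch):
    from django.conf.urls import include, url

    urlpatterns = [
        url(r'^lti/', include('django_lti_tool_provider.urls',
                              namespace='lti_provider')),
    ]

    # reverse('lti_provider:lti') now resolves, thanks to app_name plus namespace.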
bff6e81fc952efdbee12e9c05be630f12f61d929 | pygraphc/similarity/pyjwJaroWinkler.py | pygraphc/similarity/pyjwJaroWinkler.py | from pyjarowinkler import distance
from itertools import combinations
from time import time
start = time()
log_file = '/home/hs32832011/Git/labeled-authlog/dataset/Hofstede2014/dataset1_perday/Dec 1.log'
with open(log_file, 'r') as f:
lines = f.readlines()
log_length = len(lines)
for line1, line2 in combinations(xrange(log_length), 2):
# string1 = unicode(lines[line1], 'utf-8')
# string2 = unicode(lines[line2], 'utf-8')
string1 = lines[line1]
string2 = lines[line2]
dist = distance.get_jaro_distance(string1, string2)
print dist
# print runtime
duration = time() - start
minute, second = divmod(duration, 60)
hour, minute = divmod(minute, 60)
print "Runtime: %d:%02d:%02d" % (hour, minute, second)
| Add Jaro-Winkler distance based pyjarowinkler library | Add Jaro-Winkler distance based pyjarowinkler library
| Python | mit | studiawan/pygraphc |
|
b0bed22c3ccafe596cf715f2be56c3261b4a6853 | reporting_scripts/course_completers.py | reporting_scripts/course_completers.py | '''
This module extracts the student IDs from the collection certificates_generatedcertificate
of the students who completed the course and achieved a certificate. The ids
are then used to extract the usernames of the course completers
Usage:
python course_completers.py
'''
from collections import defaultdict
from base_edx import EdXConnection
from generate_csv_report import CSV
connection = EdXConnection('certificates_generatedcertificate', 'auth_user')
collection = connection.get_access_to_collection()
completers = collection['certificates_generatedcertificate'].find({'status' : 'downloadable'})
result = []
for document in completers:
user_document = collection['auth_user'].find_one({"id" : document['user_id']})
result.append([user_document['username'], document['name'], document['grade']])
output = CSV(result, ['Username', 'Name', 'Grade'], output_file='course_completers.csv')
output.generate_csv()
| '''
This module extracts the student IDs from the collection certificates_generatedcertificate
of the students who completed the course and achieved a certificate. The ids
are then used to extract the usernames of the course completers
Usage:
python course_completers.py
'''
from collections import defaultdict
from base_edx import EdXConnection
from generate_csv_report import CSV
connection = EdXConnection('certificates_generatedcertificate', 'auth_user')
collection = connection.get_access_to_collection()
completers = collection['certificates_generatedcertificate'].find({'status' : 'downloadable'})
result = []
for document in completers:
user_document = collection['auth_user'].find_one({"id" : document['user_id']})
result.append([user_document['id'],user_document['username'], document['name'], document['grade']])
output = CSV(result, ['User ID','Username', 'Name', 'Grade'], output_file='course_completers.csv')
output.generate_csv()
| Update to include User ID in result | Update to include User ID in result
| Python | mit | McGillX/edx_data_research,andyzsf/edx_data_research,McGillX/edx_data_research,andyzsf/edx_data_research,McGillX/edx_data_research |
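One defensive tweak this script may eventually want; this is an assumption about the data, not part of the commit: find_one returns None when a certificate's user_id has no matching auth_user document, and the current loop would then fail with a TypeError on subscripting. A guarded version of the loop:

    for document in completers:
        user_document = collection['auth_user'].find_one({"id": document['user_id']})
        if user_document is None:
            # Skip certificates whose user record is missing rather than crash.
            continue
        result.append([user_document['id'], user_document['username'],
                       document['name'], document['grade']])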
fd9c73fc65a7234732ed55a7ae89365aec6cf123 | behave_django/runner.py | behave_django/runner.py | from django.test.runner import DiscoverRunner
from behave_django.environment import BehaveHooksMixin
from behave_django.testcase import (BehaviorDrivenTestCase,
ExistingDatabaseTestCase)
class BehaviorDrivenTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the BehaviorDrivenTestCase
"""
testcase_class = BehaviorDrivenTestCase
class ExistingDatabaseTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the ExistingDatabaseTestCase
This test runner nullifies Django's test database setup methods. Using this
test runner would make your tests run with the default configured database
in settings.py.
"""
testcase_class = ExistingDatabaseTestCase
def setup_databases(*args, **kwargs):
pass
def teardown_databases(*args, **kwargs):
pass
| from django.test.runner import DiscoverRunner
from behave_django.environment import BehaveHooksMixin
from behave_django.testcase import (BehaviorDrivenTestCase,
ExistingDatabaseTestCase)
class BehaviorDrivenTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the BehaviorDrivenTestCase
"""
testcase_class = BehaviorDrivenTestCase
class ExistingDatabaseTestRunner(DiscoverRunner, BehaveHooksMixin):
"""
Test runner that uses the ExistingDatabaseTestCase
This test runner nullifies Django's test database setup methods. Using this
test runner would make your tests run with the default configured database
in settings.py.
"""
testcase_class = ExistingDatabaseTestCase
def setup_databases(self, **kwargs):
pass
def teardown_databases(self, old_config, **kwargs):
pass
| Fix Landscape complaint "Method has no argument" | Fix Landscape complaint "Method has no argument"
| Python | mit | bittner/behave-django,behave/behave-django,behave/behave-django,bittner/behave-django |
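The Landscape warning points at a real API detail: in the old version setup_databases(*args, **kwargs) swallows the instance into args, so the overrides work by accident but no longer mirror DiscoverRunner's method signatures. The new signatures line up with how Django's runner drives these hooks, roughly (simplified sketch, not runnable on its own):

    runner = ExistingDatabaseTestRunner()
    old_config = runner.setup_databases()    # instance method: self is implicit
    # ... tests run here ...
    runner.teardown_databases(old_config)    # old_config is passed back positionally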
dfc7c7ae72b91f3bc7724da6b0d8071b3e9253b7 | altair/vegalite/v2/examples/us_state_capitals.py | altair/vegalite/v2/examples/us_state_capitals.py | """
U.S. state capitals overlayed on a map of the U.S
================================================-
This is a geographic visualization that shows US capitals
overlayed on a map.
"""
import altair as alt
from vega_datasets import data
states = alt.UrlData(data.us_10m.url,
format=alt.TopoDataFormat(type='topojson',
feature='states'))
capitals = data.us_state_capitals.url
# US states background
background = alt.Chart(states).mark_geoshape(
fill='lightgray',
stroke='white'
).properties(
projection={'type': 'albersUsa'},
width=800,
height=500
)
# State capitals labeled on background
points = alt.Chart(capitals).mark_text().encode(
alt.Text('city', type='nominal'),
alt.X('lon', type='longitude'),
alt.Y('lat', type='latitude'),
)
chart = background + points
| """
U.S. state capitals overlayed on a map of the U.S
================================================
This is a layered geographic visualization that shows US capitals
overlayed on a map.
"""
import altair as alt
from vega_datasets import data
states = alt.UrlData(data.us_10m.url,
format=alt.TopoDataFormat(type='topojson',
feature='states'))
capitals = data.us_state_capitals.url
# US states background
background = alt.Chart(states).mark_geoshape(
fill='lightgray',
stroke='white'
).properties(
projection={'type': 'albersUsa'},
width=800,
height=500
)
# State capitals labeled on background
points = alt.Chart(capitals).mark_text(dy=-5, align='right').encode(
alt.Text('city', type='nominal'),
alt.X('lon', type='longitude'),
alt.Y('lat', type='latitude'),
)
chart = background + points + points.mark_point(color='black')
| Add points for capital locations | Add points for capital locations
| Python | bsd-3-clause | ellisonbg/altair,jakevdp/altair,altair-viz/altair |
9c7bed0917bc8a14b7be1f98f392f6669cd259d8 | ideascube/conf/idb_lbn_elmarj.py | ideascube/conf/idb_lbn_elmarj.py | # -*- coding: utf-8 -*-
"""El-Marj box in Lebanon"""
from .idb import * # noqa
IDEASCUBE_NAME = u"El-Marj Lebanon" # Fixme
COUNTRIES_FIRST = ['LB', 'SY', 'JO', 'PS']
TIME_ZONE = 'Asia/Beirut'
LANGUAGE_CODE = 'ar'
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'khanacademy',
},
{
'id': 'wikipedia',
'languages': ['ar']
},
{
'id': 'wiktionary',
'languages': ['ar']
},
{
'id': 'wikiversity',
'languages': ['ar']
},
{
'id': 'wikibooks',
'languages': ['ar']
},
{
'id': 'wikisource',
'languages': ['ar']
},
{
'id': 'wikiquote',
'languages': ['ar']
},
{
'id': 'bil-tunisia',
'languages': ['ar']
},
]
| # -*- coding: utf-8 -*-
"""El Marj box in Lebanon"""
from .idb import * # noqa
IDEASCUBE_NAME = u"El Marj Lebanon" # Fixme
COUNTRIES_FIRST = ['LB', 'SY', 'JO', 'PS']
TIME_ZONE = 'Asia/Beirut'
LANGUAGE_CODE = 'ar'
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'khanacademy',
},
{
'id': 'wikipedia',
'languages': ['ar']
},
{
'id': 'wiktionary',
'languages': ['ar']
},
{
'id': 'wikiversity',
'languages': ['ar']
},
{
'id': 'wikibooks',
'languages': ['ar']
},
{
'id': 'wikisource',
'languages': ['ar']
},
{
'id': 'wikiquote',
'languages': ['ar']
},
{
'id': 'bil-tunisia',
'languages': ['ar']
},
]
| Fix teh fixed fix of fscked fix. | Fix teh fixed fix of fscked fix.
| Python | agpl-3.0 | ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube |
80a940305765a22f96b0c0af0b0b46f1e3f5c377 | tests/unit/models/listing/test_generator.py | tests/unit/models/listing/test_generator.py | """Test praw.models.front."""
from praw.models.listing.generator import ListingGenerator
from ... import UnitTest
class TestListingGenerator(UnitTest):
def test_params_are_not_modified(self):
params = {"prawtest": "yes"}
generator = ListingGenerator(None, None, params=params)
assert "limit" in generator.params
assert "limit" not in params
assert ("prawtest", "yes") in generator.params.items()
| """Test praw.models.listing.generator."""
from praw.models.listing.generator import ListingGenerator
from ... import UnitTest
class TestListingGenerator(UnitTest):
def test_params_are_not_modified(self):
params = {"prawtest": "yes"}
generator = ListingGenerator(None, None, params=params)
assert "limit" in generator.params
assert "limit" not in params
assert ("prawtest", "yes") in generator.params.items()
| Fix docstring typo in ListingGenerator unit tests | Fix docstring typo in ListingGenerator unit tests
| Python | bsd-2-clause | praw-dev/praw,praw-dev/praw |
c0e90114c7a84cfa94fb3f0e862e0453101544ba | flamingo/flamingo/settings/prod.py | flamingo/flamingo/settings/prod.py | import os
import raven
from flamingo.settings.base import BaseSettings
class ProdSettings(BaseSettings):
DEBUG = False
ALLOWED_HOSTS = ['127.0.0.1', 'localhost',]
# Sentry
# @property
# def RAVEN_CONFIG(self):
# return {
# 'dsn': 'https://{public_key}:{secret_key}@app.getsentry.com/{project_id}'.format(
# public_key='7404ed97fa2044418aa231daa72658fc',
# secret_key=os.environ['FLAMINGO_RAVEN_SECRET_KEY'],
# project_id='64150',
# ),
# 'release': raven.fetch_git_sha(self.TOP_DIR),
# }
| import os
import raven
from flamingo.settings.base import BaseSettings
class ProdSettings(BaseSettings):
DEBUG = False
ALLOWED_HOSTS = ['*',] # Heroku handles this under the hood
# Sentry
# @property
# def RAVEN_CONFIG(self):
# return {
# 'dsn': 'https://{public_key}:{secret_key}@app.getsentry.com/{project_id}'.format(
# public_key='7404ed97fa2044418aa231daa72658fc',
# secret_key=os.environ['FLAMINGO_RAVEN_SECRET_KEY'],
# project_id='64150',
# ),
# 'release': raven.fetch_git_sha(self.TOP_DIR),
# }
| Allow all hosts on Heroku | Allow all hosts on Heroku
| Python | isc | RevolutionTech/flamingo,RevolutionTech/flamingo,RevolutionTech/flamingo,RevolutionTech/flamingo |
7416f2fc34bad2036024874ad6a0c9a5f57d0657 | education/management/commands/fake_incoming_message.py | education/management/commands/fake_incoming_message.py | from django.core.management.base import BaseCommand
from optparse import make_option
from rapidsms_httprouter.router import get_router
from rapidsms.models import Connection
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-p", "--phone", dest="phone"),
make_option("-t", "--text", dest="text"),
)
def handle(self, **options):
if not options['phone']:
phone = raw_input('Phone number you wish the message to appear to come from: ')
else:
phone = options['phone']
if not options['text']:
text = raw_input('Text of the message: ')
else:
text = options['text']
connection = Connection.object.get(identity = phone)
router = get_router()
handled = router.handle_incoming(connection.backend.name, connection.identity, text)
self.stdout.write('Done!\n')
| from django.core.management.base import BaseCommand
from optparse import make_option
from rapidsms_httprouter.router import get_router
from rapidsms.models import Connection
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option("-p", "--phone", dest="phone"),
make_option("-t", "--text", dest="text"),
)
def handle(self, **options):
phone = options['phone'] or raw_input('Phone number you wish the message to appear to come from: ')
text = options['text'] or raw_input('Text of the message: ')
connection = Connection.objects.get(identity = phone)
router = get_router()
handled = router.handle_incoming(connection.backend.name, connection.identity, text)
self.stdout.write('Done!\n')
| Simplify the requesting of parameters. | Simplify the requesting of parameters.
| Python | bsd-3-clause | unicefuganda/edtrac,unicefuganda/edtrac,unicefuganda/edtrac |
e5a94d2902a66d55be62b92e35ac90ac7aed7991 | javascript/navigator/__init__.py | javascript/navigator/__init__.py | __author__ = 'katharine'
import PyV8 as v8
from geolocation import Geolocation
class Navigator(v8.JSClass):
def __init__(self, runtime):
# W3C spec says that if geolocation is disabled, navigator.geolocation should not exist.
# if 'location' in runtime.manifest.get('capabilities', []):
if True:
self.geolocation = Geolocation(runtime)
| __author__ = 'katharine'
import PyV8 as v8
from geolocation import Geolocation
from javascript.exceptions import JSRuntimeException
class Navigator(v8.JSClass):
def __init__(self, runtime):
self._runtime = runtime
# W3C spec says that if geolocation is disabled, navigator.geolocation should not exist.
if 'location' in runtime.manifest.get('capabilities', []):
self.geolocation = Geolocation(runtime)
def __getattr__(self, item):
# __getattr__ is only called if something does not exist. Therefore, if it's called, geolocation
# does not exist.
# This approach lets us report it doesn't exist if tested for (e.g. `'geolocation' in navigator`),
# but throw an informative exception if it's accessed.
if item == 'geolocation':
raise JSRuntimeException(
self._runtime,
"You must add 'location' to the appinfo.json capabilities array to access geolocation."
)
else:
raise AttributeError
| Implement location restriction more thoroughly. | Implement location restriction more thoroughly.
| Python | mit | youtux/pypkjs,pebble/pypkjs |
70847e9d88f086d52e167629666aebe5137c7a2e | debileweb/blueprints/forms.py | debileweb/blueprints/forms.py | from wtforms import TextField, BooleanField, Form
from wtforms.validators import Required
class SearchPackageForm(Form):
package = TextField('package', validators = [Required()])
maintainer = TextField('maintainer', validators = [Required()])
| # Copyright (c) 2013 Sylvestre Ledru <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from wtforms import TextField, Form
from wtforms.validators import Required
class SearchPackageForm(Form):
package = TextField('package', validators = [Required()])
maintainer = TextField('maintainer', validators = [Required()])
| Add license + remove useless declaration | Add license + remove useless declaration
| Python | mit | opencollab/debile-web,opencollab/debile-web,opencollab/debile-web |
78ca15758018d52f1353b29410f97bba215e0be2 | django_afip/views.py | django_afip/views.py | from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import View
from .pdf import generate_receipt_pdf
class ReceiptHTMLView(View):
template_name = 'django_afip/invoice.html'
def get(self, request, pk):
return HttpResponse(
generate_receipt_pdf(pk, request, True),
)
class ReceiptPDFView(View):
def get(self, request, pk):
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename=' + \
_('receipt %s.pdf' % pk)
generate_receipt_pdf(pk, response)
return response
| from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import View
from .pdf import generate_receipt_pdf
class ReceiptHTMLView(View):
def get(self, request, pk):
return HttpResponse(
generate_receipt_pdf(pk, request, True),
)
class ReceiptPDFView(View):
def get(self, request, pk):
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename=' + \
_('receipt %s.pdf' % pk)
generate_receipt_pdf(pk, response)
return response
| Remove unused (albeit confusing) variable | Remove unused (albeit confusing) variable
See #13
| Python | isc | hobarrera/django-afip,hobarrera/django-afip |
13a2ea421b761b9009fb7e1328e54cf0ae5cc54f | gapipy/resources/booking/agency.py | gapipy/resources/booking/agency.py | from __future__ import unicode_literals
from ...models import Address
from ...models import AgencyDocument
from .agency_chain import AgencyChain
from ..base import Resource
from ..tour import Promotion
class Agency(Resource):
_resource_name = 'agencies'
_is_listable = False
_is_parent_resource = True
_as_is_fields = ['id', 'href', 'name', 'booking_currencies', 'latitude', 'longitude']
_date_time_fields_local = ['date_created']
_model_fields = [('address', Address)]
_resource_fields = [('agency_chain', AgencyChain)]
_model_collection_fields = [('documents', AgencyDocument)]
_resource_collection_fields = [
('bookings', 'Booking'),
('agents', 'Agent'),
('promotions', Promotion),
]
| from __future__ import unicode_literals
from ...models import Address
from ...models import AgencyDocument
from ...models.base import BaseModel
from .agency_chain import AgencyChain
from ..base import Resource
from ..tour import Promotion
class AgencyEmail(BaseModel):
_as_is_fields = ['type', 'address']
class Agency(Resource):
_resource_name = 'agencies'
_is_listable = False
_is_parent_resource = True
_as_is_fields = ['id', 'href', 'name', 'booking_currencies', 'latitude', 'longitude', 'transactional_email']
_date_time_fields_local = ['date_created']
_model_fields = [('address', Address)]
_resource_fields = [('agency_chain', AgencyChain)]
_model_collection_fields = [
('documents', AgencyDocument),
('emails', AgencyEmail),
]
_resource_collection_fields = [
('bookings', 'Booking'),
('agents', 'Agent'),
('promotions', Promotion),
]
| Add new Agency resource fields | Add new Agency resource fields
| Python | mit | gadventures/gapipy |
3b9508ff6546974ffb2aee8fe38aae15799aafc5 | cellcounter/accounts/urls.py | cellcounter/accounts/urls.py | from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse
from .views import RegistrationView, PasswordChangeView, password_reset_done
urlpatterns = patterns('',
url('^new/$', RegistrationView.as_view(), name='register'),
url('^password/reset/$', 'django.contrib.auth.views.password_reset', {
'template_name': 'accounts/reset_form.html',
'email_template_name': 'accounts/reset_email.txt',
'subject_template_name': 'accounts/reset_subject.txt',
'current_app': 'cellcounter.accounts',
'post_reset_redirect': '/',
},
name='reset-request'),
url('^password/reset/confirm/(?P<uidb64>\d+)/(?P<token>[\d\w-]+)/$',
'django.contrib.auth.views.password_reset_confirm', {
'template_name': 'accounts/reset_confirm.html',
'post_reset_redirect': password_reset_done,
},
name='password-reset-confirm'),
url('^password/change/$', PasswordChangeView.as_view(), name='change-password'),
) | from django.conf.urls import patterns, url
from .views import RegistrationView, PasswordChangeView, password_reset_sent, password_reset_done
urlpatterns = patterns('',
url('^new/$', RegistrationView.as_view(), name='register'),
url('^password/reset/$', 'django.contrib.auth.views.password_reset', {
'template_name': 'accounts/reset_form.html',
'email_template_name': 'accounts/reset_email.txt',
'subject_template_name': 'accounts/reset_subject.txt',
'current_app': 'cellcounter.accounts',
'post_reset_redirect': 'password-reset-sent',
},
name='password-reset'),
url('^password/reset/sent/$', password_reset_sent, name='password-reset-sent'),
url('^password/reset/done/$', password_reset_done, name='password-reset-done'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[\d\w\-]+)/$',
'django.contrib.auth.views.password_reset_confirm', {
'template_name': 'accounts/reset_confirm.html',
'post_reset_redirect': 'password-reset-done',
},
name='password-reset-confirm'),
url('^password/change/$', PasswordChangeView.as_view(), name='change-password'),
) | Add correct reset-sent and reset-done redirect views, tidy regex | Add correct reset-sent and reset-done redirect views, tidy regex
| Python | mit | haematologic/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter,haematologic/cellcounter,haematologic/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter |
eecf64c177c25be34b597e419ce22450440e445f | setup.py | setup.py | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.12',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='[email protected]',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.13',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='[email protected]',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| Update the PyPI version to 0.2.13 | Update the PyPI version to 0.2.13
| Python | mit | electronick1/todoist-python,Doist/todoist-python |
2f0627c1e5c087cf5b712e846b4f687259342063 | credentials/management/commands/import_sshkeypair.py | credentials/management/commands/import_sshkeypair.py | from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from credentials.management.helpers import import_sshkeypair
class Command(BaseCommand):
help = "Import ssh keypair"
args = "[public key filename] [private key filename] [name]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if label already exists."),
)
def handle(self, *args, **options):
if len(args) != 3:
raise CommandError(
"must provide a label, public keyfile and private keyfile")
label, public_key, private_key = args
import_sshkeypair(
label, public_key, private_key,
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
| from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from credentials.management.helpers import import_sshkeypair
class Command(BaseCommand):
help = "Import ssh keypair"
args = "[public key filename] [private key filename] [name]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if label already exists."),
)
def handle(self, *args, **options):
if len(args) != 3:
raise CommandError(
"must provide a public keyfile, private keyfile and label")
public_key, private_key, label = args
import_sshkeypair(
label, public_key, private_key,
update=options["update"], stdout=self.stdout)
transaction.commit_unless_managed()
| Change the help and assignments to match. | Change the help and assignments to match.
| Python | mit | caio1982/capomastro,caio1982/capomastro,timrchavez/capomastro,timrchavez/capomastro,caio1982/capomastro |
58078b1d4eb64c7104715352fc11bf7abffd48a4 | feincms/management/commands/update_rsscontent.py | feincms/management/commands/update_rsscontent.py | from django.core.management.base import NoArgsCommand
from feincms.content.rss.models import RSSContent
class Command(NoArgsCommand):
help = "Run this as a cronjob."
def handle_noargs(self, **options):
for cls in RSSContent._feincms_content_models:
for content in cls.objects.all():
content.cache_content()
| from django.core.management.base import NoArgsCommand
from feincms.content.rss.models import RSSContent
class Command(NoArgsCommand):
help = "Run this as a cronjob."
def handle_noargs(self, **options):
# find all concrete content types of RSSContent
for cls in RSSContent._feincms_content_models:
for content in cls.objects.all():
content.cache_content()
| Add small explaining note to the RSSContent updating management command | Add small explaining note to the RSSContent updating management command
| Python | bsd-3-clause | hgrimelid/feincms,nickburlett/feincms,feincms/feincms,nickburlett/feincms,hgrimelid/feincms,joshuajonah/feincms,pjdelport/feincms,mjl/feincms,joshuajonah/feincms,pjdelport/feincms,matthiask/django-content-editor,matthiask/django-content-editor,mjl/feincms,nickburlett/feincms,matthiask/feincms2-content,matthiask/feincms2-content,feincms/feincms,nickburlett/feincms,michaelkuty/feincms,hgrimelid/feincms,matthiask/django-content-editor,michaelkuty/feincms,feincms/feincms,matthiask/feincms2-content,michaelkuty/feincms,pjdelport/feincms,joshuajonah/feincms,matthiask/django-content-editor,michaelkuty/feincms,joshuajonah/feincms,mjl/feincms |
978b6b46bc1f6b7cb70c14fd929e757a41436f87 | test/test_helpers.py | test/test_helpers.py | import numpy as np
from opensauce.helpers import wavread
from test.support import TestCase, data_file_path, loadmat
class TestSupport(TestCase):
def test_wavread(self):
fn = data_file_path('beijing_f3_50_a.wav')
samples, Fs = wavread(fn)
expected = loadmat('beijing_f3_50_a-wavread-expected')
self.assertEqual(Fs, expected['Fs'])
# XXX may need to use allclose here instead of array_equal.
if not np.array_equal(samples, expected['y']):
# Produce a useful error message for debugging.
self.assertEqual(list(samples), list(expected['y']))
| import numpy as np
from opensauce.helpers import wavread
from test.support import TestCase, data_file_path, loadmat
class TestSupport(TestCase):
def test_wavread(self):
fn = data_file_path('beijing_f3_50_a.wav')
samples, Fs = wavread(fn)
expected = loadmat('beijing_f3_50_a-wavread-expected')
self.assertEqual(Fs, expected['Fs'])
self.assertTrue(np.array_equal(samples, expected['y']))
# XXX may need to use allclose here instead of array_equal.
if not np.array_equal(samples, expected['y']):
# Produce a useful error message for debugging.
self.assertEqual(list(samples), list(expected['y']))
| Add assertion for checking arrays read from wavread | Add assertion for checking arrays read from wavread
| Python | apache-2.0 | voicesauce/opensauce-python,voicesauce/opensauce-python,voicesauce/opensauce-python |
25054c4f9b20cef1a43aea680f75f7208c1fd3b7 | connman_dispatcher/detect.py | connman_dispatcher/detect.py | import glib
import dbus
from dbus.mainloop.glib import DBusGMainLoop
from pyee import EventEmitter
import logbook
logger = logbook.Logger('connman-dispatcher')
__all__ = ['detector']
def property_changed(_, message):
if message.get_member() == "PropertyChanged":
_, state = message.get_args_list()
if state == 'online':
logger.info('network state change: online' )
detector.emit('up')
elif state == 'idle':
logger.info('network state change: offline' )
detector.emit('down')
detector = EventEmitter()
DBusGMainLoop(set_as_default=True)
bus = dbus.SystemBus()
bus.add_match_string_non_blocking("interface='net.connman.Manager'")
bus.add_message_filter(property_changed)
manager = dbus.Interface(bus.get_object('net.connman', "/"), 'net.connman.Manager')
def is_online():
properties = manager.GetProperties()
if properties['State'] == 'online':
return True
return False
def run():
mainloop = glib.MainLoop()
mainloop.run()
detector.run = run
detector.is_online = is_online
| import glib
import dbus
from dbus.mainloop.glib import DBusGMainLoop
from pyee import EventEmitter
import logbook
logger = logbook.Logger('connman-dispatcher')
__all__ = ['detector']
def property_changed(_, message):
if message.get_member() == "PropertyChanged":
_, state = message.get_args_list()
if state == 'online' and not detector.is_online:
logger.info('network state change: online' )
detector.emit('up')
detector.is_online = True
elif state == 'idle':
logger.info('network state change: offline' )
detector.emit('down')
detector.is_online = False
detector = EventEmitter()
detector.is_online = is_online()
DBusGMainLoop(set_as_default=True)
bus = dbus.SystemBus()
bus.add_match_string_non_blocking("interface='net.connman.Manager'")
bus.add_message_filter(property_changed)
manager = dbus.Interface(bus.get_object('net.connman', "/"), 'net.connman.Manager')
def is_online():
properties = manager.GetProperties()
if properties['State'] == 'online':
return True
return False
def run():
mainloop = glib.MainLoop()
mainloop.run()
detector.run = run
detector.is_online = is_online
| Fix bug when sometimes online event was reported twice | Fix bug when sometimes online event was reported twice
| Python | isc | a-sk/connman-dispatcher |
b1f1f4991abdd3f8854923ca7a2bc1b7e9cf6a53 | easyfuse/__init__.py | easyfuse/__init__.py | """
A Python library to create a simple FUSE file system.
.. :copyright: (c) 2016 by Jelte Fennema.
:license: MIT, see License for more details.
"""
| Add docstring to main module | Add docstring to main module
| Python | mit | JelteF/easyfuse,JelteF/easyfuse |
|
308bc2add0cc9d2d8af1d1851d71caa284094f62 | helusers/tests/test_oidc_api_token_authentication.py | helusers/tests/test_oidc_api_token_authentication.py | import json
import time
import uuid
import pytest
from jose import jwt
from helusers.oidc import ApiTokenAuthentication
from .keys import rsa_key
ISSUER = "test_issuer"
class _TestableApiTokenAuthentication(ApiTokenAuthentication):
@property
def oidc_config(self):
return {
"issuer": ISSUER,
}
def jwks_data(self):
return json.dumps({"keys": [rsa_key.public_key_jwk]})
@pytest.mark.django_db
def test_valid_jwt_is_accepted(rf):
sut = _TestableApiTokenAuthentication()
unix_timestamp_now = int(time.time())
user_uuid = uuid.UUID("b7a35517-eb1f-46c9-88bf-3206fb659c3c")
jwt_data = {
"iss": ISSUER,
"aud": "test_audience",
"iat": unix_timestamp_now - 10,
"exp": unix_timestamp_now + 1000,
"sub": str(user_uuid),
}
encoded_jwt = jwt.encode(
jwt_data, key=rsa_key.private_key_pem, algorithm=rsa_key.jose_algorithm
)
request = rf.get("/path", HTTP_AUTHORIZATION=f"Bearer {encoded_jwt}")
(user, auth) = sut.authenticate(request)
assert user.uuid == user_uuid
assert auth.user == user
| import json
import uuid
import pytest
from helusers.oidc import ApiTokenAuthentication
from .conftest import encoded_jwt_factory, ISSUER1
from .keys import rsa_key
class _TestableApiTokenAuthentication(ApiTokenAuthentication):
@property
def oidc_config(self):
return {
"issuer": ISSUER1,
}
def jwks_data(self):
return json.dumps({"keys": [rsa_key.public_key_jwk]})
@pytest.mark.django_db
def test_valid_jwt_is_accepted(rf, unix_timestamp_now):
sut = _TestableApiTokenAuthentication()
user_uuid = uuid.UUID("b7a35517-eb1f-46c9-88bf-3206fb659c3c")
encoded_jwt = encoded_jwt_factory(
iss=ISSUER1,
aud="test_audience",
iat=unix_timestamp_now - 10,
exp=unix_timestamp_now + 1000,
sub=str(user_uuid),
)
request = rf.get("/path", HTTP_AUTHORIZATION=f"Bearer {encoded_jwt}")
(user, auth) = sut.authenticate(request)
assert user.uuid == user_uuid
assert auth.user == user
| Use common test helpers in a test | Use common test helpers in a test
| Python | bsd-2-clause | City-of-Helsinki/django-helusers,City-of-Helsinki/django-helusers |
44f1e6ec95305bd7b4d69bbcdfb386f5ca958bdc | imagedownloader/stations/tests/units/test_devices.py | imagedownloader/stations/tests/units/test_devices.py | # -*- coding: utf-8 -*-
from stations.models import *
from django.test import TestCase
from datetime import datetime
import pytz
class TestProducts(TestCase):
fixtures = [ 'initial_data.yaml', '*']
def setUp(self):
self.device = Device.objects.filter(product__name = 'CMP 11')[0]
def test_serialization(self):
# check if the __str__ method is defined to return the object serial_number and a device product name.
self.assertEquals(str(self.device), self.device.serial_number + " (" + self.device.product.name + ")") | # -*- coding: utf-8 -*-
from stations.models import *
from django.test import TestCase
from datetime import datetime
import pytz
class TestDevices(TestCase):
fixtures = [ 'initial_data.yaml', '*']
def setUp(self):
self.device = Device.objects.filter(product__name = 'CMP 11')[0]
def test_serialization(self):
# check if the __str__ method is defined to return the object serial_number and a device product name.
self.assertEquals(str(self.device), self.device.serial_number + " (" + str(self.device.product) + ")") | Correct the name of the devices' test case to TestDevices (copy&paste bug). | stations: Correct the name of the devices' test case to TestDevices (copy&paste bug).
| Python | mit | gersolar/solar_radiation_model,ahMarrone/solar_radiation_model,scottlittle/solar_radiation_model |
a7ccf4fac47762668214916b1c5c05d78c563bf5 | tests/integration/test_redirection_relative.py | tests/integration/test_redirection_relative.py | """Check relative REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
nikola.utils.makedirs(os.path.join(target_dir, "files", "foo"))
target_path = os.path.join(target_dir, "files", "foo", "bar.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("foo")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("foo.html", "foo/bar.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
| """Check relative REDIRECTIONS"""
import io
import os
import pytest
import nikola.plugins.command.init
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
redirects_dir = os.path.join(target_dir, "files", "redirects")
nikola.utils.makedirs(redirects_dir)
target_path = os.path.join(redirects_dir, "rel_src.html")
with io.open(target_path, "w+", encoding="utf8") as outf:
outf.write("relative")
append_config(
target_dir,
"""
REDIRECTIONS = [ ("relative.html", "redirects/rel_src.html"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
| Refactor in preparation of merge of relative tests. | Refactor in preparation of merge of relative tests.
| Python | mit | getnikola/nikola,okin/nikola,okin/nikola,getnikola/nikola,okin/nikola,okin/nikola,getnikola/nikola,getnikola/nikola |
76756a31e15cb5a9b756030c3bd90d06c898b524 | go/apps/surveys/definition.py | go/apps/surveys/definition.py | from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
from go.apps.surveys.tasks import export_vxpolls_data
class SendSurveyAction(ConversationAction):
action_name = 'send_survey'
action_display_name = 'Send Survey'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.batch.key,
msg_options={}, delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
action_display_verb = 'Send CSV via e-mail'
def perform_action(self, action_data):
return export_vxpolls_data.delay(self._conv.user_account.key,
self._conv.key)
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'surveys'
actions = (
SendSurveyAction,
DownloadUserDataAction,
)
| from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class SendSurveyAction(ConversationAction):
action_name = 'send_survey'
action_display_name = 'Send Survey'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.batch.key,
msg_options={}, delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
action_display_verb = 'Send CSV via e-mail'
def perform_action(self, action_data):
# This is Django-only, but the module get imported in vumi-land.
from go.apps.surveys.tasks import export_vxpolls_data
return export_vxpolls_data.delay(self._conv.user_account.key,
self._conv.key)
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'surveys'
actions = (
SendSurveyAction,
DownloadUserDataAction,
)
| Move survey action celery task import to method scope. | Move survey action celery task import to method scope.
| Python | bsd-3-clause | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go |
c3ff26ad884592d297e1aec67bce468e6669fc96 | panoptes_cli/scripts/panoptes.py | panoptes_cli/scripts/panoptes.py | import click
import os
import yaml
from panoptes_client import Panoptes
@click.group()
@click.option(
'--endpoint', type=str
)
@click.pass_context
def cli(ctx, endpoint):
ctx.config_dir = os.path.expanduser('~/.panoptes/')
ctx.config_file = os.path.join(ctx.config_dir, 'config.yml')
ctx.config = {
'endpoint': 'https://panoptes.zooniverse.org',
'username': '',
'password': '',
}
try:
with open(ctx.config_file) as conf_f:
ctx.config.update(yaml.load(conf_f))
except IOError:
pass
if endpoint:
ctx.config['endpoint'] = endpoint
Panoptes.connect(
endpoint=ctx.config['endpoint'],
username=ctx.config['username'],
password=ctx.config['password']
)
from panoptes_cli.commands.configure import *
from panoptes_cli.commands.project import *
from panoptes_cli.commands.subject import *
from panoptes_cli.commands.subject_set import *
from panoptes_cli.commands.workflow import *
| import click
import os
import yaml
from panoptes_client import Panoptes
@click.group()
@click.option('--endpoint', type=str)
@click.option('--admin', is_flag=True)
@click.pass_context
def cli(ctx, endpoint, admin):
ctx.config_dir = os.path.expanduser('~/.panoptes/')
ctx.config_file = os.path.join(ctx.config_dir, 'config.yml')
ctx.config = {
'endpoint': 'https://panoptes.zooniverse.org',
'username': '',
'password': '',
}
try:
with open(ctx.config_file) as conf_f:
ctx.config.update(yaml.load(conf_f))
except IOError:
pass
if endpoint:
ctx.config['endpoint'] = endpoint
Panoptes.connect(
endpoint=ctx.config['endpoint'],
username=ctx.config['username'],
password=ctx.config['password'],
admin=admin,
)
from panoptes_cli.commands.configure import *
from panoptes_cli.commands.project import *
from panoptes_cli.commands.subject import *
from panoptes_cli.commands.subject_set import *
from panoptes_cli.commands.workflow import *
| Add --admin option for connecting in admin mode | Add --admin option for connecting in admin mode
| Python | apache-2.0 | zooniverse/panoptes-cli |