Dataset schema (column name, type, and observed value-length range or number of distinct classes):

commit          stringlengths   40 to 40
old_file        stringlengths   4 to 150
new_file        stringlengths   4 to 150
old_contents    stringlengths   0 to 3.26k
new_contents    stringlengths   1 to 4.43k
subject         stringlengths   15 to 501
message         stringlengths   15 to 4.06k
lang            stringclasses   4 values
license         stringclasses   13 values
repos           stringlengths   5 to 91.5k
diff            stringlengths   0 to 4.35k
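The rows below follow this schema, one field value per line in the order listed above. As a minimal sketch of how a dump like this is typically consumed, assuming the rows are published as a Hugging Face dataset (the dataset name used below is a hypothetical placeholder, not taken from this document):

# Minimal sketch of loading and inspecting rows with the schema above.
# "user/commit-diffs" is a hypothetical dataset name used only for illustration.
from datasets import load_dataset

ds = load_dataset("user/commit-diffs", split="train")

# Each row is a dict keyed by the column names listed in the schema.
row = ds[0]
print(row["commit"], row["old_file"], row["lang"], row["license"])
print(row["subject"])

# Restrict to a single language, e.g. only Python rows.
python_rows = ds.filter(lambda r: r["lang"] == "Python")
print(len(python_rows))

The filter call assumes the lang column stores plain language names such as "Python", which is what the sample rows below show.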
538f8e3382e274402f2f71ba79439fae0828b3cf
IPython/html.py
IPython/html.py
""" Shim to maintain backwards compatibility with old IPython.html imports. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import sys from warnings import warn warn("The `IPython.html` package has been deprecated. " "You should import from jupyter_notebook instead.") from IPython.utils.shimmodule import ShimModule sys.modules['IPython.html'] = ShimModule( src='IPython.html', mirror='jupyter_notebook') if __name__ == '__main__': from jupyter_notebook import notebookapp as app app.launch_new_instance()
""" Shim to maintain backwards compatibility with old IPython.html imports. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import sys from warnings import warn warn("The `IPython.html` package has been deprecated. " "You should import from `jupyter_notebook` and `jupyter_widgets` instead.") from IPython.utils.shimmodule import ShimModule sys.modules['IPython.html'] = ShimModule( src='IPython.html', mirror='jupyter_notebook') sys.modules['IPython.html.widgets'] = ShimModule( src='IPython.html.widgets', mirror='jupyter_widgets') if __name__ == '__main__': from jupyter_notebook import notebookapp as app app.launch_new_instance()
Add shim to new widgets repository.
Add shim to new widgets repository.
Python
bsd-3-clause
ipython/ipython,ipython/ipython
--- +++ @@ -8,12 +8,14 @@ from warnings import warn warn("The `IPython.html` package has been deprecated. " - "You should import from jupyter_notebook instead.") + "You should import from `jupyter_notebook` and `jupyter_widgets` instead.") from IPython.utils.shimmodule import ShimModule sys.modules['IPython.html'] = ShimModule( src='IPython.html', mirror='jupyter_notebook') +sys.modules['IPython.html.widgets'] = ShimModule( + src='IPython.html.widgets', mirror='jupyter_widgets') if __name__ == '__main__': from jupyter_notebook import notebookapp as app
0b5a657339870c7669082c39f8290c88732aa92e
extractor.py
extractor.py
from extraction.core import ExtractionRunner from extraction.runnables import Extractor, RunnableError, Filter, ExtractorResult import os import sys import grobid import pdfbox import filters if __name__ == '__main__': runner = ExtractionRunner() runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor) runner.add_runnable(filters.AcademicPaperFilter) argc = len(sys.argv) if argc == 2: runner.run_from_file(sys.argv[1]) elif argc == 3: runner.run_from_file(sys.argv[1], output_dir = sys.argv[2]) else: print("USAGE: python {0} path_to_pdf [output_directory]")
from extraction.core import ExtractionRunner from extraction.runnables import Extractor, RunnableError, Filter, ExtractorResult import os import sys import grobid import pdfbox import filters def get_extraction_runner(): runner = ExtractionRunner() runner.add_runnable(grobid.GrobidPlainTextExtractor) # OR # runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor) runner.add_runnable(filters.AcademicPaperFilter) return runner if __name__ == '__main__': runner = get_extraction_runner() argc = len(sys.argv) if argc == 2: runner.run_from_file(sys.argv[1]) elif argc == 3: runner.run_from_file(sys.argv[1], output_dir = sys.argv[2]) else: print("USAGE: python {0} path_to_pdf [output_directory]")
Make code a little cleaner
Make code a little cleaner
Python
apache-2.0
Tiger66639/new-csx-extractor,SeerLabs/new-csx-extractor,Tiger66639/new-csx-extractor,SeerLabs/new-csx-extractor,Tiger66639/new-csx-extractor,Tiger66639/new-csx-extractor,SeerLabs/new-csx-extractor,SeerLabs/new-csx-extractor
--- +++ @@ -6,11 +6,21 @@ import pdfbox import filters -if __name__ == '__main__': +def get_extraction_runner(): runner = ExtractionRunner() - runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor) + + runner.add_runnable(grobid.GrobidPlainTextExtractor) + # OR + # runner.add_runnable(pdfbox.PDFBoxPlainTextExtractor) + runner.add_runnable(filters.AcademicPaperFilter) + + return runner + + +if __name__ == '__main__': + runner = get_extraction_runner() argc = len(sys.argv) if argc == 2:
af182857b4a70245b0b06bbf37e2d67e0ded493f
ez_gpg/ui.py
ez_gpg/ui.py
import gi import gnupg # Requires python3-gnupg gi.require_version('Gtk', '3.0') from gi.repository import Gtk class MainWindow(Gtk.Window): def __init__(self): Gtk.Window.__init__(self, title="EZ GPG") self.connect("delete-event", Gtk.main_quit) self.set_border_width(30) gpg_keys_list = Gtk.ListStore(str, str) for key in self._get_gpg_keys(): gpg_keys_list.append([key['keyid'], "%s %s" % (key['keyid'], key['uids'][0])]) gpg_key_combo_box = Gtk.ComboBox.new_with_model_and_entry(gpg_keys_list) gpg_key_combo_box.set_entry_text_column(1) self.add(gpg_key_combo_box) def _get_gpg_keys(self): gpg = gnupg.GPG() return gpg.list_keys() class EzGpg(Gtk.Window): def launch(self): MainWindow().show_all() Gtk.main()
import gi import gnupg # Requires python3-gnupg gi.require_version('Gtk', '3.0') from gi.repository import Gtk class GpgKeyList(Gtk.ComboBox): def __init__(self): Gtk.ComboBox.__init__(self) gpg_keys_list = Gtk.ListStore(str, str) for key in self._get_gpg_keys(): key_id = key['keyid'] key_name = "%s %s" % (key['keyid'], key['uids'][0]) gpg_keys_list.append([key_id, key_name]) cell = Gtk.CellRendererText() self.pack_start(cell, True) self.add_attribute(cell, 'text', 1) self.set_model(gpg_keys_list) self.set_entry_text_column(1) def _get_gpg_keys(self): gpg = gnupg.GPG() return gpg.list_keys() class MainWindow(Gtk.Window): def __init__(self): Gtk.Window.__init__(self, title="EZ GPG") self.connect("delete-event", Gtk.main_quit) self.set_border_width(30) self.set_position(Gtk.WindowPosition.CENTER) gpg_key_combo = GpgKeyList() self.add(gpg_key_combo) class EzGpg(Gtk.Window): def launch(self): MainWindow().show_all() Gtk.main()
Split out gpg key list into its own class
Split out gpg key list into its own class This will make it easy to break out into a module when we need it. In the process, window was also set to be in the center of the user's screen.
Python
lgpl-2.1
sgnn7/ez_gpg,sgnn7/ez_gpg
--- +++ @@ -5,21 +5,23 @@ from gi.repository import Gtk -class MainWindow(Gtk.Window): +class GpgKeyList(Gtk.ComboBox): def __init__(self): - Gtk.Window.__init__(self, title="EZ GPG") - self.connect("delete-event", Gtk.main_quit) - - self.set_border_width(30) + Gtk.ComboBox.__init__(self) gpg_keys_list = Gtk.ListStore(str, str) for key in self._get_gpg_keys(): - gpg_keys_list.append([key['keyid'], "%s %s" % (key['keyid'], key['uids'][0])]) + key_id = key['keyid'] + key_name = "%s %s" % (key['keyid'], key['uids'][0]) - gpg_key_combo_box = Gtk.ComboBox.new_with_model_and_entry(gpg_keys_list) - gpg_key_combo_box.set_entry_text_column(1) + gpg_keys_list.append([key_id, key_name]) - self.add(gpg_key_combo_box) + cell = Gtk.CellRendererText() + self.pack_start(cell, True) + self.add_attribute(cell, 'text', 1) + + self.set_model(gpg_keys_list) + self.set_entry_text_column(1) def _get_gpg_keys(self): gpg = gnupg.GPG() @@ -27,6 +29,19 @@ return gpg.list_keys() +class MainWindow(Gtk.Window): + def __init__(self): + Gtk.Window.__init__(self, title="EZ GPG") + self.connect("delete-event", Gtk.main_quit) + + self.set_border_width(30) + self.set_position(Gtk.WindowPosition.CENTER) + + gpg_key_combo = GpgKeyList() + + self.add(gpg_key_combo) + + class EzGpg(Gtk.Window): def launch(self): MainWindow().show_all()
1dfbe495972a5f4d02ce374131f40d4474f24cc6
website/ember_osf_web/views.py
website/ember_osf_web/views.py
# -*- coding: utf-8 -*- import os import json import requests from flask import send_from_directory, Response, stream_with_context from framework.sessions import session from website.settings import EXTERNAL_EMBER_APPS, PROXY_EMBER_APPS, EXTERNAL_EMBER_SERVER_TIMEOUT ember_osf_web_dir = os.path.abspath(os.path.join(os.getcwd(), EXTERNAL_EMBER_APPS['ember_osf_web']['path'])) routes = [ '/quickfiles/', '/<uid>/quickfiles/' ] def use_ember_app(**kwargs): if PROXY_EMBER_APPS: resp = requests.get(EXTERNAL_EMBER_APPS['ember_osf_web']['server'], stream=True, timeout=EXTERNAL_EMBER_SERVER_TIMEOUT) resp = Response(stream_with_context(resp.iter_content()), resp.status_code) else: resp = send_from_directory(ember_osf_web_dir, 'index.html') if session.data.get('status'): status = [{'id': stat.id if stat.id else stat.message, 'class': stat.css_class, 'jumbo': stat.jumbotron, 'dismiss': stat.dismissible, 'extra': stat.extra} for stat in session.data['status']] resp.set_cookie('status', json.dumps(status)) return resp
# -*- coding: utf-8 -*- import os import json import requests from flask import send_from_directory, Response, stream_with_context from framework.sessions import session from website.settings import EXTERNAL_EMBER_APPS, PROXY_EMBER_APPS, EXTERNAL_EMBER_SERVER_TIMEOUT ember_osf_web_dir = os.path.abspath(os.path.join(os.getcwd(), EXTERNAL_EMBER_APPS['ember_osf_web']['path'])) routes = [ '/quickfiles/', '/<uid>/quickfiles/' ] def use_ember_app(**kwargs): if PROXY_EMBER_APPS: resp = requests.get(EXTERNAL_EMBER_APPS['ember_osf_web']['server'], stream=True, timeout=EXTERNAL_EMBER_SERVER_TIMEOUT) resp = Response(stream_with_context(resp.iter_content()), resp.status_code) else: resp = send_from_directory(ember_osf_web_dir, 'index.html') if session.data.get('status'): status = [{'id': stat[5] if stat[5] else stat[0], 'class': stat[2], 'jumbo': stat[1], 'dismiss': stat[3], 'extra': stat[6]} for stat in session.data['status']] resp.set_cookie('status', json.dumps(status)) return resp
Revert "Use namedtuple's getattr rather than indexing"
Revert "Use namedtuple's getattr rather than indexing" This reverts commit 5c4f93207c1fbfe9b9a478082d5f039a9e5ba720.
Python
apache-2.0
Johnetordoff/osf.io,adlius/osf.io,aaxelb/osf.io,felliott/osf.io,mfraezz/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,icereval/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,saradbowman/osf.io,mattclark/osf.io,aaxelb/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,sloria/osf.io,felliott/osf.io,felliott/osf.io,brianjgeiger/osf.io,binoculars/osf.io,pattisdr/osf.io,adlius/osf.io,sloria/osf.io,Johnetordoff/osf.io,mattclark/osf.io,mfraezz/osf.io,adlius/osf.io,mfraezz/osf.io,icereval/osf.io,cslzchen/osf.io,aaxelb/osf.io,erinspace/osf.io,aaxelb/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,icereval/osf.io,binoculars/osf.io,baylee-d/osf.io,baylee-d/osf.io,caseyrollins/osf.io,felliott/osf.io,erinspace/osf.io,caseyrollins/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,adlius/osf.io,baylee-d/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,sloria/osf.io
--- +++ @@ -21,6 +21,6 @@ else: resp = send_from_directory(ember_osf_web_dir, 'index.html') if session.data.get('status'): - status = [{'id': stat.id if stat.id else stat.message, 'class': stat.css_class, 'jumbo': stat.jumbotron, 'dismiss': stat.dismissible, 'extra': stat.extra} for stat in session.data['status']] + status = [{'id': stat[5] if stat[5] else stat[0], 'class': stat[2], 'jumbo': stat[1], 'dismiss': stat[3], 'extra': stat[6]} for stat in session.data['status']] resp.set_cookie('status', json.dumps(status)) return resp
6a8068942d985f0c125749d5f58ad7cb9cd189be
scanpointgenerator/linegenerator_step.py
scanpointgenerator/linegenerator_step.py
from linegenerator import LineGenerator import math as m class StepLineGenerator(LineGenerator): def __init__(self, name, units, start, end, step): num = int(m.floor((end - start)/step)) super(StepLineGenerator, self).__init__(name, units, start, step, num)
from linegenerator import LineGenerator class StepLineGenerator(LineGenerator): def __init__(self, name, units, start, end, step): num = int((end - start)/step) + 1 super(StepLineGenerator, self).__init__(name, units, start, step, num)
Add extra point to include start
Add extra point to include start
Python
apache-2.0
dls-controls/scanpointgenerator
--- +++ @@ -1,11 +1,10 @@ from linegenerator import LineGenerator -import math as m class StepLineGenerator(LineGenerator): def __init__(self, name, units, start, end, step): - num = int(m.floor((end - start)/step)) + num = int((end - start)/step) + 1 super(StepLineGenerator, self).__init__(name, units, start, step, num)
acd5a676b08e070c804bdae78abba266b47c67b5
libvcs/__about__.py
libvcs/__about__.py
__title__ = 'libvcs' __package_name__ = 'libvcs' __description__ = 'vcs abstraction layer' __version__ = '0.3.0' __author__ = 'Tony Narlock' __email__ = '[email protected]' __license__ = 'MIT' __copyright__ = 'Copyright 2016 Tony Narlock'
__title__ = 'libvcs' __package_name__ = 'libvcs' __description__ = 'vcs abstraction layer' __version__ = '0.3.0' __author__ = 'Tony Narlock' __github__ = 'https://github.com/vcs-python/libvcs' __pypi__ = 'https://pypi.org/project/libvcs/' __email__ = '[email protected]' __license__ = 'MIT' __copyright__ = 'Copyright 2016- Tony Narlock'
Add pypi + github to metadata
Add pypi + github to metadata
Python
mit
tony/libvcs
--- +++ @@ -3,6 +3,8 @@ __description__ = 'vcs abstraction layer' __version__ = '0.3.0' __author__ = 'Tony Narlock' +__github__ = 'https://github.com/vcs-python/libvcs' +__pypi__ = 'https://pypi.org/project/libvcs/' __email__ = '[email protected]' __license__ = 'MIT' -__copyright__ = 'Copyright 2016 Tony Narlock' +__copyright__ = 'Copyright 2016- Tony Narlock'
c3ea49a3b040dd1e5252ead1a2ae577e037b8b32
wsme/release.py
wsme/release.py
name = "WSME" version = "0.4" description = """Web Services Made Easy makes it easy to \ implement multi-protocol webservices.""" author = "Christophe de Vienne" email = "[email protected]" url = "http://bitbucket.org/cdevienne/wsme" license = "MIT"
name = "WSME" version = "0.4b1" description = """Web Services Made Easy makes it easy to \ implement multi-protocol webservices.""" author = "Christophe de Vienne" email = "[email protected]" url = "http://bitbucket.org/cdevienne/wsme" license = "MIT"
Add a b1 version tag
Add a b1 version tag --HG-- extra : rebase_source : 637fbf8294dbe0881c87b4bfeea547303dc8ec96
Python
mit
stackforge/wsme
--- +++ @@ -1,5 +1,5 @@ name = "WSME" -version = "0.4" +version = "0.4b1" description = """Web Services Made Easy makes it easy to \ implement multi-protocol webservices."""
7efcc9987f827eec56677d95bc7ad873208b392f
saw/parser/sentences.py
saw/parser/sentences.py
import base from blocks import Blocks import re class Sentences(base.Base): _type = 'sentences' child_class = Blocks @staticmethod def parse(text): #re.split('\!|\?|\. | \.',text) result = [] prev = 0 # we allow .09 as not end of sentences #for m in re.finditer('[\!\?]+|\.+(?:\s+|$|\?|\!)', text): for m in re.finditer('\.+(?:\s+|$)|(\.*)[\!\?]+(\.+(?:\s+|$))*', text): curr, _next = m.start(), m.end() # if prev position of delimiter < current - between exists text # at least 1 symbol. if prev < curr: node = text[prev:curr].strip() if node != '': result.append(node) result.append(list( text[curr:_next].strip() )) prev = _next if len(text) > prev: result.append(text[prev:].strip()) return result
import base from blocks import Blocks import re class Sentences(base.Base): _type = 'sentences' child_class = Blocks @staticmethod def parse(text): _len = len(text) result = [] prev = 0 # we allow .09 as not end of sentences for m in re.finditer('[\!\?\.]+', text): curr, _next = m.start(), m.end() items = list( text[curr: _next].strip() ) if (_len > _next) and not (text[_next] == ' '): # delete ending '.' if they not before space or end of string while (len(items) > 0) and (items[-1] == '.'): items.pop() _next = _next - 1 if len(items) > 0: # if prev position of delimiter < current - between exists text # at least 1 symbol. if prev < curr: node = text[prev:curr].strip() if node != '': result.append(node) result.append( items ) prev = _next if _len > prev: result.append(text[prev:].strip()) return result
Optimize from 5-6s to 2.9-3.0
Optimize from 5-6s to 2.9-3.0
Python
mit
diNard/Saw
--- +++ @@ -8,22 +8,30 @@ @staticmethod def parse(text): - #re.split('\!|\?|\. | \.',text) + _len = len(text) result = [] prev = 0 # we allow .09 as not end of sentences - #for m in re.finditer('[\!\?]+|\.+(?:\s+|$|\?|\!)', text): - for m in re.finditer('\.+(?:\s+|$)|(\.*)[\!\?]+(\.+(?:\s+|$))*', text): + for m in re.finditer('[\!\?\.]+', text): curr, _next = m.start(), m.end() - # if prev position of delimiter < current - between exists text - # at least 1 symbol. - if prev < curr: - node = text[prev:curr].strip() - if node != '': - result.append(node) - result.append(list( text[curr:_next].strip() )) - prev = _next - if len(text) > prev: + items = list( text[curr: _next].strip() ) + + if (_len > _next) and not (text[_next] == ' '): + # delete ending '.' if they not before space or end of string + while (len(items) > 0) and (items[-1] == '.'): + items.pop() + _next = _next - 1 + + if len(items) > 0: + # if prev position of delimiter < current - between exists text + # at least 1 symbol. + if prev < curr: + node = text[prev:curr].strip() + if node != '': + result.append(node) + result.append( items ) + prev = _next + if _len > prev: result.append(text[prev:].strip()) return result
f67e47fd900de3953ee8abb45e5ea56851c10dee
yvs/set_pref.py
yvs/set_pref.py
# yvs.set_pref # coding=utf-8 from __future__ import unicode_literals import json import sys import yvs.shared as shared # Parse pref set data from the given JSON string def parse_pref_set_data_str(pref_set_data_str): pref_set_data = json.loads( pref_set_data_str)['alfredworkflow']['variables'] return [pref_set_data[key] for key in ('pref_id', 'pref_name', 'value_id', 'value_name')] # Set the YouVersion Suggest preference with the given key def set_pref(pref_id, value_id): user_prefs = shared.get_user_prefs() user_prefs[pref_id] = value_id # If new language is set, ensure that preferred version is updated also if pref_id == 'language': bible = shared.get_bible_data(language_id=value_id) user_prefs['version'] = bible['default_version'] shared.clear_cache() shared.set_user_prefs(user_prefs) def main(pref_set_data_str): pref_id, pref_name, value_id, value_name = parse_pref_set_data_str( pref_set_data_str) set_pref(pref_id, value_id) print(pref_set_data_str.encode('utf-8')) if __name__ == '__main__': main(sys.argv[1].decode('utf-8'))
# yvs.set_pref # coding=utf-8 from __future__ import unicode_literals import json import sys import yvs.shared as shared # Parse pref set data from the given JSON string def parse_pref_set_data_str(pref_set_data_str): pref_set_data = json.loads( pref_set_data_str)['alfredworkflow']['variables'] return [pref_set_data[key] for key in ['pref_id', 'pref_name', 'value_id', 'value_name']] # Set the YouVersion Suggest preference with the given key def set_pref(pref_id, value_id): user_prefs = shared.get_user_prefs() user_prefs[pref_id] = value_id # If new language is set, ensure that preferred version is updated also if pref_id == 'language': bible = shared.get_bible_data(language_id=value_id) user_prefs['version'] = bible['default_version'] shared.clear_cache() shared.set_user_prefs(user_prefs) def main(pref_set_data_str): pref_id, pref_name, value_id, value_name = parse_pref_set_data_str( pref_set_data_str) set_pref(pref_id, value_id) print(pref_set_data_str.encode('utf-8')) if __name__ == '__main__': main(sys.argv[1].decode('utf-8'))
Revert "Switch to tuple for pref set data key list"
Revert "Switch to tuple for pref set data key list" This reverts commit 302d9797b7ccb46e7b9575513c0a2c5461e156a5.
Python
mit
caleb531/youversion-suggest,caleb531/youversion-suggest
--- +++ @@ -15,7 +15,7 @@ pref_set_data = json.loads( pref_set_data_str)['alfredworkflow']['variables'] return [pref_set_data[key] for key in - ('pref_id', 'pref_name', 'value_id', 'value_name')] + ['pref_id', 'pref_name', 'value_id', 'value_name']] # Set the YouVersion Suggest preference with the given key
f9d1bd9471196d5706c063a5ba3d3ca0531fbd1e
npactflask/npactflask/helpers.py
npactflask/npactflask/helpers.py
import os.path from flask import url_for from npactflask import app # TODO: I think this is more simply a template_global: # http://flask.pocoo.org/docs/0.10/api/#flask.Flask.template_global @app.context_processor def vSTATIC(): def STATICV(filename): if app.config['DEBUG']: vnum = os.path.getmtime(os.path.join(app.static_folder, filename)) else: vnum = app.config['VERSION'] return (url_for('static', filename=filename, vnum=vnum)) return dict(vSTATIC=STATICV)
from flask import url_for from npactflask import app @app.template_global() def vSTATIC(filename): if app.config['DEBUG']: return url_for('static', filename=filename) else: return url_for('static', filename=filename, vnum=app.config['VERSION'])
Disable vSTATIC version during DEBUG
Disable vSTATIC version during DEBUG when DEBUG is set we serve static files with max-age of zero which is hopefully good enough. Adding to the querystring is messing up chrome development workflow (breakpoints are associated with full uri, including querystring).
Python
bsd-3-clause
NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact
--- +++ @@ -1,18 +1,12 @@ -import os.path from flask import url_for - from npactflask import app -# TODO: I think this is more simply a template_global: -# http://flask.pocoo.org/docs/0.10/api/#flask.Flask.template_global [email protected]_processor -def vSTATIC(): - def STATICV(filename): - if app.config['DEBUG']: - vnum = os.path.getmtime(os.path.join(app.static_folder, filename)) - else: - vnum = app.config['VERSION'] - return (url_for('static', filename=filename, vnum=vnum)) - return dict(vSTATIC=STATICV) [email protected]_global() +def vSTATIC(filename): + if app.config['DEBUG']: + return url_for('static', filename=filename) + else: + return url_for('static', + filename=filename, vnum=app.config['VERSION'])
026f4e4bcd23e716db508d370e7c84fbf60f4bd0
scipy/io/arff/tests/test_header.py
scipy/io/arff/tests/test_header.py
#!/usr/bin/env python """Test for parsing arff headers only.""" import os from scipy.testing import * from scipy.io.arff.arffread import read_header, MetaData data_path = os.path.join(os.path.dirname(__file__), 'data') test1 = os.path.join(data_path, 'test1.arff') class HeaderTest(TestCase): def test_trivial1(self): """Parsing trivial header with nothing.""" ofile = open(test1) rel, attrs = read_header(ofile) # Test relation assert rel == 'test1' # Test numerical attributes assert len(attrs) == 5 for i in range(4): assert attrs[i][0] == 'attr%d' % i assert attrs[i][1] == 'REAL' classes = attrs[4][1] # Test nominal attribute assert attrs[4][0] == 'class' assert attrs[4][1] == '{class0, class1, class2, class3}' if __name__ == "__main__": nose.run(argv=['', __file__])
#!/usr/bin/env python """Test for parsing arff headers only.""" import os from scipy.testing import * from scipy.io.arff.arffread import read_header, MetaData data_path = os.path.join(os.path.dirname(__file__), 'data') test1 = os.path.join(data_path, 'test1.arff') class HeaderTest(TestCase): def test_fullheader1(self): """Parsing trivial header with nothing.""" ofile = open(test1) rel, attrs = read_header(ofile) # Test relation assert rel == 'test1' # Test numerical attributes assert len(attrs) == 5 for i in range(4): assert attrs[i][0] == 'attr%d' % i assert attrs[i][1] == 'REAL' classes = attrs[4][1] # Test nominal attribute assert attrs[4][0] == 'class' assert attrs[4][1] == '{class0, class1, class2, class3}' if __name__ == "__main__": nose.run(argv=['', __file__])
Change name for arff read test.
Change name for arff read test. git-svn-id: 003f22d385e25de9cff933a5ea4efd77cb5e7b28@4095 d6536bca-fef9-0310-8506-e4c0a848fbcf
Python
bsd-3-clause
lesserwhirls/scipy-cwt,scipy/scipy-svn,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,jasonmccampbell/scipy-refactor,scipy/scipy-svn,lesserwhirls/scipy-cwt,scipy/scipy-svn,lesserwhirls/scipy-cwt,jasonmccampbell/scipy-refactor,scipy/scipy-svn,lesserwhirls/scipy-cwt
--- +++ @@ -11,7 +11,7 @@ test1 = os.path.join(data_path, 'test1.arff') class HeaderTest(TestCase): - def test_trivial1(self): + def test_fullheader1(self): """Parsing trivial header with nothing.""" ofile = open(test1) rel, attrs = read_header(ofile)
7c9fbdb62c6b045476064fd4193fd133ed0de7c3
genderator/utils.py
genderator/utils.py
from unidecode import unidecode class Normalizer: def normalize(text): text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) # text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): return unidecode(text)
from unidecode import unidecode class Normalizer: def normalize(text): """ Normalize a given text applying all normalizations. Params: text: The text to be processed. Returns: The text normalized. """ text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): """ Remove hyphens from input text. Params: text: The text to be processed. Returns: The text without hyphens. """ return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): """ Remove extra whitespaces from input text. This function removes whitespaces from the beginning and the end of the string, but also duplicated whitespaces between words. Params: text: The text to be processed. Returns: The text without extra whitespaces. """ return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): """ Remove accent marks from input text. Params: text: The text to be processed. Returns: The text without accent marks. """ return unidecode(text)
Add accent marks normalization and missing docstrings
Add accent marks normalization and missing docstrings
Python
mit
davidmogar/genderator
--- +++ @@ -4,20 +4,59 @@ class Normalizer: def normalize(text): + """ + Normalize a given text applying all normalizations. + + Params: + text: The text to be processed. + + Returns: + The text normalized. + """ text = Normalizer.remove_extra_whitespaces(text) text = Normalizer.replace_hyphens(text) - # text = Normalizer.remove_accent_marks(text) + text = Normalizer.remove_accent_marks(text) return text.lower() @staticmethod def replace_hyphens(text): + """ + Remove hyphens from input text. + + Params: + text: The text to be processed. + + Returns: + The text without hyphens. + """ return text.replace('-', ' ') @staticmethod def remove_extra_whitespaces(text): + """ + Remove extra whitespaces from input text. + + This function removes whitespaces from the beginning and the end of + the string, but also duplicated whitespaces between words. + + Params: + text: The text to be processed. + + Returns: + The text without extra whitespaces. + """ return ' '.join(text.strip().split()); @staticmethod def remove_accent_marks(text): + """ + Remove accent marks from input text. + + Params: + text: The text to be processed. + + Returns: + The text without accent marks. + """ return unidecode(text)
0cab34e5f87b4484e0309aba8860d651afe06fb0
app/__init__.py
app/__init__.py
from flask import Flask, request, redirect from flask.ext.bootstrap import Bootstrap from config import configs from dmutils import apiclient, init_app, flask_featureflags from dmutils.content_loader import ContentLoader bootstrap = Bootstrap() data_api_client = apiclient.DataAPIClient() search_api_client = apiclient.SearchAPIClient() feature_flags = flask_featureflags.FeatureFlag() def create_app(config_name): application = Flask(__name__) init_app( application, configs[config_name], bootstrap=bootstrap, data_api_client=data_api_client, feature_flags=feature_flags, search_api_client=search_api_client ) questions_builder = ContentLoader( "app/helpers/questions_manifest.yml", "app/content/g6/" ).get_builder() from .main import main as main_blueprint from .status import status as status_blueprint application.register_blueprint(status_blueprint) application.register_blueprint(main_blueprint) main_blueprint.config = { 'BASE_TEMPLATE_DATA': application.config['BASE_TEMPLATE_DATA'], 'QUESTIONS_BUILDER': questions_builder } @application.before_request def remove_trailing_slash(): if request.path != '/' and request.path.endswith('/'): if request.query_string: return redirect( '{}?{}'.format( request.path[:-1], request.query_string.decode('utf-8') ), code=301 ) else: return redirect(request.path[:-1], code=301) return application
from flask import Flask, request, redirect from flask.ext.bootstrap import Bootstrap from config import configs from dmutils import apiclient, init_app, flask_featureflags from dmutils.content_loader import ContentLoader bootstrap = Bootstrap() data_api_client = apiclient.DataAPIClient() search_api_client = apiclient.SearchAPIClient() feature_flags = flask_featureflags.FeatureFlag() questions_loader = ContentLoader( "app/helpers/questions_manifest.yml", "app/content/g6/" ) def create_app(config_name): application = Flask(__name__) init_app( application, configs[config_name], bootstrap=bootstrap, data_api_client=data_api_client, feature_flags=feature_flags, search_api_client=search_api_client ) from .main import main as main_blueprint from .status import status as status_blueprint application.register_blueprint(status_blueprint) application.register_blueprint(main_blueprint) main_blueprint.config = { 'BASE_TEMPLATE_DATA': application.config['BASE_TEMPLATE_DATA'], } @application.before_request def remove_trailing_slash(): if request.path != '/' and request.path.endswith('/'): if request.query_string: return redirect( '{}?{}'.format( request.path[:-1], request.query_string.decode('utf-8') ), code=301 ) else: return redirect(request.path[:-1], code=301) return application
Move QUESTIONS_BUILDER from blueprint to a global variable
Move QUESTIONS_BUILDER from blueprint to a global variable
Python
mit
mtekel/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend
--- +++ @@ -9,6 +9,11 @@ data_api_client = apiclient.DataAPIClient() search_api_client = apiclient.SearchAPIClient() feature_flags = flask_featureflags.FeatureFlag() + +questions_loader = ContentLoader( + "app/helpers/questions_manifest.yml", + "app/content/g6/" +) def create_app(config_name): @@ -23,11 +28,6 @@ search_api_client=search_api_client ) - questions_builder = ContentLoader( - "app/helpers/questions_manifest.yml", - "app/content/g6/" - ).get_builder() - from .main import main as main_blueprint from .status import status as status_blueprint @@ -36,7 +36,6 @@ main_blueprint.config = { 'BASE_TEMPLATE_DATA': application.config['BASE_TEMPLATE_DATA'], - 'QUESTIONS_BUILDER': questions_builder } @application.before_request
e65ed7382c691d8ee19a22659ddb6deaa064e85b
kmip/__init__.py
kmip/__init__.py
# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import re # Dynamically set __version__ version_path = os.path.join(os.path.dirname( os.path.realpath(__file__)), 'version.py') with open(version_path, 'r') as version_file: mo = re.search(r"^.*= '(\d\.\d\.\d)'$", version_file.read(), re.MULTILINE) __version__ = mo.group(1) __all__ = ['core', 'demos', 'services']
# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import re from kmip.core import enums # Dynamically set __version__ version_path = os.path.join(os.path.dirname( os.path.realpath(__file__)), 'version.py') with open(version_path, 'r') as version_file: mo = re.search(r"^.*= '(\d\.\d\.\d)'$", version_file.read(), re.MULTILINE) __version__ = mo.group(1) __all__ = [ 'core', 'demos', 'enums', 'services' ]
Update the kmip package to allow importing enums globally
Update the kmip package to allow importing enums globally This change updates the root-level kmip package, allowing users to now import enums directly from the kmip package: from kmip import enums Enumerations are used throughout the codebase and user applications and this will simplify usage and help obfuscate internal package details that may change in the future.
Python
apache-2.0
OpenKMIP/PyKMIP,OpenKMIP/PyKMIP
--- +++ @@ -16,6 +16,8 @@ import os import re +from kmip.core import enums + # Dynamically set __version__ version_path = os.path.join(os.path.dirname( os.path.realpath(__file__)), 'version.py') @@ -24,4 +26,9 @@ __version__ = mo.group(1) -__all__ = ['core', 'demos', 'services'] +__all__ = [ + 'core', + 'demos', + 'enums', + 'services' +]
3e0fbefa021c4c97024da30963845b201ff35089
dmaws/commands/paasmanifest.py
dmaws/commands/paasmanifest.py
import click import os from ..cli import cli_command from ..utils import load_file, template_string @cli_command('paas-manifest', max_apps=1) @click.option('--template', '-t', default='paas/manifest.j2', type=click.Path(exists=True), help="Manifest Jinja2 template file") @click.option('--out-file', '-o', help="Output file, if empty the template content is printed to the stdout") def paas_manifest(ctx, template, out_file): """Generate a PaaS manifest file from a Jinja2 template""" app = ctx.apps[0] if app not in ctx.variables: raise ValueError('Application configuration not found') templace_content = load_file(template) variables = { 'environment': ctx.environment, 'app': app.replace('_', '-') } variables.update(ctx.variables[app]) manifest_content = template_string(templace_content, variables) if out_file is not None: with open(out_file, 'w') as f: f.write(manifest_content) os.chmod(out_file, 0o600) else: print(manifest_content)
import click import os from ..cli import cli_command from ..utils import load_file, template_string, merge_dicts @cli_command('paas-manifest', max_apps=1) @click.option('--out-file', '-o', help="Output file, if empty the template content is printed to the stdout") def paas_manifest(ctx, out_file): """Generate a PaaS manifest file from a Jinja2 template""" app = ctx.apps[0] if app not in ctx.variables: raise ValueError('Application configuration not found') variables = { 'environment': ctx.environment, 'app': app.replace('_', '-') } template_content = load_file('paas/{}.j2'.format(variables['app'])) variables = merge_dicts(variables, ctx.variables) variables = merge_dicts(variables, ctx.variables[app]) manifest_content = template_string(template_content, variables, templates_path='paas/') if out_file is not None: with open(out_file, 'w') as f: f.write(manifest_content) os.chmod(out_file, 0o600) else: print(manifest_content)
Update paas-manifest command to load per-app manifests
Update paas-manifest command to load per-app manifests Removes template file option in favour of the app-specific manifests. Changes the way variables are set for the manifest template. Once the relevant variable files are loaded and merged the command will update the top-level namespace with the values from the application. This allows us to use the same base manifest template referencing generic top-level variables (eg `subdomain`, `instances`, `path`) that are overridden by application-specific values. Previously this was accomplished by using `stacks.yml` as the middle layer. The template variable change means that we can run into issues if we use clashing variable names accidentally, but at the same time it allows us to set common values for all applications. Eg: ``` instances: 3 api: instances: 5 ``` sets instance counts to 3 for all applications, but since the context will be updated with the `api` values the api manifest will only see `instances: 5` value.
Python
mit
alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws
--- +++ @@ -2,30 +2,30 @@ import os from ..cli import cli_command -from ..utils import load_file, template_string +from ..utils import load_file, template_string, merge_dicts @cli_command('paas-manifest', max_apps=1) [email protected]('--template', '-t', default='paas/manifest.j2', - type=click.Path(exists=True), - help="Manifest Jinja2 template file") @click.option('--out-file', '-o', help="Output file, if empty the template content is printed to the stdout") -def paas_manifest(ctx, template, out_file): +def paas_manifest(ctx, out_file): """Generate a PaaS manifest file from a Jinja2 template""" app = ctx.apps[0] if app not in ctx.variables: raise ValueError('Application configuration not found') - templace_content = load_file(template) variables = { 'environment': ctx.environment, 'app': app.replace('_', '-') } - variables.update(ctx.variables[app]) - manifest_content = template_string(templace_content, variables) + template_content = load_file('paas/{}.j2'.format(variables['app'])) + + variables = merge_dicts(variables, ctx.variables) + variables = merge_dicts(variables, ctx.variables[app]) + + manifest_content = template_string(template_content, variables, templates_path='paas/') if out_file is not None: with open(out_file, 'w') as f:
89a7a834638a1384bd9f1a560902b4d3aab29423
smoked/loader.py
smoked/loader.py
# coding: utf-8 from __future__ import unicode_literals from importlib import import_module from django.conf import settings from django.core.exceptions import ImproperlyConfigured def load_test_module(): """ Import test module and trigger registration of tests. Test module is defined in `SMOKE_TESTS` setting. """ test_module = getattr(settings, 'SMOKE_TESTS') if not test_module: raise ImproperlyConfigured('Missing SMOKE_TESTS in settings.') try: import_module(test_module) except ImportError as e: msg = "Can't import '{0}' module. Exception: {1}" raise ImproperlyConfigured(msg.format(test_module, e))
# coding: utf-8 from __future__ import unicode_literals from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.utils.importlib import import_module def load_test_module(): """ Import test module and trigger registration of tests. Test module is defined in `SMOKE_TESTS` setting. """ test_module = getattr(settings, 'SMOKE_TESTS') if not test_module: raise ImproperlyConfigured('Missing SMOKE_TESTS in settings.') try: import_module(test_module) except ImportError as e: msg = "Can't import '{0}' module. Exception: {1}" raise ImproperlyConfigured(msg.format(test_module, e))
Fix import of import_module for Py2.6
Fix import of import_module for Py2.6
Python
mit
djentlemen/django-smoked
--- +++ @@ -1,9 +1,9 @@ # coding: utf-8 from __future__ import unicode_literals -from importlib import import_module from django.conf import settings from django.core.exceptions import ImproperlyConfigured +from django.utils.importlib import import_module def load_test_module():
d2339fa094062c0672aef0ce938572aa3f5aead3
nintendo/sead/random.py
nintendo/sead/random.py
class Random: def __init__(self, seed): multiplier = 0x6C078965 temp = seed self.state = [] for i in range(1, 5): temp ^= temp >> 30 temp = (temp * multiplier + i) & 0xFFFFFFFF self.state.append(temp) def u32(self): temp = self.state[0] temp = (temp ^ (temp << 11)) & 0xFFFFFFFF temp ^= temp >> 8 temp ^= self.state[3] temp ^= self.state[3] >> 19 self.state[0] = self.state[1] self.state[1] = self.state[2] self.state[2] = self.state[3] self.state[3] = temp return temp def uint(self, max): return (self.u32() * max) >> 32
class Random: def __init__(self, *param): if len(param) == 1: self.set_seed(param[0]) elif len(param) == 4: self.set_state(*param) else: raise TypeError("Random.__init__ takes either 1 or 4 arguments") def set_seed(self, seed): multiplier = 0x6C078965 temp = seed self.state = [] for i in range(1, 5): temp ^= temp >> 30 temp = (temp * multiplier + i) & 0xFFFFFFFF self.state.append(temp) def set_state(self, s0, s1, s2, s3): self.state = [s0, s1, s2, s3] def u32(self): temp = self.state[0] temp = (temp ^ (temp << 11)) & 0xFFFFFFFF temp ^= temp >> 8 temp ^= self.state[3] temp ^= self.state[3] >> 19 self.state[0] = self.state[1] self.state[1] = self.state[2] self.state[2] = self.state[3] self.state[3] = temp return temp def uint(self, max): return (self.u32() * max) >> 32
Allow sead.Random to be constructed by internal state
Allow sead.Random to be constructed by internal state
Python
mit
Kinnay/NintendoClients
--- +++ @@ -1,6 +1,12 @@ class Random: - def __init__(self, seed): + def __init__(self, *param): + if len(param) == 1: self.set_seed(param[0]) + elif len(param) == 4: self.set_state(*param) + else: + raise TypeError("Random.__init__ takes either 1 or 4 arguments") + + def set_seed(self, seed): multiplier = 0x6C078965 temp = seed @@ -9,6 +15,9 @@ temp ^= temp >> 30 temp = (temp * multiplier + i) & 0xFFFFFFFF self.state.append(temp) + + def set_state(self, s0, s1, s2, s3): + self.state = [s0, s1, s2, s3] def u32(self): temp = self.state[0]
3e913e4267fd7750516edcbed1aa687e0cbd17fe
edx_repo_tools/oep2/__init__.py
edx_repo_tools/oep2/__init__.py
""" Top-level definition of the ``oep2`` commandline tool. """ import click from . import explode_repos_yaml from .report import cli def _cli(): cli(auto_envvar_prefix="OEP2") @click.group() def cli(): """ Tools for implementing and enforcing OEP-2. """ pass cli.add_command(explode_repos_yaml.explode) cli.add_command(explode_repos_yaml.implode) cli.add_command(cli.cli, 'report')
""" Top-level definition of the ``oep2`` commandline tool. """ import click from . import explode_repos_yaml from .report.cli import cli as report_cli def _cli(): cli(auto_envvar_prefix="OEP2") @click.group() def cli(): """ Tools for implementing and enforcing OEP-2. """ pass cli.add_command(explode_repos_yaml.explode) cli.add_command(explode_repos_yaml.implode) cli.add_command(report_cli, 'report')
Make oep-2 checker run again
Make oep-2 checker run again
Python
apache-2.0
edx/repo-tools,edx/repo-tools
--- +++ @@ -5,7 +5,7 @@ import click from . import explode_repos_yaml -from .report import cli +from .report.cli import cli as report_cli def _cli(): @@ -21,4 +21,4 @@ cli.add_command(explode_repos_yaml.explode) cli.add_command(explode_repos_yaml.implode) -cli.add_command(cli.cli, 'report') +cli.add_command(report_cli, 'report')
d85947ee083b0a5d7156b4e49fd5677ebeea33c7
brew/monitor.py
brew/monitor.py
import time import threading from . import app, mongo, controller from bson.objectid import ObjectId class Monitor(object): def __init__(self, timeout=10): self.thread = None self.exit_event = None self.timeout = timeout def temperature(self, brew_id): if self.thread: raise RuntimeError("Brew still ongoing") def run_in_background(): while True: if self.exit_event.wait(self.timeout): break with app.app_context(): temperature = controller.get_temperature() now = time.time() query = {'_id': ObjectId(brew_id)} op = {'$push': {'temperatures': (now, temperature)}} mongo.db.brews.update(query, op) self.exit_event = threading.Event() self.thread = threading.Thread(target=run_in_background) self.thread.start() def stop(self): self.exit_event.set() self.thread.join()
import time import threading from . import app, mongo, controller from bson.objectid import ObjectId class Monitor(object): def __init__(self, timeout=10): self.thread = None self.exit_event = None self.timeout = timeout def temperature(self, brew_id): if self.thread: raise RuntimeError("Brew still ongoing") def run_in_background(): while True: if self.exit_event.wait(self.timeout): break with app.app_context(): temperature = controller.get_temperature() now = time.time() query = {'_id': ObjectId(brew_id)} op = {'$push': {'temperatures': (now, temperature)}} mongo.db.brews.update(query, op) self.exit_event = threading.Event() self.thread = threading.Thread(target=run_in_background) self.thread.start() def stop(self): self.exit_event.set() self.thread.join() self.thread = None
Fix problem after stopping process
Fix problem after stopping process
Python
mit
brewpeople/brewmeister,brewpeople/brewmeister,brewpeople/brewmeister
--- +++ @@ -35,3 +35,4 @@ def stop(self): self.exit_event.set() self.thread.join() + self.thread = None
ccafafbd51422979ed93ed197135bf03b7d0be81
opps/images/__init__.py
opps/images/__init__.py
# -*- coding: utf-8 -*- from django.utils.translation import ugettext_lazy as _ from django.conf import settings trans_app_label = _('Image') settings.INSTALLED_APPS += ('thumbor',)
# -*- coding: utf-8 -*- from django.utils.translation import ugettext_lazy as _ from django.conf import settings trans_app_label = _('Image')
Remove thumbor use on init image, thumbor not django application
Remove thumbor use on init image, thumbor not django application
Python
mit
YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,opps/opps,opps/opps,YACOWS/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,williamroot/opps,williamroot/opps,williamroot/opps,jeanmask/opps,opps/opps
--- +++ @@ -5,4 +5,3 @@ trans_app_label = _('Image') -settings.INSTALLED_APPS += ('thumbor',)
1bfb33a7332cefdacb9b57137cf407ede0d8a919
examples/evaluate_similarity.py
examples/evaluate_similarity.py
# -*- coding: utf-8 -*- """ Simple example showing evaluating embedding on similarity datasets """ import logging from six import iteritems from web.datasets.similarity import fetch_MEN, fetch_WS353, fetch_SimLex999 from web.embeddings import fetch_GloVe from web.evaluate import evaluate_similarity # Configure logging logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG, datefmt='%I:%M:%S') # Fetch GloVe embedding (warning: it might take few minutes) w_glove = fetch_GloVe(corpus="wiki-6B", dim=300) # Define tasks tasks = { "MEN": fetch_MEN(), "WS353": fetch_WS353(), "SIMLEX999": fetch_SimLex999() } # Print sample data for name, data in iteritems(tasks): print("Sample data from {}: pair \"{}\" and \"{}\" is assigned score {}".format(name, data.X[0][0], data.X[0][1], data.y[0])) # Calculate results using helper function for name, data in iteritems(tasks): print "Spearman correlation of scores on {} {}".format(name, evaluate_similarity(w_glove, data.X, data.y))
# -*- coding: utf-8 -*- """ Simple example showing evaluating embedding on similarity datasets """ import logging from six import iteritems from web.datasets.similarity import fetch_MEN, fetch_WS353, fetch_SimLex999 from web.embeddings import fetch_GloVe from web.evaluate import evaluate_similarity # Configure logging logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG, datefmt='%I:%M:%S') # Fetch GloVe embedding (warning: it might take few minutes) w_glove = fetch_GloVe(corpus="wiki-6B", dim=300) # Define tasks tasks = { "MEN": fetch_MEN(), "WS353": fetch_WS353(), "SIMLEX999": fetch_SimLex999() } # Print sample data for name, data in iteritems(tasks): print("Sample data from {}: pair \"{}\" and \"{}\" is assigned score {}".format(name, data.X[0][0], data.X[0][1], data.y[0])) # Calculate results using helper function for name, data in iteritems(tasks): print("Spearman correlation of scores on {} {}".format(name, evaluate_similarity(w_glove, data.X, data.y)))
Fix print format for python3
Fix print format for python3
Python
mit
kudkudak/word-embeddings-benchmarks
--- +++ @@ -28,5 +28,4 @@ # Calculate results using helper function for name, data in iteritems(tasks): - print "Spearman correlation of scores on {} {}".format(name, evaluate_similarity(w_glove, data.X, data.y)) - + print("Spearman correlation of scores on {} {}".format(name, evaluate_similarity(w_glove, data.X, data.y)))
715a3c7005130b4fea8ac46132e2d2505f1901cf
pdf_generator/styles.py
pdf_generator/styles.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from reportlab.platypus import ( Paragraph as BaseParagraph, Spacer, ) from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet styles = getSampleStyleSheet() snormal = ParagraphStyle('normal') def Paragraph(text, style=snormal, **kw): if isinstance(style, basestring): style = styles[style] if kw: style = ParagraphStyle('style', parent=style, **kw) return BaseParagraph(text, style) def HSpacer(width): return Spacer(0, width)
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import from reportlab.platypus import ( Paragraph as BaseParagraph, Image as BaseImage, Spacer, ) from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet styles = getSampleStyleSheet() snormal = ParagraphStyle('normal') def Paragraph(text, style=snormal, **kw): if isinstance(style, basestring): style = styles[style] if kw: style = ParagraphStyle('style', parent=style, **kw) return BaseParagraph(text, style) def HSpacer(width): return Spacer(0, width) def Image(path, width=None, height=None, ratio=None): if width and ratio: height = width / ratio elif height and ratio: width = height * ratio return BaseImage(path, width, height)
Add a image shortcut to compute height and width with a ratio
Add a image shortcut to compute height and width with a ratio
Python
mit
cecedille1/PDF_generator
--- +++ @@ -5,6 +5,7 @@ from reportlab.platypus import ( Paragraph as BaseParagraph, + Image as BaseImage, Spacer, ) from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet @@ -25,3 +26,12 @@ def HSpacer(width): return Spacer(0, width) + + +def Image(path, width=None, height=None, ratio=None): + if width and ratio: + height = width / ratio + elif height and ratio: + width = height * ratio + + return BaseImage(path, width, height)
46de02b77c25c633b254dc81ed35da2443b287a9
lighty/wsgi/__init__.py
lighty/wsgi/__init__.py
import functools from .handler import handler from .urls import load_urls, resolve def WSGIApplication(app_settings): '''Create main application handler ''' class Application(object): settings = app_settings urls = load_urls(settings.urls) resolve_url = functools.partial(resolve, urls) return functools.partial(handler, Application, Application.resolve_url)
import functools import os from ..templates.loaders import FSLoader from .handler import handler from .urls import load_urls, resolve class BaseApplication(object): '''Base application class contains obly settings, urls and resolve_url method ''' def __init__(self, settings): self.settings = settings self.urls = load_urls(settings.urls) self.resolve_url = functools.partial(resolve, self.urls) class ComplexApplication(BaseApplication): '''Application loads also templates and database connection ''' def __init__(self, settings): super(ComplexApplication, self).__init__(settings) self.apps = settings.section('APPS') template_dirs = [] for app in self.apps: module = __import__(app, globals(), locals(), app.split('.')[-1]) template_dir = os.path.join(module.__path__[0], 'templates') if os.path.exists(template_dir): template_dirs.append(template_dir) try: template_dirs += settings.section('TEMPLATE_DIRS') except: pass self.template_loader = FSLoader(template_dirs) self.get_template = self.template_loader.get_template def WSGIApplication(app_settings): '''Create main application handler ''' application = ComplexApplication(app_settings) return functools.partial(handler, application, application.resolve_url)
Add ComplexApplication class for WSGI apps that uses not only urls resolving.
Add ComplexApplication class for WSGI apps that uses not only urls resolving.
Python
bsd-3-clause
GrAndSE/lighty
--- +++ @@ -1,16 +1,46 @@ import functools +import os + +from ..templates.loaders import FSLoader from .handler import handler from .urls import load_urls, resolve +class BaseApplication(object): + '''Base application class contains obly settings, urls and resolve_url + method + ''' + + def __init__(self, settings): + self.settings = settings + self.urls = load_urls(settings.urls) + self.resolve_url = functools.partial(resolve, self.urls) + + +class ComplexApplication(BaseApplication): + '''Application loads also templates and database connection + ''' + + def __init__(self, settings): + super(ComplexApplication, self).__init__(settings) + self.apps = settings.section('APPS') + template_dirs = [] + for app in self.apps: + module = __import__(app, globals(), locals(), app.split('.')[-1]) + template_dir = os.path.join(module.__path__[0], 'templates') + if os.path.exists(template_dir): + template_dirs.append(template_dir) + try: + template_dirs += settings.section('TEMPLATE_DIRS') + except: + pass + self.template_loader = FSLoader(template_dirs) + self.get_template = self.template_loader.get_template + + def WSGIApplication(app_settings): '''Create main application handler ''' - - class Application(object): - settings = app_settings - urls = load_urls(settings.urls) - resolve_url = functools.partial(resolve, urls) - - return functools.partial(handler, Application, Application.resolve_url) + application = ComplexApplication(app_settings) + return functools.partial(handler, application, application.resolve_url)
a1b4526f48fbd9e7f48c8bb6bc1a4763cc710448
fabric_bolt/web_hooks/tables.py
fabric_bolt/web_hooks/tables.py
import django_tables2 as tables from fabric_bolt.core.mixins.tables import ActionsColumn, PaginateTable from fabric_bolt.web_hooks import models class HookTable(PaginateTable): """Table used to show the configurations Also provides actions to edit and delete""" actions = ActionsColumn([ {'title': '<i class="glyphicon glyphicon-pencil"></i>', 'url': 'hooks_hook_update', 'args': [tables.A('pk')], 'attrs':{'data-toggle': 'tooltip', 'title': 'Edit Hook', 'data-delay': '{ "show": 300, "hide": 0 }'}}, {'title': '<i class="glyphicon glyphicon-trash"></i>', 'url': 'hooks_hook_delete', 'args': [tables.A('pk')], 'attrs':{'data-toggle': 'tooltip', 'title': 'Delete Hook', 'data-delay': '{ "show": 300, "hide": 0 }'}}, ], delimiter='&#160;&#160;&#160;') class Meta: model = models.Hook attrs = {"class": "table table-striped"} sequence = fields = ( 'url', )
import django_tables2 as tables from fabric_bolt.core.mixins.tables import ActionsColumn, PaginateTable from fabric_bolt.web_hooks import models class HookTable(PaginateTable): """Table used to show the configurations Also provides actions to edit and delete""" actions = ActionsColumn([ {'title': '<i class="glyphicon glyphicon-pencil"></i>', 'url': 'hooks_hook_update', 'args': [tables.A('pk')], 'attrs':{'data-toggle': 'tooltip', 'title': 'Edit Hook', 'data-delay': '{ "show": 300, "hide": 0 }'}}, {'title': '<i class="glyphicon glyphicon-trash"></i>', 'url': 'hooks_hook_delete', 'args': [tables.A('pk')], 'attrs':{'data-toggle': 'tooltip', 'title': 'Delete Hook', 'data-delay': '{ "show": 300, "hide": 0 }'}}, ], delimiter='&#160;&#160;&#160;') class Meta: model = models.Hook attrs = {"class": "table table-striped"} sequence = fields = ( 'project', 'url', )
Add project to hook table so it's a little more clear what it's a global one.
Add project to hook table so it's a little more clear what it's a global one.
Python
mit
worthwhile/fabric-bolt,jproffitt/fabric-bolt,gvangool/fabric-bolt,qdqmedia/fabric-bolt,damoguyan8844/fabric-bolt,qdqmedia/fabric-bolt,npardington/fabric-bolt,fabric-bolt/fabric-bolt,maximon93/fabric-bolt,leominov/fabric-bolt,lethe3000/fabric-bolt,worthwhile/fabric-bolt,maximon93/fabric-bolt,damoguyan8844/fabric-bolt,lethe3000/fabric-bolt,maximon93/fabric-bolt,jproffitt/fabric-bolt,damoguyan8844/fabric-bolt,brajput24/fabric-bolt,gvangool/fabric-bolt,worthwhile/fabric-bolt,paperreduction/fabric-bolt,leominov/fabric-bolt,paperreduction/fabric-bolt,brajput24/fabric-bolt,leominov/fabric-bolt,fabric-bolt/fabric-bolt,npardington/fabric-bolt,lethe3000/fabric-bolt,gvangool/fabric-bolt,npardington/fabric-bolt,jproffitt/fabric-bolt,brajput24/fabric-bolt,fabric-bolt/fabric-bolt,qdqmedia/fabric-bolt,paperreduction/fabric-bolt
--- +++ @@ -21,5 +21,6 @@ model = models.Hook attrs = {"class": "table table-striped"} sequence = fields = ( + 'project', 'url', )
377fa94c2963a9c2522164ff374431dbe836217e
indra/sources/rlimsp/api.py
indra/sources/rlimsp/api.py
__all__ = ['process_pmc'] import logging import requests from .processor import RlimspProcessor logger = logging.getLogger(__name__) RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/pmc' class RLIMSP_Error(Exception): pass def process_pmc(pmcid, with_grounding=True): """Get an output from RLIMS-p for the given pmic id. Parameters ---------- pmcid : str A PMCID, with the prefix PMC, of the paper to be "read". with_grounding : bool The RLIMS-P web service provides two endpoints, one pre-grounded, the other not so much. The grounded endpoint returns far less content, and may perform some grounding that can be handled by the grounding mapper. """ if with_grounding: resp = requests.get(RLIMSP_URL + '.normed/pmcid/%s' % pmcid) else: resp = requests.get(RLIMSP_URL + '/pmcid/%s' % pmcid) if resp.status_code != 200: raise RLIMSP_Error("Bad status code: %d - %s" % (resp.status_code, resp.reason)) rp = RlimspProcessor(resp.json()) return rp
__all__ = ['process_from_webservice'] import logging import requests from .processor import RlimspProcessor logger = logging.getLogger(__name__) RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/' class RLIMSP_Error(Exception): pass def process_from_webservice(id_val, id_type='pmcid', source='pmc', with_grounding=True): """Get an output from RLIMS-p for the given pmic id. Parameters ---------- id_val : str A PMCID, with the prefix PMC, or pmid, with no prefix, of the paper to be "read". id_type : str Either 'pmid' or 'pmcid'. The default is 'pmcid'. source : str Either 'pmc' or 'medline', whether you want pmc fulltext or medline abstracts. with_grounding : bool The RLIMS-P web service provides two endpoints, one pre-grounded, the other not so much. The grounded endpoint returns far less content, and may perform some grounding that can be handled by the grounding mapper. """ if with_grounding: fmt = '%s.normed/%s/%s' else: fmt = '%s/%s/%s' resp = requests.get(RLIMSP_URL + fmt % (source, id_type, id_val)) if resp.status_code != 200: raise RLIMSP_Error("Bad status code: %d - %s" % (resp.status_code, resp.reason)) rp = RlimspProcessor(resp.json()) return rp
Add capability to read pmids and get medline.
Add capability to read pmids and get medline.
Python
bsd-2-clause
sorgerlab/belpy,bgyori/indra,johnbachman/belpy,johnbachman/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/belpy,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,pvtodorov/indra,johnbachman/indra,johnbachman/belpy,sorgerlab/indra,sorgerlab/indra,sorgerlab/indra,bgyori/indra,bgyori/indra,johnbachman/indra
--- +++ @@ -1,4 +1,4 @@ -__all__ = ['process_pmc'] +__all__ = ['process_from_webservice'] import logging import requests @@ -9,29 +9,38 @@ logger = logging.getLogger(__name__) -RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/pmc' +RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/' class RLIMSP_Error(Exception): pass -def process_pmc(pmcid, with_grounding=True): +def process_from_webservice(id_val, id_type='pmcid', source='pmc', + with_grounding=True): """Get an output from RLIMS-p for the given pmic id. Parameters ---------- - pmcid : str - A PMCID, with the prefix PMC, of the paper to be "read". + id_val : str + A PMCID, with the prefix PMC, or pmid, with no prefix, of the paper to + be "read". + id_type : str + Either 'pmid' or 'pmcid'. The default is 'pmcid'. + source : str + Either 'pmc' or 'medline', whether you want pmc fulltext or medline + abstracts. with_grounding : bool The RLIMS-P web service provides two endpoints, one pre-grounded, the other not so much. The grounded endpoint returns far less content, and may perform some grounding that can be handled by the grounding mapper. """ if with_grounding: - resp = requests.get(RLIMSP_URL + '.normed/pmcid/%s' % pmcid) + fmt = '%s.normed/%s/%s' else: - resp = requests.get(RLIMSP_URL + '/pmcid/%s' % pmcid) + fmt = '%s/%s/%s' + + resp = requests.get(RLIMSP_URL + fmt % (source, id_type, id_val)) if resp.status_code != 200: raise RLIMSP_Error("Bad status code: %d - %s"
c4a827f7ddfd6d43120253bd660e426840dc5cba
src/pdsm/utils.py
src/pdsm/utils.py
import re def ensure_trailing_slash(mystr): if not mystr.endswith('/'): mystr = mystr + '/' return mystr def remove_trailing_slash(mystr): if mystr.endswith('/'): mystr = mystr[:-1] return mystr def split_s3_bucket_key(s3_path): if s3_path.startswith('s3://'): s3_path = s3_path[5:] parts = s3_path.split('/') return parts[0], '/'.join(parts[1:]) def underscore(mystr): mystr = re.sub(r'([A-Z]+)([A-Z][a-z])', r'\1_\2', mystr) mystr = re.sub(r'([a-z\d])([A-Z])', r'\1_\2', mystr) mystr = mystr.replace('-', '_') return mystr.lower() def chunks(l, n): for i in xrange(0, len(l), n): yield l[i:i+n]
import re def ensure_trailing_slash(mystr): if not mystr.endswith('/'): mystr = mystr + '/' return mystr def remove_trailing_slash(mystr): if mystr.endswith('/'): mystr = mystr[:-1] return mystr def split_s3_bucket_key(s3_path): if s3_path.startswith('s3://'): s3_path = s3_path[5:] parts = s3_path.split('/') return parts[0], '/'.join(parts[1:]) def underscore(mystr): mystr = re.sub(r'([A-Z]+)([A-Z][a-z])', r'\1_\2', mystr) mystr = re.sub(r'([a-z\d])([A-Z])', r'\1_\2', mystr) mystr = mystr.replace('-', '_') return mystr.lower() def chunks(l, n): for i in range(0, len(l), n): yield l[i:i+n]
Use range so code works with python 3
Use range so code works with python 3
Python
mit
robotblake/pdsm
--- +++ @@ -28,5 +28,5 @@ def chunks(l, n): - for i in xrange(0, len(l), n): + for i in range(0, len(l), n): yield l[i:i+n]
c17aed93f3dd5a1a46dfb871268ebda4e56b1bee
lib/excel.py
lib/excel.py
"""EcoData Retriever Excel Functions This module contains optional functions for importing data from Excel. """ class Excel: @staticmethod def empty_cell(cell): """Tests whether an excel cell is empty or contains only whitespace""" if cell.ctype == 0: return True if str(cell.value).strip() == "": return True return False @staticmethod def cell_value(cell): """Returns the string value of an excel spreadsheet cell""" return str(cell.value).strip()
"""EcoData Retriever Excel Functions This module contains optional functions for importing data from Excel. """ class Excel: @staticmethod def empty_cell(cell): """Tests whether an excel cell is empty or contains only whitespace""" if cell.ctype == 0: return True if str(cell.value).strip() == "": return True return False @staticmethod def cell_value(cell): """Returns the string value of an excel spreadsheet cell""" if (cell.value).__class__.__name__ == 'unicode': return (cell.value).encode('utf-8').strip() return str(cell.value).strip()
Handle special characters in xls cell values
Handle special characters in xls cell values
Python
mit
davharris/retriever,goelakash/retriever,embaldridge/retriever,henrykironde/deletedret,davharris/retriever,goelakash/retriever,embaldridge/retriever,davharris/retriever,henrykironde/deletedret,embaldridge/retriever
--- +++ @@ -17,4 +17,6 @@ @staticmethod def cell_value(cell): """Returns the string value of an excel spreadsheet cell""" + if (cell.value).__class__.__name__ == 'unicode': + return (cell.value).encode('utf-8').strip() return str(cell.value).strip()
b6d29826932d92662a7cd14b6f54327c9e8d4c0f
model/reporting_year.py
model/reporting_year.py
#! /usr/bin/env python2.7 import model class ReportingYear(model.Model): pass ReportingYear.init_model("reporting_years", "reporting_year")
#! /usr/bin/env python2.7 import model class ReportingYear(model.Model): pass ReportingYear.init_model("reporting_years", "reporting_year_id")
Fix a bug in the ReportingYear initialization
Fix a bug in the ReportingYear initialization
Python
bsd-3-clause
UngaForskareStockholm/medlem2
--- +++ @@ -4,4 +4,4 @@ class ReportingYear(model.Model): pass -ReportingYear.init_model("reporting_years", "reporting_year") +ReportingYear.init_model("reporting_years", "reporting_year_id")
f2b2927216c6392625d22fac4bcc6f9005ec51fb
landing/tests/test_views.py
landing/tests/test_views.py
from django.core.urlresolvers import resolve from django.test import TestCase, RequestFactory import unittest from landing.views import LandingView class LandingPageTests(TestCase): def test_root_url_resolves_to_landing_page_view(self): found = resolve('/') self.assertEqual(found.func.__name__, LandingView.as_view().__name__) def test_landing_page_uses_correct_templates(self): response = self.client.get('/') self.assertTemplateUsed(response, 'base.html') self.assertTemplateUsed(response, 'landing/index.html')
from django.core.urlresolvers import resolve from django.test import TestCase, RequestFactory import unittest from landing.views import LandingView class LandingPageTests(TestCase): def test_root_url_resolves_to_landing_page_view(self): found = resolve('/en/') self.assertEqual(found.func.__name__, LandingView.as_view().__name__) def test_landing_page_uses_correct_templates(self): response = self.client.get('/en/') self.assertTemplateUsed(response, 'base.html') self.assertTemplateUsed(response, 'landing/index.html')
Make the landing page tests work again with i18n url
Make the landing page tests work again with i18n url
Python
mit
XeryusTC/projman,XeryusTC/projman,XeryusTC/projman
--- +++ @@ -6,10 +6,10 @@ class LandingPageTests(TestCase): def test_root_url_resolves_to_landing_page_view(self): - found = resolve('/') + found = resolve('/en/') self.assertEqual(found.func.__name__, LandingView.as_view().__name__) def test_landing_page_uses_correct_templates(self): - response = self.client.get('/') + response = self.client.get('/en/') self.assertTemplateUsed(response, 'base.html') self.assertTemplateUsed(response, 'landing/index.html')
8eb740cf678d15b2c1c299580c1f60d37528f3be
lbrynet/__init__.py
lbrynet/__init__.py
import logging __version__ = "0.21.0rc5" version = tuple(__version__.split('.')) logging.getLogger(__name__).addHandler(logging.NullHandler())
import logging __version__ = "0.21.0rc6" version = tuple(__version__.split('.')) logging.getLogger(__name__).addHandler(logging.NullHandler())
Bump version 0.21.0rc5 --> 0.21.0rc6
Bump version 0.21.0rc5 --> 0.21.0rc6 Signed-off-by: Jack Robison <[email protected]>
Python
mit
lbryio/lbry,lbryio/lbry,lbryio/lbry
--- +++ @@ -1,6 +1,6 @@ import logging -__version__ = "0.21.0rc5" +__version__ = "0.21.0rc6" version = tuple(__version__.split('.')) logging.getLogger(__name__).addHandler(logging.NullHandler())
e406def7737c3b4528ccb7345ba68ece61edcc84
easyfuse/utils.py
easyfuse/utils.py
""" This module implements the some simple utilitiy functions. .. :copyright: (c) 2016 by Jelte Fennema. :license: MIT, see License for more details. """ from llfuse import FUSEError from contextlib import contextmanager import errno import logging @contextmanager def _convert_error_to_fuse_error(action, thing): try: yield except Exception as e: if isinstance(e, FUSEError): raise e logging.error('Something went wrong when %s %s: %s', action, thing, e) raise FUSEError(errno.EAGAIN)
""" This module implements the some simple utilitiy functions. .. :copyright: (c) 2016 by Jelte Fennema. :license: MIT, see License for more details. """ from llfuse import FUSEError from contextlib import contextmanager import errno import logging import traceback @contextmanager def _convert_error_to_fuse_error(action, thing): try: yield except Exception as e: if isinstance(e, FUSEError): raise e logging.error('Something went wrong when %s %s: %s', action, thing, e) if logging.getLogger().isEnabledFor(logging.DEBUG): # DEBUG logging, print stacktrace traceback.print_exc() raise FUSEError(errno.EAGAIN)
Print traceback on captured errors when loglevel is DEBUG
Print traceback on captured errors when loglevel is DEBUG
Python
mit
JelteF/easyfuse,JelteF/easyfuse
--- +++ @@ -10,6 +10,7 @@ from contextlib import contextmanager import errno import logging +import traceback @contextmanager @@ -20,4 +21,7 @@ if isinstance(e, FUSEError): raise e logging.error('Something went wrong when %s %s: %s', action, thing, e) + if logging.getLogger().isEnabledFor(logging.DEBUG): + # DEBUG logging, print stacktrace + traceback.print_exc() raise FUSEError(errno.EAGAIN)
15a8d73d38a1fb254bf38bdfc0c9ebd15b1af05e
elmo/elmo/urls.py
elmo/elmo/urls.py
"""elmo URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.11/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url, include from django.contrib import admin from django.views.generic import TemplateView urlpatterns = [ url(r'^/', TemplateView.as_view(template_name='landing.html')), url(r'^admin/', admin.site.urls), url(r'^moons/', include('moon_tracker.urls')) ]
"""elmo URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.11/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url, include from django.contrib import admin from django.views.generic import TemplateView urlpatterns = [ url(r'^$', TemplateView.as_view(template_name='landing.html')), url(r'^admin/', admin.site.urls), url(r'^moons/', include('moon_tracker.urls')) ]
Repair the regex for the homepage.
Repair the regex for the homepage.
Python
mit
StephenSwat/eve_lunar_mining_organiser,StephenSwat/eve_lunar_mining_organiser
--- +++ @@ -18,7 +18,7 @@ from django.views.generic import TemplateView urlpatterns = [ - url(r'^/', TemplateView.as_view(template_name='landing.html')), + url(r'^$', TemplateView.as_view(template_name='landing.html')), url(r'^admin/', admin.site.urls), url(r'^moons/', include('moon_tracker.urls')) ]
72ee9b82e9f91436aeeba144ad687435d349b85a
systempay/app.py
systempay/app.py
from django.conf.urls import patterns, url from oscar.core.application import Application from systempay import views class SystemPayApplication(Application): name = 'systempay' secure_redirect_view = views.SecureRedirectView place_order_view = views.PlaceOrderView return_response_view = views.ReturnResponseView cancel_response_view = views.CancelResponseView handle_ipn_view = views.HandleIPN def __init__(self, *args, **kwargs): super(SystemPayApplication, self).__init__(*args, **kwargs) def get_urls(self): urlpatterns = super(SystemPayApplication, self).get_urls() urlpatterns += patterns('', url(r'^secure-redirect/', self.secure_redirect_view.as_view(), name='secure-redirect'), url(r'^preview/', self.place_order_view.as_view(preview=True), name='preview'), url(r'^place-order/', self.place_order_view.as_view(), name='place-order'), url(r'^return/', self.return_response_view.as_view(), name='return-response'), url(r'^cancel/', self.cancel_response_view.as_view(), name='cancel-response'), url(r'^handle-ipn$', self.handle_ipn_view.as_view(), name='handle-ipn'), ) return self.post_process_urls(urlpatterns) application = SystemPayApplication()
from django.conf.urls import patterns, url from oscar.core.application import Application from systempay import views class SystemPayApplication(Application): name = 'systempay' secure_redirect_view = views.SecureRedirectView place_order_view = views.PlaceOrderView return_response_view = views.ReturnResponseView cancel_response_view = views.CancelResponseView handle_ipn_view = views.HandleIPN def __init__(self, *args, **kwargs): super(SystemPayApplication, self).__init__(*args, **kwargs) def get_urls(self): urlpatterns = super(SystemPayApplication, self).get_urls() urlpatterns += patterns('', url(r'^secure-redirect$', self.secure_redirect_view.as_view(), name='secure-redirect'), url(r'^preview$', self.place_order_view.as_view(preview=True), name='preview'), url(r'^place-order', self.place_order_view.as_view(), name='place-order'), url(r'^return$', self.return_response_view.as_view(), name='return-response'), url(r'^cancel$', self.cancel_response_view.as_view(), name='cancel-response'), url(r'^handle-ipn$', self.handle_ipn_view.as_view(), name='handle-ipn'), ) return self.post_process_urls(urlpatterns) application = SystemPayApplication()
Remove ending slashes from urls
Remove ending slashes from urls
Python
mit
bastien34/django-oscar-systempay,dulaccc/django-oscar-systempay,bastien34/django-oscar-systempay
--- +++ @@ -19,14 +19,14 @@ def get_urls(self): urlpatterns = super(SystemPayApplication, self).get_urls() urlpatterns += patterns('', - url(r'^secure-redirect/', self.secure_redirect_view.as_view(), name='secure-redirect'), - url(r'^preview/', self.place_order_view.as_view(preview=True), + url(r'^secure-redirect$', self.secure_redirect_view.as_view(), name='secure-redirect'), + url(r'^preview$', self.place_order_view.as_view(preview=True), name='preview'), - url(r'^place-order/', self.place_order_view.as_view(), + url(r'^place-order', self.place_order_view.as_view(), name='place-order'), - url(r'^return/', self.return_response_view.as_view(), + url(r'^return$', self.return_response_view.as_view(), name='return-response'), - url(r'^cancel/', self.cancel_response_view.as_view(), + url(r'^cancel$', self.cancel_response_view.as_view(), name='cancel-response'), url(r'^handle-ipn$', self.handle_ipn_view.as_view(), name='handle-ipn'),
f992c901748787acb4f3235b46e27488967b9a60
taptaptap/exc.py
taptaptap/exc.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ exc.py ~~~~~~ Exceptions for TAP file handling. (c) BSD 3-clause. """ from __future__ import division, absolute_import from __future__ import print_function, unicode_literals import os import sys __all__ = ['TapParseError', 'TapMissingPlan', 'TapInvalidNumbering', 'TapBailout'] class TapParseError(Exception): pass class TapMissingPlan(TapParseError): pass class TapInvalidNumbering(TapParseError): pass class TapBailout(Exception): is_testcase = False is_bailout = True encoding = sys.stdout.encoding def __init__(self, *args, **kwargs): super(TapBailout, self).__init__(*args, **kwargs) self.data = [] def __str__(self): return unicode(self).encode(self.encoding or 'utf-8') def __unicode__(self): return u'Bail out! {}{}{}'.format(self.message, os.linesep, os.linesep.join(self.data)) def copy(self, memo=None): inst = TapBailout(self.message) inst.data = self.data return inst
#!/usr/bin/env python # -*- coding: utf-8 -*- """ exc.py ~~~~~~ Exceptions for TAP file handling. (c) BSD 3-clause. """ from __future__ import division, absolute_import from __future__ import print_function, unicode_literals import os import sys __all__ = ['TapParseError', 'TapMissingPlan', 'TapInvalidNumbering', 'TapBailout'] class TapParseError(Exception): pass class TapMissingPlan(TapParseError): pass class TapInvalidNumbering(TapParseError): pass class TapBailout(Exception): is_testcase = False is_bailout = True encoding = sys.stdout.encoding def __init__(self, *args, **kwargs): super(TapBailout, self).__init__(*args, **kwargs) self.data = [] def __str__(self): return unicode(self).encode(self.encoding or 'utf-8') def __unicode__(self): return u'Bail out! {}{}{}'.format(self.message, os.linesep, os.linesep.join(self.data)) def copy(self, memo=None): inst = TapBailout(memo or self.message) inst.data = self.data return inst
Use memo parameter in TapBailout.copy
[Bugfix] Use memo parameter in TapBailout.copy
Python
bsd-3-clause
meisterluk/taptaptap,meisterluk/taptaptap
--- +++ @@ -49,6 +49,6 @@ os.linesep.join(self.data)) def copy(self, memo=None): - inst = TapBailout(self.message) + inst = TapBailout(memo or self.message) inst.data = self.data return inst
b874a5d3f54ef7ba71af18474a96e835d97bb846
chat/views.py
chat/views.py
from datetime import datetime, timedelta import jwt import os from django.shortcuts import render from django.conf import settings from django.views.generic.base import TemplateView key = os.path.join( os.path.dirname(__file__), 'ecc', 'key.pem', ) with open(key, 'r') as fh: ecc_private = fh.read() # Create your views here. class NabuView(TemplateView): template_name = 'chat/nabu.html' def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) data = { 'sub': 'Kromey', 'iss': self.request.headers['Host'], 'aud': self.request.headers['Host'], 'exp': datetime.utcnow() + timedelta(seconds=30), } token = jwt.encode(data, ecc_private, algorithm='ES256') context['token'] = token.decode('utf-8') return context
from datetime import datetime, timedelta import jwt import os from django.shortcuts import render from django.conf import settings from django.views.generic.base import TemplateView key = os.path.join( os.path.dirname(__file__), 'ecc', 'key.pem', ) with open(key, 'r') as fh: ecc_private = fh.read() # Create your views here. class NabuView(TemplateView): template_name = 'chat/nabu.html' def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) data = { 'sub': 'Kromey', 'iss': settings.NABU['jwt']['iss'], 'aud': settings.NABU['jwt']['aud'], 'exp': datetime.utcnow() + timedelta(**settings.NABU['jwt']['exp']), } token = jwt.encode(data, ecc_private, algorithm='ES256') context['token'] = token.decode('utf-8') return context
Use Nabu settings in token generation
Use Nabu settings in token generation
Python
mit
Kromey/fbxnano,Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters,Kromey/akwriters,Kromey/fbxnano,Kromey/akwriters
--- +++ @@ -26,9 +26,9 @@ data = { 'sub': 'Kromey', - 'iss': self.request.headers['Host'], - 'aud': self.request.headers['Host'], - 'exp': datetime.utcnow() + timedelta(seconds=30), + 'iss': settings.NABU['jwt']['iss'], + 'aud': settings.NABU['jwt']['aud'], + 'exp': datetime.utcnow() + timedelta(**settings.NABU['jwt']['exp']), } token = jwt.encode(data, ecc_private, algorithm='ES256') context['token'] = token.decode('utf-8')
d8702486851c59d8b030a63aefee2b5ca152772e
test_projects/django14/pizzagigi/urls.py
test_projects/django14/pizzagigi/urls.py
from django.conf.urls import patterns, url from django.views.generic import TemplateView from .views import ( PizzaCreateView, PizzaDeleteView, PizzaDetailView, PizzaListView, PizzaUpdateView, ChickenWingsListView ) urlpatterns = patterns('', # NOQA url(r'^$', PizzaListView.as_view(), name='list'), url(r'^create/$', PizzaCreateView.as_view(), name='create'), url(r'^created/$', TemplateView.as_view( template_name='pizzagigi/pizza_created.html'), name='created'), url(r'^detail/(?P<pk>[0-9]*)$', PizzaDetailView.as_view(), name='detail'), url(r'^update/(?P<pk>[0-9]*)$', PizzaUpdateView.as_view(), name='update'), url(r'^updated/$', TemplateView.as_view( template_name='pizzagigi/pizza_updated.html'), name='updated'), url(r'^delete/(?P<pk>[0-9]*)$', PizzaDeleteView.as_view(), name='delete'), url(r'^deleted/$', TemplateView.as_view( template_name='pizzagigi/pizza_deleted.html'), name='deleted'), url(r'^wings/$', ChickenWingsListView.as_view(), name='chickenwings_list'), )
from django.conf.urls import patterns, url from django.views.generic import TemplateView from .views import ( PizzaCreateView, PizzaDeleteView, PizzaDetailView, PizzaListView, PizzaUpdateView ) urlpatterns = patterns('', # NOQA url(r'^$', PizzaListView.as_view(), name='list'), url(r'^create/$', PizzaCreateView.as_view(), name='create'), url(r'^created/$', TemplateView.as_view( template_name='pizzagigi/pizza_created.html'), name='created'), url(r'^detail/(?P<pk>[0-9]*)$', PizzaDetailView.as_view(), name='detail'), url(r'^update/(?P<pk>[0-9]*)$', PizzaUpdateView.as_view(), name='update'), url(r'^updated/$', TemplateView.as_view( template_name='pizzagigi/pizza_updated.html'), name='updated'), url(r'^delete/(?P<pk>[0-9]*)$', PizzaDeleteView.as_view(), name='delete'), url(r'^deleted/$', TemplateView.as_view( template_name='pizzagigi/pizza_deleted.html'), name='deleted'), )
Move chickens to other app
Move chickens to other app
Python
bsd-3-clause
kelvinwong-ca/django-select-multiple-field,kelvinwong-ca/django-select-multiple-field,kelvinwong-ca/django-select-multiple-field
--- +++ @@ -3,8 +3,7 @@ from .views import ( PizzaCreateView, PizzaDeleteView, PizzaDetailView, PizzaListView, - PizzaUpdateView, - ChickenWingsListView + PizzaUpdateView ) urlpatterns = patterns('', # NOQA @@ -21,6 +20,4 @@ url(r'^deleted/$', TemplateView.as_view( template_name='pizzagigi/pizza_deleted.html'), name='deleted'), - url(r'^wings/$', ChickenWingsListView.as_view(), name='chickenwings_list'), - )
4bd930b8bc6410a9966327c8e73e0b1849c71157
sympy/conftest.py
sympy/conftest.py
import sys sys._running_pytest = True from sympy.core.cache import clear_cache def pytest_terminal_summary(terminalreporter): if (terminalreporter.stats.get('error', None) or terminalreporter.stats.get('failed', None)): terminalreporter.write_sep(' ', 'DO *NOT* COMMIT!', red=True, bold=True) def pytest_runtest_teardown(): clear_cache()
import sys sys._running_pytest = True from sympy.core.cache import clear_cache def pytest_report_header(config): from sympy.utilities.misc import ARCH s = "architecture: %s\n" % ARCH from sympy.core.cache import USE_CACHE s += "cache: %s\n" % USE_CACHE from sympy.polys.domains import GROUND_TYPES s += "ground types: %s\n" % GROUND_TYPES return s def pytest_terminal_summary(terminalreporter): if (terminalreporter.stats.get('error', None) or terminalreporter.stats.get('failed', None)): terminalreporter.write_sep(' ', 'DO *NOT* COMMIT!', red=True, bold=True) def pytest_runtest_teardown(): clear_cache()
Add more info to pytest header
Add more info to pytest header
Python
bsd-3-clause
moble/sympy,ahhda/sympy,chaffra/sympy,saurabhjn76/sympy,saurabhjn76/sympy,ga7g08/sympy,AkademieOlympia/sympy,sampadsaha5/sympy,hrashk/sympy,jbbskinny/sympy,chaffra/sympy,yukoba/sympy,Designist/sympy,abhiii5459/sympy,atsao72/sympy,pandeyadarsh/sympy,kevalds51/sympy,postvakje/sympy,sahmed95/sympy,beni55/sympy,Vishluck/sympy,asm666/sympy,rahuldan/sympy,garvitr/sympy,AunShiLord/sympy,MechCoder/sympy,shipci/sympy,bukzor/sympy,ga7g08/sympy,chaffra/sympy,ga7g08/sympy,sahilshekhawat/sympy,Gadal/sympy,emon10005/sympy,madan96/sympy,MechCoder/sympy,jaimahajan1997/sympy,MridulS/sympy,kaushik94/sympy,bukzor/sympy,oliverlee/sympy,lidavidm/sympy,Shaswat27/sympy,drufat/sympy,Gadal/sympy,Designist/sympy,cccfran/sympy,hargup/sympy,souravsingh/sympy,atsao72/sympy,diofant/diofant,dqnykamp/sympy,farhaanbukhsh/sympy,hrashk/sympy,abloomston/sympy,Titan-C/sympy,abhiii5459/sympy,farhaanbukhsh/sympy,mafiya69/sympy,lidavidm/sympy,pbrady/sympy,jamesblunt/sympy,iamutkarshtiwari/sympy,yashsharan/sympy,Titan-C/sympy,drufat/sympy,pandeyadarsh/sympy,dqnykamp/sympy,maniteja123/sympy,sunny94/temp,debugger22/sympy,meghana1995/sympy,ahhda/sympy,cswiercz/sympy,meghana1995/sympy,AkademieOlympia/sympy,jbbskinny/sympy,hargup/sympy,toolforger/sympy,kaushik94/sympy,mcdaniel67/sympy,Vishluck/sympy,kaichogami/sympy,wyom/sympy,lindsayad/sympy,Shaswat27/sympy,atsao72/sympy,srjoglekar246/sympy,Davidjohnwilson/sympy,sahilshekhawat/sympy,Davidjohnwilson/sympy,dqnykamp/sympy,sahmed95/sympy,moble/sympy,farhaanbukhsh/sympy,rahuldan/sympy,ChristinaZografou/sympy,skidzo/sympy,MridulS/sympy,kevalds51/sympy,shipci/sympy,jamesblunt/sympy,VaibhavAgarwalVA/sympy,beni55/sympy,hrashk/sympy,ChristinaZografou/sympy,pbrady/sympy,MechCoder/sympy,sampadsaha5/sympy,Designist/sympy,Curious72/sympy,souravsingh/sympy,wyom/sympy,lidavidm/sympy,abhiii5459/sympy,moble/sympy,wyom/sympy,Mitchkoens/sympy,Mitchkoens/sympy,liangjiaxing/sympy,Arafatk/sympy,hargup/sympy,yukoba/sympy,jbbskinny/sympy,postvakje/sympy,garvitr/sympy,grevutiu-gabriel/sympy,MridulS/sympy,mcdaniel67/sympy,Titan-C/sympy,cccfran/sympy,shipci/sympy,amitjamadagni/sympy,Curious72/sympy,cswiercz/sympy,toolforger/sympy,Arafatk/sympy,kmacinnis/sympy,asm666/sympy,iamutkarshtiwari/sympy,atreyv/sympy,kumarkrishna/sympy,atreyv/sympy,debugger22/sympy,wanglongqi/sympy,VaibhavAgarwalVA/sympy,saurabhjn76/sympy,pandeyadarsh/sympy,Gadal/sympy,aktech/sympy,shikil/sympy,amitjamadagni/sympy,VaibhavAgarwalVA/sympy,madan96/sympy,Mitchkoens/sympy,kumarkrishna/sympy,drufat/sympy,skidzo/sympy,maniteja123/sympy,kmacinnis/sympy,wanglongqi/sympy,kmacinnis/sympy,toolforger/sympy,skirpichev/omg,jerli/sympy,liangjiaxing/sympy,cswiercz/sympy,lindsayad/sympy,mafiya69/sympy,beni55/sympy,atreyv/sympy,abloomston/sympy,yukoba/sympy,cccfran/sympy,rahuldan/sympy,postvakje/sympy,shikil/sympy,shikil/sympy,Sumith1896/sympy,lindsayad/sympy,sunny94/temp,yashsharan/sympy,ahhda/sympy,asm666/sympy,AunShiLord/sympy,jamesblunt/sympy,grevutiu-gabriel/sympy,madan96/sympy,jaimahajan1997/sympy,sunny94/temp,liangjiaxing/sympy,jerli/sympy,emon10005/sympy,Vishluck/sympy,yashsharan/sympy,kaichogami/sympy,skidzo/sympy,kevalds51/sympy,mafiya69/sympy,AunShiLord/sympy,vipulroxx/sympy,vipulroxx/sympy,kumarkrishna/sympy,oliverlee/sympy,debugger22/sympy,grevutiu-gabriel/sympy,Davidjohnwilson/sympy,sampadsaha5/sympy,sahilshekhawat/sympy,Shaswat27/sympy,maniteja123/sympy,pbrady/sympy,emon10005/sympy,aktech/sympy,ChristinaZografou/sympy,bukzor/sympy,flacjacket/sympy,Curious72/sympy,mcdaniel67/sympy,oliverlee/sympy,Arafatk/sympy,sahmed95/sympy,souravsingh/sympy,
Sumith1896/sympy,garvitr/sympy,abloomston/sympy,meghana1995/sympy,kaushik94/sympy,Sumith1896/sympy,AkademieOlympia/sympy,kaichogami/sympy,aktech/sympy,wanglongqi/sympy,iamutkarshtiwari/sympy,jerli/sympy,vipulroxx/sympy,jaimahajan1997/sympy
--- +++ @@ -2,6 +2,15 @@ sys._running_pytest = True from sympy.core.cache import clear_cache + +def pytest_report_header(config): + from sympy.utilities.misc import ARCH + s = "architecture: %s\n" % ARCH + from sympy.core.cache import USE_CACHE + s += "cache: %s\n" % USE_CACHE + from sympy.polys.domains import GROUND_TYPES + s += "ground types: %s\n" % GROUND_TYPES + return s def pytest_terminal_summary(terminalreporter): if (terminalreporter.stats.get('error', None) or
f5fb36875b09926effdae46a92497d01fa04e777
src/models/lm.py
src/models/lm.py
from keras.layers import LSTM, Input, Reshape from keras.models import Model from ..layers import LMMask, Projection class LanguageModel(Model): def __init__(self, n_batch, d_W, d_L, trainable=True): """ n_batch :: batch size for model application d_L :: language model state dimension (and output vector size) d_W :: input word embedding size (word features) """ w_n = Input(batch_shape=(n_batch, d_W), name='w_n', dtype='floatX') w_nmask = Input(batch_shape=(n_batch, 1), name='w_nmask', dtype='int8') # Prevent padded samples to affect internal state (and cause NaN loss in worst # case) by masking them by using another input value w_nmasked = LMMask(0.)([Reshape((1, d_W))(w_n), w_nmask]) # Using stateful LSTM for language model - model fitting code resets the # state after each sentence w_np1Ei = LSTM(d_L, trainable=trainable, return_sequences=False, stateful=True, consume_less='gpu')(w_nmasked) w_np1E = Projection(d_W)(w_np1Ei) super(LanguageModel, self).__init__(input=[w_n, w_nmask], output=w_np1E, name='LanguageModel')
from keras.layers import LSTM, Input, Reshape from keras.models import Model from ..layers import LMMask, Projection class LanguageModel(Model): def __init__(self, n_batch, d_W, d_L, trainable=True): """ n_batch :: batch size for model application d_L :: language model state dimension (and output vector size) d_W :: input word embedding size (word features) """ w_n = Input(batch_shape=(n_batch, d_W), name='w_n', dtype='floatX') w_nmask = Input(batch_shape=(n_batch, 1), name='w_nmask', dtype='int8') # Prevent padded samples to affect internal state (and cause NaN loss in worst # case) by masking them by using w_nmask masking values w_nmasked = LMMask(0.)([Reshape((1, d_W))(w_n), w_nmask]) # Using stateful LSTM for language model - model fitting code resets the # state after each sentence w_np1Ei = LSTM(d_L, trainable=trainable, return_sequences=True, stateful=True, consume_less='gpu')(w_nmasked) w_np1Ei = LSTM(d_L, trainable=trainable, return_sequences=False, stateful=True, consume_less='gpu')(w_np1Ei) w_np1E = Projection(d_W)(w_np1Ei) super(LanguageModel, self).__init__(input=[w_n, w_nmask], output=w_np1E, name='LanguageModel')
Use two LSTM LM’s instead of single huge one
Use two LSTM LM’s instead of single huge one
Python
mit
milankinen/c2w2c,milankinen/c2w2c
--- +++ @@ -16,16 +16,21 @@ w_nmask = Input(batch_shape=(n_batch, 1), name='w_nmask', dtype='int8') # Prevent padded samples to affect internal state (and cause NaN loss in worst - # case) by masking them by using another input value + # case) by masking them by using w_nmask masking values w_nmasked = LMMask(0.)([Reshape((1, d_W))(w_n), w_nmask]) # Using stateful LSTM for language model - model fitting code resets the # state after each sentence w_np1Ei = LSTM(d_L, trainable=trainable, + return_sequences=True, + stateful=True, + consume_less='gpu')(w_nmasked) + w_np1Ei = LSTM(d_L, + trainable=trainable, return_sequences=False, stateful=True, - consume_less='gpu')(w_nmasked) + consume_less='gpu')(w_np1Ei) w_np1E = Projection(d_W)(w_np1Ei)
f16ce4235e124fa9ea5d335665221514a2fcdcce
examples/cpp/clion.py
examples/cpp/clion.py
#!/usr/bin/env python3 """This is a **proof-of-concept** CLion project generator.""" import functools import json import subprocess subprocess.check_call(['cook', '--results']) with open('results.json') as file: content = json.load(file) with open('CMakeLists.txt', 'w') as file: w = functools.partial(print, file=file) w('cmake_minimum_required(VERSION 2.8.8)') w() w('add_custom_target(COOK COMMAND cook ' 'WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})') w() outputs = {} for primary, result in content.items(): for output in result['outputs']: outputs[output] = primary for primary, result in content.items(): if result.get('type') == 'cpp.object': cpp = [file for file in result['inputs'] if file.endswith('.cpp')] w('add_library({} OBJECT {})'.format(primary, ' '.join(cpp))) defines = ' '.join(name + '=' + str(val) for name, val in result['define'].items()) if defines: w('target_compile_definitions({} PRIVATE {})' .format(primary, defines)) includes = result['include'] if includes: w('target_include_directories({} PRIVATE {})'.format( primary, ' '.join(includes) )) w()
#!/usr/bin/env python3 """This is a **proof-of-concept** CLion project generator.""" import functools import json import subprocess import sys subprocess.check_call(['cook', '--results']) with open('results.json') as file: content = json.load(file) with open('CMakeLists.txt', 'w') as file: w = functools.partial(print, file=file) w('cmake_minimum_required(VERSION 2.8.8)') w() w('add_custom_target(COOK COMMAND ' + sys.executable + ' clion.py COMMAND cook ' 'WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})') w() outputs = {} for primary, result in content.items(): for output in result['outputs']: outputs[output] = primary for primary, result in content.items(): if result.get('type') == 'cpp.object': cpp = [file for file in result['inputs'] if file.endswith('.cpp')] w('add_library({} OBJECT {})'.format(primary, ' '.join(cpp))) defines = ' '.join(name + '=' + str(val) for name, val in result['define'].items()) if defines: w('target_compile_definitions({} PRIVATE {})' .format(primary, defines)) includes = result['include'] if includes: w('target_include_directories({} PRIVATE {})'.format( primary, ' '.join(includes) )) w()
Add automatic regeneration for CLion
Add automatic regeneration for CLion
Python
mit
jachris/cook
--- +++ @@ -5,6 +5,7 @@ import functools import json import subprocess +import sys subprocess.check_call(['cook', '--results']) @@ -15,7 +16,7 @@ w = functools.partial(print, file=file) w('cmake_minimum_required(VERSION 2.8.8)') w() - w('add_custom_target(COOK COMMAND cook ' + w('add_custom_target(COOK COMMAND ' + sys.executable + ' clion.py COMMAND cook ' 'WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})') w()
c0df1342b6625cdc2a205f2ba13ee201e8d0b02a
tests/conftest.py
tests/conftest.py
from __future__ import absolute_import import pytest import os import mock import json import app.mapping with open(os.path.join(os.path.dirname(__file__), 'fixtures/mappings/services.json')) as f: _services_mapping_definition = json.load(f) @pytest.fixture(scope="function") def services_mapping(): """Provide a services mapping fixture, and patch it into the global singleton getter.""" mock_services_mapping_getter_patch = mock.patch('app.mapping.get_services_mapping') mock_services_mapping_getter = mock_services_mapping_getter_patch.start() mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services') yield mock_services_mapping_getter.return_value mock_services_mapping_getter_patch.stop()
from __future__ import absolute_import import pytest import os import mock import json import app.mapping with open(os.path.join(os.path.dirname(__file__), 'fixtures/mappings/services.json')) as f: _services_mapping_definition = json.load(f) @pytest.fixture(scope="function") def services_mapping(): """Provide a services mapping fixture, and patch it into the global singleton getter.""" with mock.patch('app.mapping.get_services_mapping') as mock_services_mapping_getter: mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services') yield mock_services_mapping_getter.return_value
Use with block to start/stop the patch context manager.
Use with block to start/stop the patch context manager. - this is less code, hopefully is just as clear why we need to 'yield' rather than just 'return'. https://trello.com/c/OpWI068M/380-after-g9-go-live-removal-of-old-filters-from-search-api-mapping
Python
mit
alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api
--- +++ @@ -16,10 +16,6 @@ def services_mapping(): """Provide a services mapping fixture, and patch it into the global singleton getter.""" - mock_services_mapping_getter_patch = mock.patch('app.mapping.get_services_mapping') - mock_services_mapping_getter = mock_services_mapping_getter_patch.start() - mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services') - - yield mock_services_mapping_getter.return_value - - mock_services_mapping_getter_patch.stop() + with mock.patch('app.mapping.get_services_mapping') as mock_services_mapping_getter: + mock_services_mapping_getter.return_value = app.mapping.Mapping(_services_mapping_definition, 'services') + yield mock_services_mapping_getter.return_value
4d3ec7b3844d770925601342b0bc4a4cd6d2c852
flicks/videos/urls.py
flicks/videos/urls.py
from django.conf.urls.defaults import patterns, url from flicks.videos import views urlpatterns = patterns('', url(r'^video/(?P<video_id>[\w]+)$', views.details, name='flicks.videos.details'), url(r'^add_view/?$', views.ajax_add_view, name='flicks.videos.add_view'), url(r'^recent/?', views.recent, name='flicks.videos.recent'), url(r'^upload/?$', views.upload, name='flicks.videos.upload'), url(r'^notify$', views.notify, name='flicks.videos.notify'), url(r'^upvote/(?P<video_shortlink>[\w]+)$', views.upvote, name='flicks.videos.upvote'), url(r'^video/noir/$', views.promo_video_noir, name='flicks.videos.promo_video_noir'), url(r'^video/dance/$', views.promo_video_dance, name='flicks.videos.promo_video_dance'), url(r'^video/twilight/$', views.promo_video_twilight, name='flicks.videos.promo_video_twilight'), )
from django.conf.urls.defaults import patterns, url from flicks.videos import views urlpatterns = patterns('', url(r'^video/(?P<video_id>\d+)$', views.details, name='flicks.videos.details'), url(r'^add_view/?$', views.ajax_add_view, name='flicks.videos.add_view'), url(r'^recent/?', views.recent, name='flicks.videos.recent'), url(r'^upload/?$', views.upload, name='flicks.videos.upload'), url(r'^notify$', views.notify, name='flicks.videos.notify'), url(r'^upvote/(?P<video_shortlink>[\w]+)$', views.upvote, name='flicks.videos.upvote'), url(r'^video/noir/$', views.promo_video_noir, name='flicks.videos.promo_video_noir'), url(r'^video/dance/$', views.promo_video_dance, name='flicks.videos.promo_video_dance'), url(r'^video/twilight/$', views.promo_video_twilight, name='flicks.videos.promo_video_twilight'), )
Fix video id regex to use digits only.
Fix video id regex to use digits only.
Python
bsd-3-clause
mozilla/firefox-flicks,mozilla/firefox-flicks,mozilla/firefox-flicks,mozilla/firefox-flicks
--- +++ @@ -3,7 +3,7 @@ from flicks.videos import views urlpatterns = patterns('', - url(r'^video/(?P<video_id>[\w]+)$', views.details, + url(r'^video/(?P<video_id>\d+)$', views.details, name='flicks.videos.details'), url(r'^add_view/?$', views.ajax_add_view, name='flicks.videos.add_view'),
d613ca02bef0572d7581c843eb5466443410decf
test_settings.py
test_settings.py
import os from django.urls import ( include, path, ) BASE_DIR = os.path.dirname(__file__) STATIC_URL = "/static/" INSTALLED_APPS = ( 'gcloudc', 'djangae', 'djangae.commands', # Takes care of emulator setup 'djangae.tasks', ) DATABASES = { 'default': { 'ENGINE': 'gcloudc.db.backends.datastore', 'INDEXES_FILE': os.path.join(os.path.abspath(os.path.dirname(__file__)), "djangaeidx.yaml"), "PROJECT": "test", "NAMESPACE": "ns1", # Use a non-default namespace to catch edge cases where we forget } } SECRET_KEY = "secret_key_for_testing" USE_TZ = True CSRF_USE_SESSIONS = True CLOUD_TASKS_LOCATION = "[LOCATION]" # Define two required task queues CLOUD_TASKS_QUEUES = [ { "name": "default" }, { "name": "another" } ] # Point the URL conf at this file ROOT_URLCONF = __name__ urlpatterns = [ path('tasks/', include('djangae.tasks.urls')), ]
import os from django.urls import ( include, path, ) BASE_DIR = os.path.dirname(__file__) STATIC_URL = "/static/" # Default Django middleware MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'djangae.tasks.middleware.task_environment_middleware', ] INSTALLED_APPS = ( 'django.contrib.sessions', 'gcloudc', 'djangae', 'djangae.commands', # Takes care of emulator setup 'djangae.tasks', ) DATABASES = { 'default': { 'ENGINE': 'gcloudc.db.backends.datastore', 'INDEXES_FILE': os.path.join(os.path.abspath(os.path.dirname(__file__)), "djangaeidx.yaml"), "PROJECT": "test", "NAMESPACE": "ns1", # Use a non-default namespace to catch edge cases where we forget } } SECRET_KEY = "secret_key_for_testing" USE_TZ = True CSRF_USE_SESSIONS = True CLOUD_TASKS_LOCATION = "[LOCATION]" # Define two required task queues CLOUD_TASKS_QUEUES = [ { "name": "default" }, { "name": "another" } ] # Point the URL conf at this file ROOT_URLCONF = __name__ urlpatterns = [ path('tasks/', include('djangae.tasks.urls')), ]
Set default Django middleware in test settings
Set default Django middleware in test settings
Python
bsd-3-clause
potatolondon/djangae,potatolondon/djangae
--- +++ @@ -8,7 +8,19 @@ BASE_DIR = os.path.dirname(__file__) STATIC_URL = "/static/" +# Default Django middleware +MIDDLEWARE = [ + 'django.middleware.security.SecurityMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', + 'djangae.tasks.middleware.task_environment_middleware', +] + INSTALLED_APPS = ( + 'django.contrib.sessions', 'gcloudc', 'djangae', 'djangae.commands', # Takes care of emulator setup
2bef67ad0a4fb0db4bdf11d24b3c63e37558e7b9
poker/_common.py
poker/_common.py
import random from enum import Enum from enum34_custom import _MultiValueMeta, OrderableMixin, CaseInsensitiveMultiValueEnum from types import DynamicClassAttribute class _MultiMeta(_MultiValueMeta): def make_random(cls): return random.choice(list(cls)) class _MultiValueEnum(OrderableMixin, Enum, metaclass=_MultiMeta): def __str__(self): return str(self.value) def __repr__(self): apostrophe = "'" if isinstance(self.value, str) else '' return "{0}({1}{2}{1})".format(self.__class__.__qualname__, apostrophe, self) @DynamicClassAttribute def value(self): """The value of the Enum member.""" return self._value_[0] class _CaseInsensitiveMultiValueEnum(CaseInsensitiveMultiValueEnum): def __str__(self): return str(self.value[0]) class _ReprMixin: def __repr__(self): return "{}('{}')".format(self.__class__.__qualname__, self) def _make_float(string): return float(string.strip().replace(',', '')) def _make_int(string): return int(string.strip().replace(',', ''))
import random from enum import Enum from enum34_custom import ( _MultiValueMeta, OrderableMixin, CaseInsensitiveMultiValueEnum, MultiValueEnum ) from types import DynamicClassAttribute class _RandomMultiValueMeta(_MultiValueMeta): def make_random(cls): return random.choice(list(cls)) class _MultiValueEnum(OrderableMixin, MultiValueEnum, metaclass=_RandomMultiValueMeta): def __str__(self): return str(self.value) def __repr__(self): apostrophe = "'" if isinstance(self.value, str) else '' return "{0}({1}{2}{1})".format(self.__class__.__qualname__, apostrophe, self) @DynamicClassAttribute def value(self): """The value of the Enum member.""" return self._value_[0] class _CaseInsensitiveMultiValueEnum(CaseInsensitiveMultiValueEnum): def __str__(self): return str(self.value[0]) class _ReprMixin: def __repr__(self): return "{}('{}')".format(self.__class__.__qualname__, self) def _make_float(string): return float(string.strip().replace(',', '')) def _make_int(string): return int(string.strip().replace(',', ''))
Clarify what _MultiVAlueEnum does and where it comes from.
Clarify what _MultiVAlueEnum does and where it comes from.
Python
mit
pokerregion/poker,Seanmcn/poker,marchon/poker
--- +++ @@ -1,15 +1,17 @@ import random from enum import Enum -from enum34_custom import _MultiValueMeta, OrderableMixin, CaseInsensitiveMultiValueEnum +from enum34_custom import ( + _MultiValueMeta, OrderableMixin, CaseInsensitiveMultiValueEnum, MultiValueEnum +) from types import DynamicClassAttribute -class _MultiMeta(_MultiValueMeta): +class _RandomMultiValueMeta(_MultiValueMeta): def make_random(cls): return random.choice(list(cls)) -class _MultiValueEnum(OrderableMixin, Enum, metaclass=_MultiMeta): +class _MultiValueEnum(OrderableMixin, MultiValueEnum, metaclass=_RandomMultiValueMeta): def __str__(self): return str(self.value)
62a55c9e4c46aac647e0c7bc3d8143f1a6bd41ca
groups/admin.py
groups/admin.py
from django.contrib import admin from .models import Discussion, Group admin.site.register(Discussion) admin.site.register(Group)
from django.contrib import admin from .models import Discussion, Group class GroupAdmin(admin.ModelAdmin): filter_horizontal = ('moderators', 'watchers', 'members_if_private') class Meta: model = Group class DiscussionAdmin(admin.ModelAdmin): filter_horizontal = ('subscribers', 'ignorers') class Meta: model = Discussion admin.site.register(Group, GroupAdmin) admin.site.register(Discussion, DiscussionAdmin)
Use filter_horizontal for many-to-many fields.
Use filter_horizontal for many-to-many fields.
Python
bsd-2-clause
incuna/incuna-groups,incuna/incuna-groups
--- +++ @@ -3,5 +3,19 @@ from .models import Discussion, Group -admin.site.register(Discussion) -admin.site.register(Group) +class GroupAdmin(admin.ModelAdmin): + filter_horizontal = ('moderators', 'watchers', 'members_if_private') + + class Meta: + model = Group + + +class DiscussionAdmin(admin.ModelAdmin): + filter_horizontal = ('subscribers', 'ignorers') + + class Meta: + model = Discussion + + +admin.site.register(Group, GroupAdmin) +admin.site.register(Discussion, DiscussionAdmin)
992e39a6e669fd448034fe4d844b1bcc87d75721
comics/comics/oots.py
comics/comics/oots.py
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "The Order of the Stick" language = "en" url = "http://www.giantitp.com/" start_date = "2003-09-30" rights = "Rich Burlew" class Crawler(CrawlerBase): history_capable_days = 1 time_zone = "US/Eastern" def crawl(self, pub_date): feed = self.parse_feed("http://www.giantitp.com/comics/oots.rss") if len(feed.all()): entry = feed.all()[0] page = self.parse_page(entry.link) url = page.src('img[src*="/comics/images/"]') title = entry.title return CrawlerImage(url, title)
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.core.comic_data import ComicDataBase class ComicData(ComicDataBase): name = "The Order of the Stick" language = "en" url = "http://www.giantitp.com/" start_date = "2003-09-30" rights = "Rich Burlew" class Crawler(CrawlerBase): history_capable_days = 10 time_zone = "US/Eastern" headers = {"User-Agent": "Mozilla/5.0"} def crawl(self, pub_date): feed = self.parse_feed("http://www.giantitp.com/comics/oots.rss") if len(feed.all()): entry = feed.all()[0] page = self.parse_page(entry.link) url = page.src('img[src*="/comics/oots/"]') title = entry.title return CrawlerImage(url, title)
Update "The Order of the Stick" after site change
Update "The Order of the Stick" after site change
Python
agpl-3.0
datagutten/comics,jodal/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics,jodal/comics
--- +++ @@ -11,14 +11,15 @@ class Crawler(CrawlerBase): - history_capable_days = 1 + history_capable_days = 10 time_zone = "US/Eastern" + headers = {"User-Agent": "Mozilla/5.0"} def crawl(self, pub_date): feed = self.parse_feed("http://www.giantitp.com/comics/oots.rss") if len(feed.all()): entry = feed.all()[0] page = self.parse_page(entry.link) - url = page.src('img[src*="/comics/images/"]') + url = page.src('img[src*="/comics/oots/"]') title = entry.title return CrawlerImage(url, title)
030e129fd60b5ab2255b10e8115ab4e3e973ae05
utils/gyb_syntax_support/protocolsMap.py
utils/gyb_syntax_support/protocolsMap.py
SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = { 'DeclBuildable': [ 'CodeBlockItem', 'MemberDeclListItem', 'SyntaxBuildable' ], 'ExprList': [ 'ConditionElement', 'SyntaxBuildable' ], 'IdentifierPattern': [ 'PatternBuildable' ], 'MemberDeclList': [ 'MemberDeclBlock' ], 'SimpleTypeIdentifier': [ 'TypeAnnotation', 'TypeBuildable', 'TypeExpr' ], 'StmtBuildable': [ 'CodeBlockItem', 'SyntaxBuildable' ], 'TokenSyntax': [ 'BinaryOperatorExpr' ] }
SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = { 'AccessorList': [ 'AccessorBlock' ], 'CodeBlockItemList': [ 'CodeBlock' ], 'DeclBuildable': [ 'CodeBlockItem', 'MemberDeclListItem', 'SyntaxBuildable' ], 'ExprList': [ 'ConditionElement', 'SyntaxBuildable' ], 'IdentifierPattern': [ 'PatternBuildable' ], 'MemberDeclList': [ 'MemberDeclBlock' ], 'FunctionCallExpr': [ 'CodeBlockItem', 'ExprBuildable' ], 'SequenceExpr': [ 'CodeBlockItem', 'ExprBuildable', 'TupleExprElement' ], 'SimpleTypeIdentifier': [ 'TypeAnnotation', 'TypeBuildable', 'TypeExpr' ], 'StmtBuildable': [ 'CodeBlockItem', 'SyntaxBuildable' ], 'TokenSyntax': [ 'BinaryOperatorExpr', 'DeclModifier' ] }
Add more types in protocol map
Add more types in protocol map
Python
apache-2.0
JGiola/swift,apple/swift,JGiola/swift,glessard/swift,apple/swift,ahoppen/swift,JGiola/swift,ahoppen/swift,ahoppen/swift,roambotics/swift,apple/swift,benlangmuir/swift,atrick/swift,atrick/swift,rudkx/swift,gregomni/swift,JGiola/swift,rudkx/swift,benlangmuir/swift,roambotics/swift,rudkx/swift,benlangmuir/swift,gregomni/swift,JGiola/swift,gregomni/swift,gregomni/swift,atrick/swift,roambotics/swift,glessard/swift,glessard/swift,benlangmuir/swift,rudkx/swift,ahoppen/swift,rudkx/swift,rudkx/swift,atrick/swift,glessard/swift,ahoppen/swift,glessard/swift,benlangmuir/swift,ahoppen/swift,gregomni/swift,apple/swift,JGiola/swift,apple/swift,glessard/swift,apple/swift,atrick/swift,gregomni/swift,roambotics/swift,benlangmuir/swift,atrick/swift,roambotics/swift,roambotics/swift
--- +++ @@ -1,4 +1,10 @@ SYNTAX_BUILDABLE_EXPRESSIBLE_AS_CONFORMANCES = { + 'AccessorList': [ + 'AccessorBlock' + ], + 'CodeBlockItemList': [ + 'CodeBlock' + ], 'DeclBuildable': [ 'CodeBlockItem', 'MemberDeclListItem', @@ -14,6 +20,15 @@ 'MemberDeclList': [ 'MemberDeclBlock' ], + 'FunctionCallExpr': [ + 'CodeBlockItem', + 'ExprBuildable' + ], + 'SequenceExpr': [ + 'CodeBlockItem', + 'ExprBuildable', + 'TupleExprElement' + ], 'SimpleTypeIdentifier': [ 'TypeAnnotation', 'TypeBuildable', @@ -24,6 +39,7 @@ 'SyntaxBuildable' ], 'TokenSyntax': [ - 'BinaryOperatorExpr' + 'BinaryOperatorExpr', + 'DeclModifier' ] }
0cf4aff4702ad580f9709b33c96cd115f34b028d
spacy/tests/conftest.py
spacy/tests/conftest.py
import pytest import os from ..en import English from ..de import German @pytest.fixture(scope="session") def EN(): return English(path=False) @pytest.fixture(scope="session") def DE(): return German(path=False) def pytest_addoption(parser): parser.addoption("--models", action="store_true", help="include tests that require full models") parser.addoption("--vectors", action="store_true", help="include word vectors tests") parser.addoption("--slow", action="store_true", help="include slow tests") def pytest_runtest_setup(item): for opt in ['models', 'vectors', 'slow']: if opt in item.keywords and not item.config.getoption("--%s" % opt): pytest.skip("need --%s option to run" % opt)
import pytest import os from ..en import English from ..de import German @pytest.fixture(scope="session") def EN(): return English() @pytest.fixture(scope="session") def DE(): return German() def pytest_addoption(parser): parser.addoption("--models", action="store_true", help="include tests that require full models") parser.addoption("--vectors", action="store_true", help="include word vectors tests") parser.addoption("--slow", action="store_true", help="include slow tests") def pytest_runtest_setup(item): for opt in ['models', 'vectors', 'slow']: if opt in item.keywords and not item.config.getoption("--%s" % opt): pytest.skip("need --%s option to run" % opt)
Set default path in EN/DE tests.
Set default path in EN/DE tests.
Python
mit
Gregory-Howard/spaCy,explosion/spaCy,raphael0202/spaCy,recognai/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,recognai/spaCy,honnibal/spaCy,recognai/spaCy,banglakit/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,spacy-io/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,Gregory-Howard/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,honnibal/spaCy,spacy-io/spaCy,raphael0202/spaCy,raphael0202/spaCy,raphael0202/spaCy,banglakit/spaCy,banglakit/spaCy,aikramer2/spaCy,spacy-io/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,recognai/spaCy,Gregory-Howard/spaCy,recognai/spaCy,oroszgy/spaCy.hu,Gregory-Howard/spaCy,aikramer2/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,honnibal/spaCy
--- +++ @@ -7,11 +7,11 @@ @pytest.fixture(scope="session") def EN(): - return English(path=False) + return English() @pytest.fixture(scope="session") def DE(): - return German(path=False) + return German() def pytest_addoption(parser):
edbbf93222fc4061a18f81718a6a7233c6b840ec
tests/test_callbacks.py
tests/test_callbacks.py
import pytest from aiotg import TgBot from aiotg import MESSAGE_TYPES API_TOKEN = "test_token" def text_msg(text): return { "message_id": 0, "from": {}, "chat": { "id": 0, "type": "private" }, "text": text } def test_command(): bot = TgBot(API_TOKEN) called_with = None @bot.command(r"/echo (.+)") def echo(chat, match): nonlocal called_with called_with = match.group(1) bot._process_message(text_msg("/echo foo")) assert called_with == "foo" def test_default(): bot = TgBot(API_TOKEN) called_with = None @bot.default def default(chat, message): nonlocal called_with called_with = message["text"] bot._process_message(text_msg("foo bar")) assert called_with == "foo bar"
import pytest import random from aiotg import TgBot from aiotg import MESSAGE_TYPES API_TOKEN = "test_token" bot = TgBot(API_TOKEN) def custom_msg(msg): template = { "message_id": 0, "from": {}, "chat": { "id": 0, "type": "private" } } template.update(msg) return template def text_msg(text): return custom_msg({ "text": text }) def test_command(): called_with = None @bot.command(r"/echo (.+)") def echo(chat, match): nonlocal called_with called_with = match.group(1) bot._process_message(text_msg("/echo foo")) assert called_with == "foo" def test_default(): called_with = None @bot.default def default(chat, message): nonlocal called_with called_with = message["text"] bot._process_message(text_msg("foo bar")) assert called_with == "foo bar" @pytest.mark.parametrize("mt", MESSAGE_TYPES) def test_handle(mt): called_with = None @bot.handle(mt) def handle(chat, media): nonlocal called_with called_with = media value = random.random() bot._process_message(custom_msg({ mt: value })) assert called_with == value
Add test for media handlers
Add test for media handlers
Python
mit
SijmenSchoon/aiotg,szastupov/aiotg,derfenix/aiotg
--- +++ @@ -1,19 +1,27 @@ import pytest +import random + from aiotg import TgBot from aiotg import MESSAGE_TYPES API_TOKEN = "test_token" +bot = TgBot(API_TOKEN) + +def custom_msg(msg): + template = { + "message_id": 0, + "from": {}, + "chat": { "id": 0, "type": "private" } + } + template.update(msg) + return template + def text_msg(text): - return { - "message_id": 0, - "from": {}, - "chat": { "id": 0, "type": "private" }, - "text": text - } + return custom_msg({ "text": text }) + def test_command(): - bot = TgBot(API_TOKEN) called_with = None @bot.command(r"/echo (.+)") @@ -22,11 +30,10 @@ called_with = match.group(1) bot._process_message(text_msg("/echo foo")) - assert called_with == "foo" + def test_default(): - bot = TgBot(API_TOKEN) called_with = None @bot.default @@ -35,5 +42,18 @@ called_with = message["text"] bot._process_message(text_msg("foo bar")) + assert called_with == "foo bar" - assert called_with == "foo bar" + [email protected]("mt", MESSAGE_TYPES) +def test_handle(mt): + called_with = None + + @bot.handle(mt) + def handle(chat, media): + nonlocal called_with + called_with = media + + value = random.random() + bot._process_message(custom_msg({ mt: value })) + assert called_with == value
e4c33d57ddc9743cec1c43bf0bb8d6fae185aff7
ticketus/urls.py
ticketus/urls.py
from django.conf import settings from django.conf.urls import patterns, include from django.contrib import admin urlpatterns = patterns('', (r'^ticket/', include('ticketus.ui.urls')), (r'^grappelli/', include('grappelli.urls')), (r'^admin/', include(admin.site.urls)), ) if settings.DEBUG: import debug_toolbar urlpatterns += patterns('', (r'^__debug__/', include(debug_toolbar.urls)), ) # Serve media files in development. Note Django automatically serves # static files as the staticfiles app is active in settings.py. from django.conf.urls.static import static urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
from django.conf import settings from django.conf.urls import patterns, include, url from django.contrib import admin from django.views.generic.base import RedirectView urlpatterns = patterns('', url(r'^$', RedirectView.as_view(pattern_name='ticket_list', permanent=False)), (r'^ticket/', include('ticketus.ui.urls')), (r'^grappelli/', include('grappelli.urls')), (r'^admin/', include(admin.site.urls)), ) if settings.DEBUG: import debug_toolbar urlpatterns += patterns('', (r'^__debug__/', include(debug_toolbar.urls)), ) # Serve media files in development. Note Django automatically serves # static files as the staticfiles app is active in settings.py. from django.conf.urls.static import static urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Add CBV for redirecting / to /ticket
Add CBV for redirecting / to /ticket
Python
bsd-2-clause
sjkingo/ticketus,sjkingo/ticketus,sjkingo/ticketus,sjkingo/ticketus
--- +++ @@ -1,8 +1,10 @@ from django.conf import settings -from django.conf.urls import patterns, include +from django.conf.urls import patterns, include, url from django.contrib import admin +from django.views.generic.base import RedirectView urlpatterns = patterns('', + url(r'^$', RedirectView.as_view(pattern_name='ticket_list', permanent=False)), (r'^ticket/', include('ticketus.ui.urls')), (r'^grappelli/', include('grappelli.urls')), (r'^admin/', include(admin.site.urls)),
d13363dd2fa23abcbd23d62929a00c07eb175eb7
tmaps/config/default.py
tmaps/config/default.py
import datetime # Override this key with a secret one SECRET_KEY = 'default_secret_key' HASHIDS_SALT = 'default_secret_salt' # This should be set to true in the production config when using NGINX USE_X_SENDFILE = False DEBUG = True JWT_EXPIRATION_DELTA = datetime.timedelta(days=2) JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0) POSTGRES_DB_USER = None POSTGRES_DB_PASSWORD = None POSTGRES_DB_NAME = None POSTGRES_DB_HOST = None POSTGRES_DB_PORT = None REDIS_URL = 'redis://localhost:6379' SQLALCHEMY_TRACK_MODIFICATIONS = True
import datetime # Override this key with a secret one SECRET_KEY = 'default_secret_key' HASHIDS_SALT = 'default_secret_salt' # This should be set to true in the production config when using NGINX USE_X_SENDFILE = False DEBUG = True JWT_EXPIRATION_DELTA = datetime.timedelta(days=2) JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0) SQLALCHEMY_DATABASE_URI = None REDIS_URL = 'redis://localhost:6379' SQLALCHEMY_TRACK_MODIFICATIONS = True
Make URI the only db config entry
Make URI the only db config entry
Python
agpl-3.0
TissueMAPS/TmServer
--- +++ @@ -11,11 +11,7 @@ JWT_EXPIRATION_DELTA = datetime.timedelta(days=2) JWT_NOT_BEFORE_DELTA = datetime.timedelta(seconds=0) -POSTGRES_DB_USER = None -POSTGRES_DB_PASSWORD = None -POSTGRES_DB_NAME = None -POSTGRES_DB_HOST = None -POSTGRES_DB_PORT = None +SQLALCHEMY_DATABASE_URI = None REDIS_URL = 'redis://localhost:6379'
29f6a260e49a6955dd12d354400d9ee6cfd6ddc7
tests/qtcore/qstatemachine_test.py
tests/qtcore/qstatemachine_test.py
#!/usr/bin/python import unittest from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation class QStateMachineTest(unittest.TestCase): def cb(self, *args): self.assertEqual(self.machine.defaultAnimations(), [self.anim]) def testBasic(self): app = QCoreApplication([]) self.machine = QStateMachine() s1 = QState() s2 = QState() s3 = QFinalState() QObject.connect(self.machine, SIGNAL("started()"), self.cb) self.anim = QParallelAnimationGroup() self.machine.addState(s1) self.machine.addState(s2) self.machine.addState(s3) self.machine.setInitialState(s1) self.machine.addDefaultAnimation(self.anim) self.machine.start() QTimer.singleShot(100, app.quit) app.exec_() if __name__ == '__main__': unittest.main()
#!/usr/bin/python import unittest from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation from helper import UsesQCoreApplication class QStateMachineTest(UsesQCoreApplication): def cb(self, *args): self.assertEqual(self.machine.defaultAnimations(), [self.anim]) def testBasic(self): self.machine = QStateMachine() s1 = QState() s2 = QState() s3 = QFinalState() QObject.connect(self.machine, SIGNAL("started()"), self.cb) self.anim = QParallelAnimationGroup() self.machine.addState(s1) self.machine.addState(s2) self.machine.addState(s3) self.machine.setInitialState(s1) self.machine.addDefaultAnimation(self.anim) self.machine.start() QTimer.singleShot(100, self.app.quit) self.app.exec_() if __name__ == '__main__': unittest.main()
Add UsesQCoreApplication in state machine test
Add UsesQCoreApplication in state machine test
Python
lgpl-2.1
M4rtinK/pyside-bb10,enthought/pyside,M4rtinK/pyside-android,PySide/PySide,IronManMark20/pyside2,PySide/PySide,M4rtinK/pyside-bb10,RobinD42/pyside,BadSingleton/pyside2,PySide/PySide,qtproject/pyside-pyside,enthought/pyside,pankajp/pyside,pankajp/pyside,M4rtinK/pyside-android,PySide/PySide,BadSingleton/pyside2,gbaty/pyside2,qtproject/pyside-pyside,enthought/pyside,enthought/pyside,RobinD42/pyside,pankajp/pyside,pankajp/pyside,enthought/pyside,M4rtinK/pyside-android,M4rtinK/pyside-bb10,enthought/pyside,gbaty/pyside2,qtproject/pyside-pyside,PySide/PySide,M4rtinK/pyside-bb10,M4rtinK/pyside-bb10,M4rtinK/pyside-android,qtproject/pyside-pyside,gbaty/pyside2,RobinD42/pyside,BadSingleton/pyside2,RobinD42/pyside,enthought/pyside,RobinD42/pyside,gbaty/pyside2,IronManMark20/pyside2,M4rtinK/pyside-bb10,IronManMark20/pyside2,RobinD42/pyside,IronManMark20/pyside2,BadSingleton/pyside2,pankajp/pyside,M4rtinK/pyside-android,BadSingleton/pyside2,IronManMark20/pyside2,M4rtinK/pyside-android,gbaty/pyside2,RobinD42/pyside,qtproject/pyside-pyside
--- +++ @@ -2,14 +2,14 @@ import unittest from PySide.QtCore import QObject, QState, QFinalState, SIGNAL, QCoreApplication, QTimer, QStateMachine, QSignalTransition, QVariant, QParallelAnimationGroup, QPropertyAnimation -class QStateMachineTest(unittest.TestCase): +from helper import UsesQCoreApplication + +class QStateMachineTest(UsesQCoreApplication): def cb(self, *args): self.assertEqual(self.machine.defaultAnimations(), [self.anim]) def testBasic(self): - app = QCoreApplication([]) - self.machine = QStateMachine() s1 = QState() s2 = QState() @@ -26,8 +26,8 @@ self.machine.addDefaultAnimation(self.anim) self.machine.start() - QTimer.singleShot(100, app.quit) - app.exec_() + QTimer.singleShot(100, self.app.quit) + self.app.exec_() if __name__ == '__main__': unittest.main()
5eefa21699f2dc7b75a919b5899a25ec7ef5c5b7
tests/unit/test_adapter_session.py
tests/unit/test_adapter_session.py
import pytest from wagtail_personalisation import adapters from tests.factories.segment import SegmentFactory @pytest.mark.django_db def test_get_segments(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) assert len(request.session['segments']) == 2 segments = adapter.get_segments() assert segments == [segment_1, segment_2] @pytest.mark.django_db def test_get_segment_by_id(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) segment_x = adapter.get_segment_by_id(segment_2.pk) assert segment_x == segment_2
import pytest from wagtail_personalisation import adapters from tests.factories.segment import SegmentFactory @pytest.mark.django_db def test_get_segments(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) assert len(request.session['segments']) == 2 segments = adapter.get_segments() assert segments == [segment_1, segment_2] @pytest.mark.django_db def test_get_segment_by_id(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) segment_x = adapter.get_segment_by_id(segment_2.pk) assert segment_x == segment_2 @pytest.mark.django_db def test_refresh_removes_disabled(rf, monkeypatch): request = rf.get('/') adapter = adapters.SessionSegmentsAdapter(request) segment_1 = SegmentFactory(name='segment-1', persistent=True) segment_2 = SegmentFactory(name='segment-2', persistent=True) adapter.set_segments([segment_1, segment_2]) adapter = adapters.SessionSegmentsAdapter(request) segment_1.status = segment_1.STATUS_DISABLED segment_1.save() adapter.refresh() assert adapter.get_segments() == [segment_2]
Add test for sessionadapter.refresh when segment is disabled
Add test for sessionadapter.refresh when segment is disabled
Python
mit
LabD/wagtail-personalisation,LabD/wagtail-personalisation,LabD/wagtail-personalisation
--- +++ @@ -33,3 +33,22 @@ segment_x = adapter.get_segment_by_id(segment_2.pk) assert segment_x == segment_2 + + [email protected]_db +def test_refresh_removes_disabled(rf, monkeypatch): + request = rf.get('/') + + adapter = adapters.SessionSegmentsAdapter(request) + + segment_1 = SegmentFactory(name='segment-1', persistent=True) + segment_2 = SegmentFactory(name='segment-2', persistent=True) + + adapter.set_segments([segment_1, segment_2]) + + adapter = adapters.SessionSegmentsAdapter(request) + segment_1.status = segment_1.STATUS_DISABLED + segment_1.save() + adapter.refresh() + + assert adapter.get_segments() == [segment_2]
5fb365333711f7e999f71d53061ae14c386e575c
src/waldur_core/core/api_groups_mapping.py
src/waldur_core/core/api_groups_mapping.py
API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], }
API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], 'accounting': ['/api/invoices/', '/api/invoice-items/',], }
Add accounting group to apidocs
Add accounting group to apidocs
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
--- +++ @@ -31,4 +31,5 @@ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], + 'accounting': ['/api/invoices/', '/api/invoice-items/',], }
563645019adaf20ee66af0b4cc13e8b08bcc9d32
lino_noi/lib/tickets/__init__.py
lino_noi/lib/tickets/__init__.py
# -*- coding: UTF-8 -*- # Copyright 2016 Luc Saffre # License: BSD (see file COPYING for details) """Fixtures specific for the Team variant of Lino Noi. .. autosummary:: :toctree: models """ from lino_xl.lib.tickets import * class Plugin(Plugin): """Adds the :mod:`lino_xl.lib.votes` plugin. """ extends_models = ['Ticket'] needs_plugins = [ 'lino_xl.lib.excerpts', 'lino_xl.lib.topics', 'lino.modlib.comments', 'lino.modlib.changes', # 'lino_xl.lib.votes', 'lino_noi.lib.noi'] def setup_main_menu(self, site, profile, m): p = self.get_menu_group() m = m.add_menu(p.app_label, p.verbose_name) m.add_action('tickets.MyTicketsToWork') def get_dashboard_items(self, user): if user.authenticated: yield self.site.actors.tickets.MyTicketsToWork # else: # yield self.site.actors.tickets. PublicTickets
# -*- coding: UTF-8 -*- # Copyright 2016 Luc Saffre # License: BSD (see file COPYING for details) """Fixtures specific for the Team variant of Lino Noi. .. autosummary:: :toctree: models """ from lino_xl.lib.tickets import * class Plugin(Plugin): """Adds the :mod:`lino_xl.lib.votes` plugin. """ extends_models = ['Ticket'] needs_plugins = [ 'lino_xl.lib.excerpts', 'lino_xl.lib.topics', 'lino.modlib.comments', 'lino.modlib.changes', # 'lino_xl.lib.votes', 'lino_noi.lib.noi'] def setup_main_menu(self, site, profile, m): super(Plugin, self).setup_main_menu(site, profile, m) p = self.get_menu_group() m = m.add_menu(p.app_label, p.verbose_name) m.add_action('tickets.MyTicketsToWork') def get_dashboard_items(self, user): super(Plugin, self).get_dashboard_items(user) if user.authenticated: yield self.site.actors.tickets.MyTicketsToWork # else: # yield self.site.actors.tickets. PublicTickets
Fix menu items for noi/tickets
Fix menu items for noi/tickets
Python
bsd-2-clause
khchine5/noi,lsaffre/noi,lsaffre/noi,lino-framework/noi,lino-framework/noi,khchine5/noi,lsaffre/noi
--- +++ @@ -27,12 +27,14 @@ 'lino_noi.lib.noi'] def setup_main_menu(self, site, profile, m): + super(Plugin, self).setup_main_menu(site, profile, m) p = self.get_menu_group() m = m.add_menu(p.app_label, p.verbose_name) m.add_action('tickets.MyTicketsToWork') def get_dashboard_items(self, user): + super(Plugin, self).get_dashboard_items(user) if user.authenticated: yield self.site.actors.tickets.MyTicketsToWork # else:
a5cd2110283ba699f36548c42b83aa86e6b50aab
configuration.py
configuration.py
# -*- coding: utf-8 -*- """ configuration.py """ from trytond.model import fields, ModelSingleton, ModelSQL, ModelView __all__ = ['EndiciaConfiguration'] class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView): """ Configuration settings for Endicia. """ __name__ = 'endicia.configuration' account_id = fields.Integer('Account Id') requester_id = fields.Char('Requester Id') passphrase = fields.Char('Passphrase') is_test = fields.Boolean('Is Test') @classmethod def __setup__(cls): super(EndiciaConfiguration, cls).__setup__() cls._error_messages.update({ 'endicia_credentials_required': 'Endicia settings on endicia configuration are incomplete.', }) def get_endicia_credentials(self): """Validate if endicia credentials are complete. """ if not all([ self.account_id, self.requester_id, self.passphrase ]): self.raise_user_error('endicia_credentials_required') return self
# -*- coding: utf-8 -*- """ configuration.py """ from trytond import backend from trytond.model import fields, ModelSingleton, ModelSQL, ModelView from trytond.transaction import Transaction __all__ = ['EndiciaConfiguration'] class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView): """ Configuration settings for Endicia. """ __name__ = 'endicia.configuration' account_id = fields.Char('Account Id') requester_id = fields.Char('Requester Id') passphrase = fields.Char('Passphrase') is_test = fields.Boolean('Is Test') @classmethod def __setup__(cls): super(EndiciaConfiguration, cls).__setup__() cls._error_messages.update({ 'endicia_credentials_required': 'Endicia settings on endicia configuration are incomplete.', }) @classmethod def __register__(cls, module_name): TableHandler = backend.get('TableHandler') cursor = Transaction().cursor # Migration from 3.4.0.6 : Migrate account_id field to string if backend.name() == 'postgresql': cursor.execute( 'SELECT pg_typeof("account_id") ' 'FROM endicia_configuration ' 'LIMIT 1', ) # Check if account_id is integer field is_integer = cursor.fetchone()[0] == 'integer' if is_integer: # Migrate integer field to string table = TableHandler(cursor, cls, module_name) table.alter_type('account_id', 'varchar') super(EndiciaConfiguration, cls).__register__(module_name) def get_endicia_credentials(self): """Validate if endicia credentials are complete. """ if not all([ self.account_id, self.requester_id, self.passphrase ]): self.raise_user_error('endicia_credentials_required') return self
Migrate account_id from integer field to char field
Migrate account_id from integer field to char field
Python
bsd-3-clause
priyankarani/trytond-shipping-endicia,fulfilio/trytond-shipping-endicia,prakashpp/trytond-shipping-endicia
--- +++ @@ -3,7 +3,9 @@ configuration.py """ +from trytond import backend from trytond.model import fields, ModelSingleton, ModelSQL, ModelView +from trytond.transaction import Transaction __all__ = ['EndiciaConfiguration'] @@ -14,7 +16,7 @@ """ __name__ = 'endicia.configuration' - account_id = fields.Integer('Account Id') + account_id = fields.Char('Account Id') requester_id = fields.Char('Requester Id') passphrase = fields.Char('Passphrase') is_test = fields.Boolean('Is Test') @@ -27,6 +29,29 @@ 'Endicia settings on endicia configuration are incomplete.', }) + @classmethod + def __register__(cls, module_name): + TableHandler = backend.get('TableHandler') + cursor = Transaction().cursor + + # Migration from 3.4.0.6 : Migrate account_id field to string + if backend.name() == 'postgresql': + cursor.execute( + 'SELECT pg_typeof("account_id") ' + 'FROM endicia_configuration ' + 'LIMIT 1', + ) + + # Check if account_id is integer field + is_integer = cursor.fetchone()[0] == 'integer' + + if is_integer: + # Migrate integer field to string + table = TableHandler(cursor, cls, module_name) + table.alter_type('account_id', 'varchar') + + super(EndiciaConfiguration, cls).__register__(module_name) + def get_endicia_credentials(self): """Validate if endicia credentials are complete. """
9d26f1d7a23e69bf7beae78674eb3ba6511e3f28
my_app/views.py
my_app/views.py
from django.http import HttpResponse from django.shortcuts import render def func(request): return HttpResponse("This is my first django project!") def add(request): a = request.GET['a'] b = request.GET['b'] c = int(a) + int(b) return HttpResponse(str(c)) def index(request): return render(request, 'home.html')
from django.http import HttpResponse from django.shortcuts import render def func(request): return HttpResponse("This is my first django project!") def add(request): a = request.GET['a'] b = request.GET['b'] c = int(a) + int(b) return HttpResponse(str(c)) def index(request): return render(request, 'home.html')
Test Pycharm version control plugin.
Test Pycharm version control plugin.
Python
apache-2.0
wmh-demos/django-first-demo,wmh-demos/django-first-demo
--- +++ @@ -15,3 +15,5 @@ def index(request): return render(request, 'home.html') + +
819f36493e1e0112c3bbe4f92f87f1771cc4af3f
moa/base.py
moa/base.py
''' * when dispatching events, returning True stops it. ''' from weakref import ref from kivy.event import EventDispatcher from kivy.properties import StringProperty, OptionProperty, ObjectProperty import logging class MoaException(Exception): pass class MoaBase(EventDispatcher): named_moas = {} ''' A weakref.ref to the named moa instances. Read only. ''' _last_name = '' def __init__(self, **kwargs): super(MoaBase, self).__init__(**kwargs) def verfiy_name(instance, value): named_moas = MoaBase.named_moas old_name = self._last_name if value == old_name: return if old_name: del named_moas[old_name] if value: if value in named_moas and named_moas[value]() is not None: raise ValueError('Moa instance with name {} already ' 'exists: {}'.format(value, named_moas[value]())) else: named_moas[value] = ref(self) self._last_name = value self.bind(name=verfiy_name) verfiy_name(self, self.name) name = StringProperty('') ''' Unique name across all Moa objects ''' logger = ObjectProperty(logging.getLogger('moa'), baseclass=logging.Logger) source = StringProperty('') ''' E.g. a filename to load that interpreted by the subclass. '''
''' * when dispatching events, returning True stops it. ''' __all__ = ('MoaBase', ) from weakref import ref from kivy.event import EventDispatcher from kivy.properties import StringProperty, OptionProperty, ObjectProperty import logging class MoaBase(EventDispatcher): named_moas = {} ''' A weakref.ref to the named moa instances. Read only. ''' _last_name = '' def __init__(self, **kwargs): super(MoaBase, self).__init__(**kwargs) def verfiy_name(instance, value): named_moas = MoaBase.named_moas old_name = self._last_name if value == old_name: return if old_name: del named_moas[old_name] if value: if value in named_moas and named_moas[value]() is not None: raise ValueError('Moa instance with name {} already ' 'exists: {}'.format(value, named_moas[value]())) else: named_moas[value] = ref(self) self._last_name = value self.bind(name=verfiy_name) verfiy_name(self, self.name) name = StringProperty('') ''' Unique name across all Moa objects ''' logger = ObjectProperty(logging.getLogger('moa'), baseclass=logging.Logger) source = StringProperty('') ''' E.g. a filename to load that interpreted by the subclass. '''
Remove unused moa exception class.
Remove unused moa exception class.
Python
mit
matham/moa
--- +++ @@ -1,16 +1,14 @@ ''' * when dispatching events, returning True stops it. ''' + +__all__ = ('MoaBase', ) from weakref import ref from kivy.event import EventDispatcher from kivy.properties import StringProperty, OptionProperty, ObjectProperty import logging - - -class MoaException(Exception): - pass class MoaBase(EventDispatcher):
30bfdd3a6e31db6849f650406bc8014f836dda62
yolk/__init__.py
yolk/__init__.py
"""yolk. Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.6.2'
"""yolk. Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.7'
Increment minor version to 0.7
Increment minor version to 0.7
Python
bsd-3-clause
myint/yolk,myint/yolk
--- +++ @@ -6,4 +6,4 @@ """ -__version__ = '0.6.2' +__version__ = '0.7'
166c002f9129c9c244532f8d490b55a884c6708b
mla_game/apps/transcript/management/commands/fake_game_one_gameplay.py
mla_game/apps/transcript/management/commands/fake_game_one_gameplay.py
import random from django.core.management.base import BaseCommand from django.contrib.auth.models import User from ...models import ( Transcript, TranscriptPhraseVote ) from ...tasks import update_transcript_stats class Command(BaseCommand): help = 'Creates random votes for 5 phrases in a random transcript' def handle(self, *args, **options): users = User.objects.all() transcript = Transcript.objects.random_transcript(in_progress=False).first() phrases = transcript.phrases.all()[:5] for phrase in phrases: for user in users: TranscriptPhraseVote.objects.create( transcript_phrase=phrase, user=user, upvote=random.choice([True, False]) ) update_transcript_stats(transcript)
import random from django.core.management.base import BaseCommand from django.contrib.auth.models import User from ...models import ( Transcript, TranscriptPhraseVote ) from ...tasks import update_transcript_stats class Command(BaseCommand): help = 'Creates random votes for 5 phrases in a random transcript' def handle(self, *args, **options): users = User.objects.all()[:5] transcript = Transcript.objects.random_transcript( in_progress=False ).first() phrases = transcript.phrases.all()[:5] for phrase in phrases: for user in users: TranscriptPhraseVote.objects.create( transcript_phrase=phrase, user=user, upvote=random.choice([True, False]) ) update_transcript_stats(transcript)
Use a smaller set of users in fake game one gameplay
Use a smaller set of users in fake game one gameplay
Python
mit
WGBH/FixIt,WGBH/FixIt,WGBH/FixIt
--- +++ @@ -13,8 +13,10 @@ help = 'Creates random votes for 5 phrases in a random transcript' def handle(self, *args, **options): - users = User.objects.all() - transcript = Transcript.objects.random_transcript(in_progress=False).first() + users = User.objects.all()[:5] + transcript = Transcript.objects.random_transcript( + in_progress=False + ).first() phrases = transcript.phrases.all()[:5] for phrase in phrases: for user in users:
5fb609b13cf65ef3c29502b9b406b73f03873ab0
pathfinder/tests/BugTracker/Tests/stream-document.SF-2804823.XQUERY.py
pathfinder/tests/BugTracker/Tests/stream-document.SF-2804823.XQUERY.py
import os, sys try: import sybprocess except ImportError: # user private copy for old Python versions import MonetDBtesting.subprocess26 as subprocess def client(cmd, input = None): clt = subprocess.Popen(cmd, shell = True, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, universal_newlines = True) out, err = clt.communicate(input) sys.stdout.write(out) sys.stderr.write(err) def main(): xq_client = os.getenv('XQUERY_CLIENT') client('%s --input=my-document --collection=my-collection' % xq_client, '<document>test document</document>') client('%s -s "pf:documents()"' % xq_client) client('%s -s "pf:del-doc(\'my-document\')"' % xq_client) main()
import os, sys try: import sybprocess except ImportError: # user private copy for old Python versions import MonetDBtesting.subprocess26 as subprocess def client(cmd, input = None): clt = subprocess.Popen(cmd, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, universal_newlines = True) out, err = clt.communicate(input) sys.stdout.write(out) sys.stderr.write(err) def main(): xq_client = os.getenv('XQUERY_CLIENT').split() client(xq_client + ['--input=my-document', '--collection=my-collection'], '<document>test document</document>') client(xq_client + ['-s', 'for $doc in pf:documents() where $doc/@url = "my-document" return $doc']) client(xq_client + ['-s', 'pf:del-doc("my-document")']) main()
Make test independent of whatever else is in the database. Also, use a different way of calling subprocess.Popen so that we can use quotes and dollars without having to do difficult cross-architectural escaping.
Make test independent of whatever else is in the database. Also, use a different way of calling subprocess.Popen so that we can use quotes and dollars without having to do difficult cross-architectural escaping.
Python
mpl-2.0
zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb
--- +++ @@ -7,7 +7,6 @@ def client(cmd, input = None): clt = subprocess.Popen(cmd, - shell = True, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, @@ -17,10 +16,10 @@ sys.stderr.write(err) def main(): - xq_client = os.getenv('XQUERY_CLIENT') - client('%s --input=my-document --collection=my-collection' % xq_client, + xq_client = os.getenv('XQUERY_CLIENT').split() + client(xq_client + ['--input=my-document', '--collection=my-collection'], '<document>test document</document>') - client('%s -s "pf:documents()"' % xq_client) - client('%s -s "pf:del-doc(\'my-document\')"' % xq_client) + client(xq_client + ['-s', 'for $doc in pf:documents() where $doc/@url = "my-document" return $doc']) + client(xq_client + ['-s', 'pf:del-doc("my-document")']) main()
65ecd11b4d4689108eabd464377afdb20ff95240
rest_framework_simplejwt/utils.py
rest_framework_simplejwt/utils.py
from __future__ import unicode_literals from calendar import timegm from datetime import datetime from django.conf import settings from django.utils import six from django.utils.functional import lazy from django.utils.timezone import is_aware, make_aware, utc def make_utc(dt): if settings.USE_TZ and not is_aware(dt): return make_aware(dt, timezone=utc) return dt def aware_utcnow(): return make_utc(datetime.utcnow()) def datetime_to_epoch(dt): return timegm(dt.utctimetuple()) def datetime_from_epoch(ts): return make_utc(datetime.utcfromtimestamp(ts)) def format_lazy(s, *args, **kwargs): return s.format(*args, **kwargs) format_lazy = lazy(format_lazy, six.text_type)
from __future__ import unicode_literals from calendar import timegm from datetime import datetime from django.conf import settings from django.utils import six from django.utils.functional import lazy from django.utils.timezone import is_naive, make_aware, utc def make_utc(dt): if settings.USE_TZ and is_naive(dt): return make_aware(dt, timezone=utc) return dt def aware_utcnow(): return make_utc(datetime.utcnow()) def datetime_to_epoch(dt): return timegm(dt.utctimetuple()) def datetime_from_epoch(ts): return make_utc(datetime.utcfromtimestamp(ts)) def format_lazy(s, *args, **kwargs): return s.format(*args, **kwargs) format_lazy = lazy(format_lazy, six.text_type)
Use is_naive here for clarity
Use is_naive here for clarity
Python
mit
davesque/django-rest-framework-simplejwt,davesque/django-rest-framework-simplejwt
--- +++ @@ -6,11 +6,11 @@ from django.conf import settings from django.utils import six from django.utils.functional import lazy -from django.utils.timezone import is_aware, make_aware, utc +from django.utils.timezone import is_naive, make_aware, utc def make_utc(dt): - if settings.USE_TZ and not is_aware(dt): + if settings.USE_TZ and is_naive(dt): return make_aware(dt, timezone=utc) return dt
eb33d70bfda4857fbd76616cf3bf7fb7d7feec71
spoj/00005/palin.py
spoj/00005/palin.py
#!/usr/bin/env python3 def next_palindrome(k): palin = list(k) n = len(k) mid = n // 2 # case 1: forward right just_copy = False for i in range(mid, n): mirrored = n - 1 - i if k[i] < k[mirrored]: just_copy = True if just_copy: palin[i] = palin[mirrored] # case 2: backward left if not just_copy: i = (n - 1) // 2 while i >= 0 and k[i] == '9': i -= 1 if i >= 0: palin[i] = str(int(k[i]) + 1) for j in range(i + 1, mid): palin[j] = '0' for j in range(mid, n): mirrored = n - 1 - j palin[j] = palin[mirrored] else: # case 3: "99...9" -> "100..01" palin = ['0'] * (n + 1) palin[0] = palin[-1] = '1' return ''.join(palin) if __name__ == '__main__': t = int(input()) for _ in range(t): k = input() print(next_palindrome(k))
#!/usr/bin/env python3 def next_palindrome(k): palin = list(k) n = len(k) mid = n // 2 # case 1: forward right just_copy = False for i in range(mid, n): mirrored = n - 1 - i if k[i] < k[mirrored]: just_copy = True if just_copy: palin[i] = palin[mirrored] # case 2: backward left if not just_copy: i = (n - 1) // 2 while i >= 0 and k[i] == '9': i -= 1 if i >= 0: palin[i] = str(int(k[i]) + 1) for j in range(i + 1, (n + 1) // 2): palin[j] = '0' for j in range((n + 1) // 2, n): mirrored = n - 1 - j palin[j] = palin[mirrored] else: # case 3: "99...9" -> "100..01" palin = ['0'] * (n + 1) palin[0] = palin[-1] = '1' return ''.join(palin) if __name__ == '__main__': t = int(input()) for _ in range(t): k = input() print(next_palindrome(k))
Fix bug in ranges (to middle)
Fix bug in ranges (to middle) - in SPOJ palin Signed-off-by: Karel Ha <[email protected]>
Python
mit
mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming
--- +++ @@ -20,9 +20,9 @@ i -= 1 if i >= 0: palin[i] = str(int(k[i]) + 1) - for j in range(i + 1, mid): + for j in range(i + 1, (n + 1) // 2): palin[j] = '0' - for j in range(mid, n): + for j in range((n + 1) // 2, n): mirrored = n - 1 - j palin[j] = palin[mirrored] else:
b87ebdd208e365d135ba2e9d9c96d9e94b8caf8e
py/g1/http/servers/setup.py
py/g1/http/servers/setup.py
from setuptools import setup setup( name = 'g1.http.servers', packages = [ 'g1.http.servers', ], install_requires = [ 'g1.bases', 'g1.asyncs.kernels', 'g1.asyncs.servers', 'g1.networks.servers', ], extras_require = { 'parts': [ 'g1.apps[asyncs]', ], }, zip_safe = False, )
from setuptools import setup setup( name = 'g1.http.servers', packages = [ 'g1.http.servers', ], install_requires = [ 'g1.asyncs.kernels', 'g1.asyncs.servers', 'g1.bases', 'g1.networks.servers', ], extras_require = { 'parts': [ 'g1.apps[asyncs]', ], }, zip_safe = False, )
Fix import not ordered alphabetically
Fix import not ordered alphabetically
Python
mit
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
--- +++ @@ -6,9 +6,9 @@ 'g1.http.servers', ], install_requires = [ - 'g1.bases', 'g1.asyncs.kernels', 'g1.asyncs.servers', + 'g1.bases', 'g1.networks.servers', ], extras_require = {
e43596395507c4606909087c0e77e84c1a232811
damn/__init__.py
damn/__init__.py
""" **damn** (aka *digital audio for music nerds*) is an easy-to-use python package for digital audio signal processing, analysis and synthesis. """ __version__ = '0.0.0'
""" **damn** (aka *digital audio for music nerds*) is an easy-to-use python package for digital audio signal processing, analysis and synthesis. """ __author__ = 'Romain Clement' __copyright__ = 'Copyright 2014, Romain Clement' __credits__ = [] __license__ = 'MIT' __version__ = "0.0.0" __maintainer__ = 'Romain Clement' __email__ = '[email protected]' __status__ = 'Development'
Add meta information for damn package
[DEV] Add meta information for damn package
Python
mit
rclement/yodel,rclement/yodel
--- +++ @@ -3,4 +3,11 @@ for digital audio signal processing, analysis and synthesis. """ -__version__ = '0.0.0' +__author__ = 'Romain Clement' +__copyright__ = 'Copyright 2014, Romain Clement' +__credits__ = [] +__license__ = 'MIT' +__version__ = "0.0.0" +__maintainer__ = 'Romain Clement' +__email__ = '[email protected]' +__status__ = 'Development'
7c4a8d1249becb11727002c4eb2cd2f58c712244
zou/app/utils/emails.py
zou/app/utils/emails.py
from flask_mail import Message from zou.app import mail, app def send_email(subject, body, recipient_email, html=None): """ Send an email with given subject and body to given recipient. """ if html is None: html = body with app.app_context(): message = Message( sender="Kitsu Bot <[email protected]>", body=body, html=html, subject=subject, recipients=[recipient_email] ) mail.send(message)
from flask_mail import Message from zou.app import mail, app def send_email(subject, body, recipient_email, html=None): """ Send an email with given subject and body to given recipient. """ if html is None: html = body with app.app_context(): mail_default_sender = app.config["MAIL_DEFAULT_SENDER"] message = Message( sender="Kitsu Bot <%s>" % mail_default_sender, body=body, html=html, subject=subject, recipients=[recipient_email] ) mail.send(message)
Fix configuration of email default sender
Fix configuration of email default sender
Python
agpl-3.0
cgwire/zou
--- +++ @@ -10,8 +10,9 @@ if html is None: html = body with app.app_context(): + mail_default_sender = app.config["MAIL_DEFAULT_SENDER"] message = Message( - sender="Kitsu Bot <[email protected]>", + sender="Kitsu Bot <%s>" % mail_default_sender, body=body, html=html, subject=subject,
bcc0fbf9d8dd75b776c6a2137975912bcd0831d5
third_party/__init__.py
third_party/__init__.py
import os.path import sys # This bit of evil should inject third_party into the path for relative imports. sys.path.append(os.path.dirname(__file__))
import os.path import sys # This bit of evil should inject third_party into the path for relative imports. sys.path.insert(1, os.path.dirname(__file__))
Insert third_party into the second slot of sys.path rather than the last slot
Insert third_party into the second slot of sys.path rather than the last slot
Python
apache-2.0
nishad/namebench,jimb0616/namebench,cah0211/namebench,iamang/namebench,tcffisher/namebench,HerlonNascimento/namebench,RomanHargrave/namebench,tectronics/namebench,llaera/namebench,renatogames2/namebench,bluemask2001/namebench,Bandito43/namebench,ran0101/namebench,cloudcache/namebench,deepak5/namebench,thanhuwng/namebench,chosen1/namebench,doadin/namebench,jakeylube95/namebench,erasilva/namebench,MANICX100/namebench,ajitsonlion/namebench,kevinxw/namebench,rbenjamin/namebench,perrytm/namebench,tushevorg/namebench,wluizguedes/namebench,when30/namebench,PyroShark/namebench,jevgen/namebench,alexlovelltroy/namebench,nadeemat/namebench,xubayer786/namebench,ulaskaraoren/namebench,Max-Vader/namebench,xeoron/namebench,lukasfenix/namebench,sushifant/namebench,antar2801/namebench,webhost/namebench,hwuiwon/namebench,rubasben/namebench,dimazalfrianz/namebench,nt1st/namebench,techsd/namebench,phy0/namebench,sbalun/namebench,CookiesandCake/namebench,vishnunuk/namebench,chamakov/namebench,gdbdzgd/namebench,ItsAGeekThing/namebench,KingPsychopath/namebench,palimadra/namebench,seshin/namebench,donavoncade/namebench,TheNite/namebench,woozzoom/namebench,antsant/namebench,asolfre/namebench,Spindletop16/namebench,corruptnova/namebench,Arrowofdarkness/namebench,illAdvised/namebench,Jeff-Lewis/namebench,siripuramrk/namebench,stefrobb/namebench,kiseok7/namebench,feardax/namebench,unreal666/namebench,gavinfaux/namebench,CrazeeIvan/namebench,GLMeece/namebench,cvanwie/namebench,leeoo/namebench,watchamakulit02/namebench,BeZazz/lamebench,souzainf3/namebench,takuya/namebench,skuarch/namebench,cyranodb/namebench,ronzohan/namebench,renanrodm/namebench,manaure/namebench,accomac/namebench,sund/namebench,trulow/namebench,AgentN/namebench,KibaAmor/namebench,benklaasen/namebench,rosemead/namebench,wa111/namebench,alebcay/namebench,bgammill/namebench,jackjshin/namebench,jaechankim/namebench,Forgen/namebench,xxhank/namebench,MicroWorldwide/namebench,ZuluPro/namebench,21winner/namebench,repomain/namebench,deeb230/namebench,etxc/namebench,crocleco/namebench,hypnotika/namebench,thiagomagero/namebench,FatBumbleee/namebench,LavyshAlexander/namebench,Ritvik1512/namebench,petabytekr/namebench,qbektrix/namebench,ericmckean/namebench,edmilson19/namebench,LegitSavage/namebench,isoriss123/namebench,Hazer/namebench,eladelad/namebench,thatchristoph/namebench,snailbob/namebench,el-lumbergato/namebench,dsjr2006/namebench,imranrony/namebench,movermeyer/namebench,melissaihrig/namebench,Xeleste/namebench,hashem78/namebench,ajs124/namebench,aman-tugnawat/namebench,mystique1029/namebench,AdamHull/namebench,doantranhoang/namebench,mspringett/namebench,hitrust/namebench,Jasoning/namebench,RichardWilliamPearse/namebench,richardgroves/namebench,jaded44/namebench,omerhasan/namebench,yiyuandao/namebench,beermix/namebench,cartersgenes/namebench,TorpedoXL/namebench,michaeldavidcarr/namebench,santoshsahoo/namebench,uwevil/namebench,AViisiion/namebench,jtrag/namebench,felipsmartins/namebench,edesiocs/namebench,evelynmitchell/namebench,DanielAttia/namebench,fevangelou/namebench,shannonjlove/namebench,pyshcoder/namebench,edumatos/namebench,pacav69/namebench,fbidu/namebench,kristi29091988/namebench,razrichter/namebench,danieljl/namebench,MarnuLombard/namebench,Kudeshido/namebench,arjun372/namebench,Trinitaria/namebench,pombreda/namebench,jjoaonunes/namebench,jlobaton/namebench,teknix/namebench
--- +++ @@ -2,4 +2,4 @@ import sys # This bit of evil should inject third_party into the path for relative imports. -sys.path.append(os.path.dirname(__file__)) +sys.path.insert(1, os.path.dirname(__file__))
a5cc18bab108f83ab45073272fa467fc62a2649b
run_python_tests.py
run_python_tests.py
#!/usr/bin/python import os import optparse import sys import unittest USAGE = """%prog SDK_PATH TEST_PATH Run unit tests for App Engine apps. SDK_PATH Path to the SDK installation. TEST_PATH Path to package containing test modules. WEBTEST_PATH Path to the webtest library.""" def main(sdk_path, test_path, webtest_path): sys.path.insert(0, sdk_path) import dev_appserver dev_appserver.fix_sys_path() sys.path.append(webtest_path) suite = unittest.loader.TestLoader().discover(test_path, pattern="*test.py") return unittest.TextTestRunner(verbosity=2).run(suite).wasSuccessful() if __name__ == '__main__': parser = optparse.OptionParser(USAGE) options, args = parser.parse_args() if len(args) != 3: print 'Error: Exactly 3 arguments required.' parser.print_help() sys.exit(1) SDK_PATH = args[0] TEST_PATH = args[1] WEBTEST_PATH = args[2] sys.exit(not main(SDK_PATH, TEST_PATH, WEBTEST_PATH))
#!/usr/bin/python import os import optparse import sys import unittest USAGE = """%prog SDK_PATH TEST_PATH Run unit tests for App Engine apps. SDK_PATH Path to the SDK installation. TEST_PATH Path to package containing test modules. WEBTEST_PATH Path to the webtest library.""" def main(sdk_path, test_path, webtest_path): sys.path.insert(0, sdk_path) import dev_appserver dev_appserver.fix_sys_path() sys.path.append(webtest_path) suite = unittest.loader.TestLoader().discover(test_path, pattern="*test.py") unittest.TextTestRunner(verbosity=2).run(suite) if __name__ == '__main__': parser = optparse.OptionParser(USAGE) options, args = parser.parse_args() if len(args) != 3: print 'Error: Exactly 3 arguments required.' parser.print_help() sys.exit(1) SDK_PATH = args[0] TEST_PATH = args[1] WEBTEST_PATH = args[2] main(SDK_PATH, TEST_PATH, WEBTEST_PATH)
Revert "Python tests now return an error code on fail."
Revert "Python tests now return an error code on fail."
Python
bsd-3-clause
pquochoang/samples,jiayliu/apprtc,todotobe1/apprtc,jan-ivar/adapter,82488059/apprtc,shelsonjava/apprtc,procandi/apprtc,4lejandrito/adapter,overtakermtg/samples,mvenkatesh431/samples,JiYou/apprtc,martin7890/samples,Zauberstuhl/adapter,TribeMedia/samples,jiayliu/apprtc,bpyoung92/apprtc,aadebuger/docker-apprtc,xdumaine/adapter,mulyoved/samples,b-cuts/samples,jan-ivar/adapter,dengshaodong/docker-apprtc,smayoorans/samples,juberti/samples,martin7890/samples,Edward-Shawn/samples,mauricionr/samples,ralic/samples,jan-ivar/samples,keshwans/samples,harme199497/adapter,fetterov/samples,dengshaodong/docker-apprtc,fitraditya/samples,shines/adapter,Edward-Shawn/samples,dengshaodong/docker-apprtc,mvenkatesh431/apprtc,smadhusu/AppRTC,shelsonjava/apprtc,smbale/samples,JiYou/apprtc,arnauorriols/apprtc,shelsonjava/apprtc,taylor-b/samples,YouthAndra/apprtc,bpyoung92/apprtc,kod3r/samples,myself659/samples,leehz/samples,procandi/apprtc,Roarz/samples,mauricionr/samples,b-cuts/samples,4lejandrito/adapter,procandi/apprtc,EmreAkkoyun/sample,TheKnarf/apprtc,diddie06/webrtc,bemasc/samples,MahmoudFouad/samples,shelsonjava/samples,procandi/samples,volkanh/volkanh.github.io,virajs/apprtc,dengshaodong/docker-apprtc,Zauberstuhl/adapter,YouthAndra/apprtc,tsruban/samples,fetterov/samples,pquochoang/samples,akashrchoksi/newone,virajs/apprtc,tsruban/samples,virajs/samples,mvenkatesh431/apprtc,virajs/samples,pquochoang/samples,mvenkatesh431/samples,dushmis/webrtc,juberti/samples,overtakermtg/samples,jjrasche/cell-based-RC-control,bemasc/samples,guoweis/webrtc,mvenkatesh431/apprtc,TribeMedia/apprtc,samdutton/apprtc,shelsonjava/apprtc,bemasc/samples,bpyoung92/apprtc,webrtc/samples,jarl-alejandro/apprtc,virajs/apprtc,jjrasche/cell-based-RC-control,shines/adapter,fitraditya/samples,jiayliu/apprtc,mauricionr/samples,shelsonjava/samples,procandi/samples,Acidburn0zzz/adapter,bbandaru/samples,JiYou/apprtc,mulyoved/samples,diddie06/webrtc,samdutton/apprtc,calebboyd/adapter,xdumaine/samples,aadebuger/docker-apprtc,smadhusu/AppRTC,leehz/samples,todotobe1/samples,taylor-b/samples,YouthAndra/apprtc,82488059/apprtc,guoweis/webrtc,smayoorans/samples,mvenkatesh431/apprtc,martin7890/samples,webrtc/apprtc,jarl-alejandro/apprtc,samdutton/apprtc,smbale/samples,knightsofaa/webrtc,jjrasche/cell-based-RC-control,aadebuger/docker-apprtc,xdumaine/adapter,arnauorriols/apprtc,keshwans/samples,samdutton/apprtc,shelsonjava/samples,smadhusu/AppRTC,TheKnarf/apprtc,smadhusu/AppRTC,TribeMedia/samples,volkanh/volkanh.github.io,harme199497/adapter,dajise/samples,mvenkatesh431/samples,YouthAndra/apprtc,samdutton/apprtc,overtakermtg/samples,MahmoudFouad/samples,Acidburn0zzz/adapter,oliverhuangchao/samples,Acidburn0zzz/adapter,fippo/webrtc,TribeMedia/samples,todotobe1/apprtc,myself659/samples,Zauberstuhl/adapter,arnauorriols/apprtc,kod3r/samples,akashrchoksi/newone,xdumaine/samples,todotobe1/apprtc,JiYou/apprtc,oliverhuangchao/samples,shelsonjava/apprtc,mvenkatesh431/apprtc,bpyoung92/apprtc,procandi/apprtc,fippo/apprtc,askdaddy/samples,Edward-Shawn/samples,todotobe1/apprtc,harme199497/adapter,dengshaodong/docker-apprtc,webrtc/adapter,todotobe1/samples,dushmis/webrtc,webrtc/apprtc,JiYou/apprtc,calebboyd/adapter,smbale/samples,TheKnarf/apprtc,TribeMedia/apprtc,calebboyd/adapter,82488059/apprtc,jarl-alejandro/apprtc,fippo/apprtc,b-cuts/samples,82488059/apprtc,kod3r/samples,jan-ivar/samples,jiayliu/apprtc,samdutton/webrtc,TribeMedia/apprtc,xdumaine/samples,82488059/apprtc,fippo/apprtc,virajs/apprtc,bbandaru/samples,fippo/webrtc,smadhusu/AppRTC,todotobe1/apprtc,TribeMedi
a/apprtc,aadebuger/docker-apprtc,shaohung001/samples,leehz/samples,webrtc/apprtc,virajs/apprtc,fitraditya/samples,oliverhuangchao/samples,dajise/samples,jiayliu/apprtc,aadebuger/docker-apprtc,procandi/samples,webrtc/samples,dajise/samples,arnauorriols/apprtc,todotobe1/samples,volkanh/volkanh.github.io,mulyoved/samples,tsruban/samples,samdutton/webrtc,TheKnarf/apprtc,4lejandrito/adapter,jjrasche/cell-based-RC-control,webrtc/apprtc,bbandaru/samples,YouthAndra/apprtc,Roarz/samples,jarl-alejandro/apprtc,webrtc/apprtc,virajs/samples,MahmoudFouad/samples,askdaddy/samples,knightsofaa/webrtc,TribeMedia/apprtc,guoweis/webrtc,bpyoung92/apprtc,akashrchoksi/newone,fetterov/samples,shaohung001/samples,myself659/samples,webrtc/adapter,fippo/apprtc,smayoorans/samples,ralic/samples,shines/adapter,fippo/apprtc,arnauorriols/apprtc,TheKnarf/apprtc,EmreAkkoyun/sample,jarl-alejandro/apprtc,procandi/apprtc,ralic/samples,Roarz/samples,EmreAkkoyun/sample
--- +++ @@ -20,7 +20,7 @@ sys.path.append(webtest_path) suite = unittest.loader.TestLoader().discover(test_path, pattern="*test.py") - return unittest.TextTestRunner(verbosity=2).run(suite).wasSuccessful() + unittest.TextTestRunner(verbosity=2).run(suite) if __name__ == '__main__': @@ -33,4 +33,4 @@ SDK_PATH = args[0] TEST_PATH = args[1] WEBTEST_PATH = args[2] - sys.exit(not main(SDK_PATH, TEST_PATH, WEBTEST_PATH)) + main(SDK_PATH, TEST_PATH, WEBTEST_PATH)
4e882ef7025581014520311734e0b6d626f3fd8d
impactstoryanalytics/highcharts.py
impactstoryanalytics/highcharts.py
boilerplate = { 'chart': { 'renderTo': 'container', 'plotBackgroundColor': 'none', 'backgroundColor'': 'none', }, 'title': {'text': 'null'} 'subtitle': {'text': 'null'} 'credits': { 'enabled': False }, 'plotOptions': { 'series': { 'marker': { 'enabled': false } } }, }
boilerplate = { 'chart': { 'renderTo': 'container', 'plotBackgroundColor': 'none', 'backgroundColor': 'none', }, 'title': {'text': 'null'}, 'subtitle': {'text': 'null'}, 'credits': { 'enabled': False }, 'plotOptions': { 'series': { 'marker': { 'enabled': False } } }, }
Fix bugs in new Highcharts options
Fix bugs in new Highcharts options
Python
mit
total-impact/impactstory-analytics,Impactstory/impactstory-analytics,Impactstory/impactstory-analytics,Impactstory/impactstory-analytics,Impactstory/impactstory-analytics,total-impact/impactstory-analytics,total-impact/impactstory-analytics,total-impact/impactstory-analytics
--- +++ @@ -2,17 +2,17 @@ 'chart': { 'renderTo': 'container', 'plotBackgroundColor': 'none', - 'backgroundColor'': 'none', + 'backgroundColor': 'none', }, - 'title': {'text': 'null'} - 'subtitle': {'text': 'null'} + 'title': {'text': 'null'}, + 'subtitle': {'text': 'null'}, 'credits': { 'enabled': False }, 'plotOptions': { 'series': { 'marker': { - 'enabled': false + 'enabled': False } } },
6cacf9c97e485308b95ad9a075aa5caf67f2b574
pi_lapse.py
pi_lapse.py
import subprocess from datetime import datetime, timedelta frame_counter = 1 # Time in seconds # 1 Hour = 3600 # 1 Day = 86400 # Time between each photo (seconds) time_between_frames = 3 # Duration of Time Lapse (seconds) duration = 86400 # Image Dimensions (pixels) image_height = 972 image_width = 1296 total_frames = duration / time_between_frames def capture_image(): t = datetime.now() filename = "capture_%04d-%02d-%02d_%02d-%02d-%02d.jpg" % (t.year, t.month, t.day, t.hour, t.minute, t.second) subprocess.call("raspistill -w %d -h %d -e jpg -q 15 -o %s" % (image_width, image_height, filename), shell = True) print("Captured Image %d of %d, named: %s" % (frame_counter, total_frames, filename)) last_capture = datetime.now() while frame_counter < total_frames: if last_capture < (datetime.now() - timedelta(seconds = time_between_frames)): last_capture = datetime.now() capture_image() frame_counter += 1
import subprocess from datetime import datetime, timedelta frame_counter = 1 # Time in seconds # 1 Hour = 3600 # 1 Day = 86400 # Time between each photo (seconds) time_between_frames = 60 # Duration of Time Lapse (seconds) duration = 86400 # Image Dimensions (pixels) image_height = 972 image_width = 1296 total_frames = duration / time_between_frames def capture_image(): t = datetime.now() filename = "capture_%04d-%02d-%02d_%02d-%02d-%02d.jpg" % (t.year, t.month, t.day, t.hour, t.minute, t.second) subprocess.call("raspistill -w %d -h %d -e jpg -q 15 -o %s" % (image_width, image_height, filename), shell = True) print("Captured Image %d of %d, named: %s" % (frame_counter, total_frames, filename)) last_capture = datetime.now() while frame_counter < total_frames: if last_capture < (datetime.now() - timedelta(seconds = time_between_frames)): last_capture = datetime.now() capture_image() frame_counter += 1
Change default time between images
Change default time between images
Python
mit
tiimgreen/pi_lapse
--- +++ @@ -7,7 +7,7 @@ # 1 Day = 86400 # Time between each photo (seconds) -time_between_frames = 3 +time_between_frames = 60 # Duration of Time Lapse (seconds) duration = 86400
49e95022577eb40bcf9e1d1c9f95be7269fd0e3b
scripts/update_acq_stats.py
scripts/update_acq_stats.py
#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst from mica.stats import update_acq_stats update_acq_stats.main() import os table_file = mica.stats.acq_stats.table_file file_stat = os.stat(table_file) if file_stat.st_size > 50e6: print(""" Warning: {tfile} is larger than 50MB and may need Warning: to be manually repacked (i.e.): Warning: Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5 Warning: cp compressed.h5 {tfile} """.format(tfile=table_file))
#!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst import os from mica.stats import update_acq_stats import mica.stats.acq_stats update_acq_stats.main() table_file = mica.stats.acq_stats.TABLE_FILE file_stat = os.stat(table_file) if file_stat.st_size > 50e6: print(""" Warning: {tfile} is larger than 50MB and may need Warning: to be manually repacked (i.e.): Warning: Warning: ptrepack --chunkshape=auto --propindexes --keep-source-filters {tfile} compressed.h5 Warning: cp compressed.h5 {tfile} """.format(tfile=table_file))
Fix reference to acq table file in script
Fix reference to acq table file in script
Python
bsd-3-clause
sot/mica,sot/mica
--- +++ @@ -1,11 +1,12 @@ #!/usr/bin/env python # Licensed under a 3-clause BSD style license - see LICENSE.rst - +import os from mica.stats import update_acq_stats +import mica.stats.acq_stats update_acq_stats.main() -import os -table_file = mica.stats.acq_stats.table_file + +table_file = mica.stats.acq_stats.TABLE_FILE file_stat = os.stat(table_file) if file_stat.st_size > 50e6: print("""
8a6144fc3918856cb2259f65f9ee5cc9cfaf1fdc
locustfile.py
locustfile.py
from locust import HttpLocust, TaskSet, task class UserBehavior(TaskSet): tasks = [] def on_start(self): pass @task def index(self): self.client.get("/") @task def move_map(self): self.client.get("") @task def select_scene(self): # Get url self.client.get() @task def render_preview(self): self.client.get() @task def render_full(self): self.client.get() class WebsiteUser(HttpLocust): task_set = UserBehavior min_wait = 1000 max_wait = 5000
from locust import HttpLocust, TaskSet, task from bs4 import BeautifulSoup from requests import Session import random class UserBehavior(TaskSet): def on_start(self): pass @task def index(self): self.client.get("/") @task def move_map(self): lat = random.uniform(-1, 1) lon = random.uniform(-1, 1) response = self.client.post( url="/ajax", data={'lat': lat, 'lng': lng,} ) self.client.get("") @task def select_scene(self): # Get url soup = BeautifulSoup(self.client.get("")) self.client.get() @task def render_preview(self): self.client.get() @task def render_full(self): self.client.get() class WebsiteUser(HttpLocust): task_set = UserBehavior min_wait = 1000 max_wait = 5000
Add random functionality to map move.
Add random functionality to map move.
Python
mit
recombinators/snapsat,recombinators/snapsat,recombinators/snapsat
--- +++ @@ -1,9 +1,10 @@ from locust import HttpLocust, TaskSet, task +from bs4 import BeautifulSoup +from requests import Session +import random class UserBehavior(TaskSet): - tasks = [] - def on_start(self): pass @@ -13,22 +14,30 @@ @task def move_map(self): + lat = random.uniform(-1, 1) + lon = random.uniform(-1, 1) + response = self.client.post( + url="/ajax", + data={'lat': lat, 'lng': lng,} + ) + self.client.get("") - @task - def select_scene(self): - # Get url + @task + def select_scene(self): + # Get url + soup = BeautifulSoup(self.client.get("")) - self.client.get() - - @task - def render_preview(self): self.client.get() - @task - def render_full(self): - self.client.get() + @task + def render_preview(self): + self.client.get() + + @task + def render_full(self): + self.client.get() class WebsiteUser(HttpLocust):
3b41e2166adde50f36f8f7ea389c80b76b83acaf
test/test_wavedrom.py
test/test_wavedrom.py
import subprocess from utils import * @all_files_in_dir('wavedrom_0') def test_wavedrom_0(datafiles): with datafiles.as_cwd(): subprocess.check_call(['python3', 'wavedrom-test.py']) @all_files_in_dir('wavedrom_1') def test_wavedrom_1(datafiles): with datafiles.as_cwd(): for s in get_simulators(): subprocess.check_call(['runSVUnit', '-s', s, '-w']) expect_testrunner_pass('run.log')
import subprocess from utils import * @all_files_in_dir('wavedrom_0') def test_wavedrom_0(datafiles): with datafiles.as_cwd(): subprocess.check_call(['python3', 'wavedrom-test.py']) @all_files_in_dir('wavedrom_1') @all_available_simulators() def test_wavedrom_1(datafiles, simulator): with datafiles.as_cwd(): subprocess.check_call(['runSVUnit', '-s', simulator, '-w']) expect_testrunner_pass('run.log')
Update wavedrom tests to get simulators via fixture
Update wavedrom tests to get simulators via fixture
Python
apache-2.0
nosnhojn/svunit-code,svunit/svunit,nosnhojn/svunit-code,svunit/svunit,svunit/svunit,nosnhojn/svunit-code
--- +++ @@ -9,8 +9,8 @@ @all_files_in_dir('wavedrom_1') -def test_wavedrom_1(datafiles): +@all_available_simulators() +def test_wavedrom_1(datafiles, simulator): with datafiles.as_cwd(): - for s in get_simulators(): - subprocess.check_call(['runSVUnit', '-s', s, '-w']) - expect_testrunner_pass('run.log') + subprocess.check_call(['runSVUnit', '-s', simulator, '-w']) + expect_testrunner_pass('run.log')
9d041287e5e0d1950d5dcda23f6f68522d287282
tests/test_machine.py
tests/test_machine.py
import rml.machines def test_machine_load_elements(): lattice = rml.machines.get_elements(machine='SRI21', elemType='BPM') assert len(lattice) == 173 for element in lattice.get_elements(): assert element.get_pv_name('readback')
import rml.machines def test_machine_load_elements(): lattice = rml.machines.get_elements(machine='SRI21', elem_type='BPM') assert len(lattice) == 173 for element in lattice.get_elements(): assert isinstance(element.get_pv_name('readback', 'x'), str) assert isinstance(element.get_pv_name('readback', 'y'), str)
Test if pvs are loaded correctly from the database
Test if pvs are loaded correctly from the database
Python
apache-2.0
willrogers/pml,willrogers/pml,razvanvasile/RML
--- +++ @@ -2,7 +2,8 @@ def test_machine_load_elements(): - lattice = rml.machines.get_elements(machine='SRI21', elemType='BPM') + lattice = rml.machines.get_elements(machine='SRI21', elem_type='BPM') assert len(lattice) == 173 for element in lattice.get_elements(): - assert element.get_pv_name('readback') + assert isinstance(element.get_pv_name('readback', 'x'), str) + assert isinstance(element.get_pv_name('readback', 'y'), str)
4a3df7842ab8f305ece134aa223801007d55c4f9
timm/utils/metrics.py
timm/utils/metrics.py
""" Eval metrics and related Hacked together by / Copyright 2020 Ross Wightman """ class AverageMeter: """Computes and stores the average and current value""" def __init__(self): self.reset() def reset(self): self.val = 0 self.avg = 0 self.sum = 0 self.count = 0 def update(self, val, n=1): self.val = val self.sum += val * n self.count += n self.avg = self.sum / self.count def accuracy(output, target, topk=(1,)): """Computes the accuracy over the k top predictions for the specified values of k""" maxk = max(topk) batch_size = target.size(0) _, pred = output.topk(maxk, 1, True, True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) return [correct[:k].view(-1).float().sum(0) * 100. / batch_size for k in topk]
""" Eval metrics and related Hacked together by / Copyright 2020 Ross Wightman """ class AverageMeter: """Computes and stores the average and current value""" def __init__(self): self.reset() def reset(self): self.val = 0 self.avg = 0 self.sum = 0 self.count = 0 def update(self, val, n=1): self.val = val self.sum += val * n self.count += n self.avg = self.sum / self.count def accuracy(output, target, topk=(1,)): """Computes the accuracy over the k top predictions for the specified values of k""" maxk = max(topk) batch_size = target.size(0) _, pred = output.topk(maxk, 1, True, True) pred = pred.t() correct = pred.eq(target.reshape(1, -1).expand_as(pred)) return [correct[:k].reshape(-1).float().sum(0) * 100. / batch_size for k in topk]
Fix topn metric view regression on PyTorch 1.7
Fix topn metric view regression on PyTorch 1.7
Python
apache-2.0
rwightman/pytorch-image-models,rwightman/pytorch-image-models
--- +++ @@ -28,5 +28,5 @@ batch_size = target.size(0) _, pred = output.topk(maxk, 1, True, True) pred = pred.t() - correct = pred.eq(target.view(1, -1).expand_as(pred)) - return [correct[:k].view(-1).float().sum(0) * 100. / batch_size for k in topk] + correct = pred.eq(target.reshape(1, -1).expand_as(pred)) + return [correct[:k].reshape(-1).float().sum(0) * 100. / batch_size for k in topk]
362c8dacda35bac24aa83e4fcaa2f6bac37150fd
tests/test_mw_util.py
tests/test_mw_util.py
"""Unit tests for cat2cohort.""" import unittest from mw_util import str2cat class TestMWutil(unittest.TestCase): """Test methods from mw_util.""" pass
"""Unit tests for cat2cohort.""" import unittest from mw_util import str2cat class TestMWutil(unittest.TestCase): """Test methods from mw_util.""" def test_str2cat(self): """Test str2cat.""" values = [ ('A', 'Category:A'), ('Category:B', 'Category:B'), ] for value, expected in values: self.assertEqual(str2cat(value), expected)
Add unit test for str2cat method.
Add unit test for str2cat method.
Python
mit
Commonists/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics,Commonists/wm_metrics,danmichaelo/wm_metrics,danmichaelo/wm_metrics,danmichaelo/wm_metrics,Commonists/wm_metrics
--- +++ @@ -8,4 +8,11 @@ """Test methods from mw_util.""" - pass + def test_str2cat(self): + """Test str2cat.""" + values = [ + ('A', 'Category:A'), + ('Category:B', 'Category:B'), + ] + for value, expected in values: + self.assertEqual(str2cat(value), expected)
ebf52caf6ee09ef1f15cb88815a1fb8008899c79
tests/test_reactjs.py
tests/test_reactjs.py
# -*- coding: utf-8 -*- import dukpy class TestReactJS(object): def test_hello_world(self): jsx = dukpy.jsx_compile('var react_hello = <h1>Hello, world!</h1>;') jsi = dukpy.JSInterpreter() result = jsi.evaljs([ ''' var React = require('react/react'), ReactDOM = require('react/react-dom-server'); ''', jsx, 'ReactDOM.renderToStaticMarkup(react_hello, null);' ]) assert result == '<h1>Hello, world!</h1>'
# -*- coding: utf-8 -*- import dukpy class TestReactJS(object): def test_hello_world(self): jsx = dukpy.jsx_compile('var react_hello = <h1>Hello, world!</h1>;') jsi = dukpy.JSInterpreter() result = jsi.evaljs([ ''' var React = require('react/react'), ReactDOM = require('react/react-dom-server'); ''', jsx, 'ReactDOM.renderToStaticMarkup(react_hello, null);' ]) assert result == '<h1>Hello, world!</h1>', res def test_jsx_mixed(self): code = ''' var React = require('react/react'), ReactDOM = require('react/react-dom-server'); ReactDOM.renderToStaticMarkup(<h1>Hello, world!</h1>, null); ''' jsx = dukpy.jsx_compile(code) res = dukpy.evaljs(jsx) assert res == '<h1>Hello, world!</h1>', res def test_react_binding(self): code = ''' var React = require('react/react'), ReactDOM = require('react/react-dom-server'); var HelloWorld = React.createClass({ render: function() { return ( <div className="helloworld"> Hello {this.props.data.name} </div> ); } }); ReactDOM.renderToStaticMarkup(<HelloWorld data={dukpy.data}/>, null); ''' jsx = dukpy.jsx_compile(code) res = dukpy.evaljs(jsx, data={'id': 1, 'name': "Alessandro"}) assert res == '<div class="helloworld">Hello Alessandro</div>', res
Add tests for a React Component
Add tests for a React Component
Python
mit
amol-/dukpy,amol-/dukpy,amol-/dukpy
--- +++ @@ -14,4 +14,35 @@ jsx, 'ReactDOM.renderToStaticMarkup(react_hello, null);' ]) - assert result == '<h1>Hello, world!</h1>' + assert result == '<h1>Hello, world!</h1>', res + + def test_jsx_mixed(self): + code = ''' +var React = require('react/react'), + ReactDOM = require('react/react-dom-server'); +ReactDOM.renderToStaticMarkup(<h1>Hello, world!</h1>, null); +''' + jsx = dukpy.jsx_compile(code) + res = dukpy.evaljs(jsx) + assert res == '<h1>Hello, world!</h1>', res + + def test_react_binding(self): + code = ''' +var React = require('react/react'), + ReactDOM = require('react/react-dom-server'); + +var HelloWorld = React.createClass({ + render: function() { + return ( + <div className="helloworld"> + Hello {this.props.data.name} + </div> + ); + } +}); + +ReactDOM.renderToStaticMarkup(<HelloWorld data={dukpy.data}/>, null); +''' + jsx = dukpy.jsx_compile(code) + res = dukpy.evaljs(jsx, data={'id': 1, 'name': "Alessandro"}) + assert res == '<div class="helloworld">Hello Alessandro</div>', res
15240d40ad027dd9fa058d07b289f681e7d3e487
src/webassets/filter/clevercss.py
src/webassets/filter/clevercss.py
from __future__ import absolute_import from webassets.filter import Filter __all__ = ('CleverCSS',) class CleverCSS(Filter): """Converts `CleverCSS <http://sandbox.pocoo.org/clevercss/>`_ markup to real CSS. If you want to combine it with other CSS filters, make sure this one runs first. """ name = 'clevercss' max_debug_level = None def setup(self): import clevercss self.clevercss = clevercss def output(self, _in, out, **kw): out.write(self.clevercss.convert(_in.read()))
from __future__ import absolute_import from webassets.filter import Filter __all__ = ('CleverCSS',) class CleverCSS(Filter): """Converts `CleverCSS <http://sandbox.pocoo.org/clevercss/>`_ markup to real CSS. If you want to combine it with other CSS filters, make sure this one runs first. """ name = 'clevercss' max_debug_level = None def setup(self): import clevercss self.clevercss = clevercss def output(self, _in, out, **kw): out.write(self.clevercss.convert(_in.read()))
Clean a weird unicode char in beginning of file
Clean a weird unicode char in beginning of file The first char in clevercss.py filter is <U+FEFF>, which doesn't show up in vim or other editors. Can view it in less and can edit it using nano. This was throwing up UnicodeDecodeErrors when running nosetests with html output for coverage.
Python
bsd-2-clause
0x1997/webassets,john2x/webassets,wijerasa/webassets,0x1997/webassets,aconrad/webassets,heynemann/webassets,florianjacob/webassets,florianjacob/webassets,heynemann/webassets,JDeuce/webassets,aconrad/webassets,glorpen/webassets,JDeuce/webassets,glorpen/webassets,john2x/webassets,aconrad/webassets,scorphus/webassets,wijerasa/webassets,heynemann/webassets,scorphus/webassets,glorpen/webassets
--- +++ @@ -1,4 +1,4 @@ -from __future__ import absolute_import +from __future__ import absolute_import from webassets.filter import Filter
484e5693b2f3e0bc8c238cd64afeaad17bfa6673
skimage/viewer/qt/QtCore.py
skimage/viewer/qt/QtCore.py
from . import qt_api if qt_api == 'pyside': from PySide.QtCore import * elif qt_api == 'pyqt': from PyQt4.QtCore import * else: # Mock objects Qt = None def pyqtSignal(*args, **kwargs): pass
from . import qt_api if qt_api == 'pyside': from PySide.QtCore import * elif qt_api == 'pyqt': from PyQt4.QtCore import * else: # Mock objects for buildbot (which doesn't have Qt, but imports viewer). class Qt(object): TopDockWidgetArea = None BottomDockWidgetArea = None LeftDockWidgetArea = None RightDockWidgetArea = None def pyqtSignal(*args, **kwargs): pass
Add attributes to Mock object to fix Travis build
Add attributes to Mock object to fix Travis build
Python
bsd-3-clause
ajaybhat/scikit-image,chintak/scikit-image,warmspringwinds/scikit-image,vighneshbirodkar/scikit-image,SamHames/scikit-image,SamHames/scikit-image,SamHames/scikit-image,michaelpacer/scikit-image,jwiggins/scikit-image,rjeli/scikit-image,blink1073/scikit-image,almarklein/scikit-image,bsipocz/scikit-image,almarklein/scikit-image,paalge/scikit-image,robintw/scikit-image,vighneshbirodkar/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,chriscrosscutler/scikit-image,oew1v07/scikit-image,chintak/scikit-image,pratapvardhan/scikit-image,youprofit/scikit-image,paalge/scikit-image,emon10005/scikit-image,emon10005/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,ofgulban/scikit-image,ClinicalGraphics/scikit-image,almarklein/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,GaZ3ll3/scikit-image,rjeli/scikit-image,chriscrosscutler/scikit-image,pratapvardhan/scikit-image,juliusbierk/scikit-image,ClinicalGraphics/scikit-image,GaZ3ll3/scikit-image,keflavich/scikit-image,blink1073/scikit-image,bsipocz/scikit-image,michaelaye/scikit-image,SamHames/scikit-image,almarklein/scikit-image,youprofit/scikit-image,WarrenWeckesser/scikits-image,dpshelio/scikit-image,newville/scikit-image,Hiyorimi/scikit-image,juliusbierk/scikit-image,paalge/scikit-image,oew1v07/scikit-image,ajaybhat/scikit-image,Midafi/scikit-image,michaelaye/scikit-image,chintak/scikit-image,robintw/scikit-image,michaelpacer/scikit-image,warmspringwinds/scikit-image,Britefury/scikit-image,newville/scikit-image,keflavich/scikit-image,bennlich/scikit-image,ofgulban/scikit-image,bennlich/scikit-image,rjeli/scikit-image,Britefury/scikit-image,Hiyorimi/scikit-image,dpshelio/scikit-image
--- +++ @@ -5,7 +5,12 @@ elif qt_api == 'pyqt': from PyQt4.QtCore import * else: - # Mock objects - Qt = None + # Mock objects for buildbot (which doesn't have Qt, but imports viewer). + class Qt(object): + TopDockWidgetArea = None + BottomDockWidgetArea = None + LeftDockWidgetArea = None + RightDockWidgetArea = None + def pyqtSignal(*args, **kwargs): pass
2a32fc912a5839f627a216918e4671e6547ee53b
tests/utils/driver.py
tests/utils/driver.py
import os from importlib import import_module from .testdriver import TestDriver class Driver(TestDriver): drivers = {} def __new__(cls, type, *args, **kwargs): if type not in cls.drivers: try: mod = import_module('onitu.drivers.{}.tests.driver'. format(type)) except ImportError: raise KeyError("No such driver {}".format(repr(type))) cls.drivers[type] = mod.Driver return cls.drivers[type](*args, **kwargs) class LocalStorageDriver(TestDriver): def __new__(cls, *args, **kwargs): return Driver('local_storage', *args, **kwargs) class TargetDriver(Driver): def __new__(cls, *args, **kwargs): type = os.environ.get('ONITU_TEST_DRIVER', 'local_storage') return Driver(type, *args, **kwargs)
import os import pkg_resources from .testdriver import TestDriver class Driver(TestDriver): drivers = {} def __new__(cls, name, *args, **kwargs): entry_points = pkg_resources.iter_entry_points('onitu.tests') tests_modules = {e.name: e for e in entry_points} if name not in tests_modules: raise ImportError( "Cannot import tests for driver {}".format(name) ) try: tests = tests_modules[name].load() except ImportError as e: raise ImportError( "Error importing tests for driver {}: {}".format(name, e) ) try: driver = tests.Driver except ImportError: raise ImportError( "Tests for driver {} don't expose a" "Driver class".format(name) ) cls.drivers[name] = driver return driver(*args, **kwargs) class LocalStorageDriver(TestDriver): def __new__(cls, *args, **kwargs): return Driver('local_storage', *args, **kwargs) class TargetDriver(Driver): def __new__(cls, *args, **kwargs): type = os.environ.get('ONITU_TEST_DRIVER', 'local_storage') return Driver(type, *args, **kwargs)
Load tests helpers using entry_points
Load tests helpers using entry_points
Python
mit
onitu/onitu,onitu/onitu,onitu/onitu
--- +++ @@ -1,5 +1,5 @@ import os -from importlib import import_module +import pkg_resources from .testdriver import TestDriver @@ -7,15 +7,32 @@ class Driver(TestDriver): drivers = {} - def __new__(cls, type, *args, **kwargs): - if type not in cls.drivers: - try: - mod = import_module('onitu.drivers.{}.tests.driver'. - format(type)) - except ImportError: - raise KeyError("No such driver {}".format(repr(type))) - cls.drivers[type] = mod.Driver - return cls.drivers[type](*args, **kwargs) + def __new__(cls, name, *args, **kwargs): + entry_points = pkg_resources.iter_entry_points('onitu.tests') + tests_modules = {e.name: e for e in entry_points} + + if name not in tests_modules: + raise ImportError( + "Cannot import tests for driver {}".format(name) + ) + + try: + tests = tests_modules[name].load() + except ImportError as e: + raise ImportError( + "Error importing tests for driver {}: {}".format(name, e) + ) + + try: + driver = tests.Driver + except ImportError: + raise ImportError( + "Tests for driver {} don't expose a" + "Driver class".format(name) + ) + + cls.drivers[name] = driver + return driver(*args, **kwargs) class LocalStorageDriver(TestDriver):
86f6191867141d7a7a165b227255d7b4406eb4f4
accounts/utils.py
accounts/utils.py
""" Utility functions for the accounts app. """ from django.core.exceptions import ObjectDoesNotExist def get_user_city(user): """Return the user's city. If unavailable, return an empty string.""" # If the profile is absent (i.e. superuser), return None. try: city = user.common_profile.city except ObjectDoesNotExist: city = '' return city def get_user_gender(user): """Return the user's city. If unavailable, return an empty string.""" # If either the profile (i.e. superuser) or the college # (i.e. non-student) are absent, return an empty string. try: gender = user.common_profile.college.gender except (ObjectDoesNotExist, AttributeError): gender = '' return gender
""" Utility functions for the accounts app. """ from django.core.exceptions import ObjectDoesNotExist def get_user_city(user): """Return the user's city. If unavailable, return an empty string.""" # If the profile is absent (i.e. superuser), return None. try: city = user.common_profile.city except (ObjectDoesNotExist, AttributeError): city = '' return city def get_user_gender(user): """Return the user's city. If unavailable, return an empty string.""" # If either the profile (i.e. superuser) or the college # (i.e. non-student) are absent, return an empty string. try: gender = user.common_profile.college.gender except (ObjectDoesNotExist, AttributeError): gender = '' return gender
Fix crash on non-logged in users.
Fix crash on non-logged in users.
Python
agpl-3.0
osamak/student-portal,osamak/student-portal,osamak/student-portal,osamak/student-portal,enjaz/enjaz,enjaz/enjaz,enjaz/enjaz,enjaz/enjaz,osamak/student-portal,enjaz/enjaz
--- +++ @@ -8,7 +8,7 @@ # If the profile is absent (i.e. superuser), return None. try: city = user.common_profile.city - except ObjectDoesNotExist: + except (ObjectDoesNotExist, AttributeError): city = '' return city
1f55fe2e67df7826c6f99ca2874a56ccbe2bfc02
dsub/_dsub_version.py
dsub/_dsub_version.py
# Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Single source of truth for dsub's version. This must remain small and dependency-free so that any dsub module may import it without creating circular dependencies. Note that this module is parsed as a text file by setup.py and changes to the format of this file could break setup.py. The version should follow formatting requirements specified in PEP-440. - https://www.python.org/dev/peps/pep-0440 A typical release sequence will be versioned as: 0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ... """ DSUB_VERSION = '0.2.3.dev0'
# Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Single source of truth for dsub's version. This must remain small and dependency-free so that any dsub module may import it without creating circular dependencies. Note that this module is parsed as a text file by setup.py and changes to the format of this file could break setup.py. The version should follow formatting requirements specified in PEP-440. - https://www.python.org/dev/peps/pep-0440 A typical release sequence will be versioned as: 0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ... """ DSUB_VERSION = '0.2.3'
Update dsub version to 0.2.3.
Update dsub version to 0.2.3. PiperOrigin-RevId: 222307052
Python
apache-2.0
DataBiosphere/dsub,DataBiosphere/dsub
--- +++ @@ -26,4 +26,4 @@ 0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ... """ -DSUB_VERSION = '0.2.3.dev0' +DSUB_VERSION = '0.2.3'
6795e112e4f7037449850a361ab6b2f85fc2a66e
service/settings/staging.py
service/settings/staging.py
from service.settings.production import * ALLOWED_HOSTS = [ 'fantastic-doodle--staging.herokuapp.com', ]
from service.settings.production import * ALLOWED_HOSTS = [ 'fantastic-doodle--staging.herokuapp.com', '.herokuapp.com', ]
Add .herokuapp.com to ALLOWED_HOSTS to support review apps
Add .herokuapp.com to ALLOWED_HOSTS to support review apps
Python
unlicense
Mystopia/fantastic-doodle
--- +++ @@ -2,4 +2,5 @@ ALLOWED_HOSTS = [ 'fantastic-doodle--staging.herokuapp.com', + '.herokuapp.com', ]
3800c095f58e9bc2ca8c580537ea576049bbfe2d
sell/urls.py
sell/urls.py
from django.conf.urls import url from sell import views urlpatterns = [ url(r'^$', views.index, name='index'), url(r'^personal/$', views.personal_data), url(r'^books/$', views.books), url(r'^summary/$', views.summary), ]
from django.conf.urls import url from sell import views urlpatterns = [ url(r'^$', views.index), url(r'^personal/$', views.personal_data), url(r'^books/$', views.books), url(r'^summary/$', views.summary), ]
Remove unnecessary URL name in Sell app
Remove unnecessary URL name in Sell app
Python
agpl-3.0
m4tx/egielda,m4tx/egielda,m4tx/egielda
--- +++ @@ -4,7 +4,7 @@ urlpatterns = [ - url(r'^$', views.index, name='index'), + url(r'^$', views.index), url(r'^personal/$', views.personal_data), url(r'^books/$', views.books), url(r'^summary/$', views.summary),
c47c043e76ac037456b8e966a5f9d60a151e3120
elodie/geolocation.py
elodie/geolocation.py
from os import path from ConfigParser import ConfigParser import requests import sys def reverse_lookup(lat, lon): if(lat is None or lon is None): return None if not path.exists('./config.ini'): return None config = ConfigParser() config.read('./config.ini') if('MapQuest' not in config.sections()): return None key = config.get('MapQuest', 'key') try: r = requests.get('https://open.mapquestapi.com/nominatim/v1/reverse.php?key=%s&lat=%s&lon=%s&format=json' % (key, lat, lon)) return r.json() except requests.exceptions.RequestException as e: print e return None except ValueError as e: print r.text print e return None def place_name(lat, lon): geolocation_info = reverse_lookup(lat, lon) if(geolocation_info is not None): if('address' in geolocation_info): address = geolocation_info['address'] if('city' in address): return address['city'] elif('state' in address): return address['state'] elif('country' in address): return address['country'] return None
from os import path from ConfigParser import ConfigParser import requests import sys def reverse_lookup(lat, lon): if(lat is None or lon is None): return None config_file = '%s/config.ini' % path.dirname(path.dirname(path.abspath(__file__))) if not path.exists(config_file): return None config = ConfigParser() config.read(config_file) if('MapQuest' not in config.sections()): return None key = config.get('MapQuest', 'key') try: r = requests.get('https://open.mapquestapi.com/nominatim/v1/reverse.php?key=%s&lat=%s&lon=%s&format=json' % (key, lat, lon)) return r.json() except requests.exceptions.RequestException as e: print e return None except ValueError as e: print r.text print e return None def place_name(lat, lon): geolocation_info = reverse_lookup(lat, lon) if(geolocation_info is not None): if('address' in geolocation_info): address = geolocation_info['address'] if('city' in address): return address['city'] elif('state' in address): return address['state'] elif('country' in address): return address['country'] return None
Use absolute path for config file so it works with apps like Hazel
Use absolute path for config file so it works with apps like Hazel
Python
apache-2.0
zserg/elodie,zingo/elodie,jmathai/elodie,jmathai/elodie,zingo/elodie,zserg/elodie,zserg/elodie,jmathai/elodie,zserg/elodie,jmathai/elodie,zingo/elodie
--- +++ @@ -7,11 +7,12 @@ if(lat is None or lon is None): return None - if not path.exists('./config.ini'): + config_file = '%s/config.ini' % path.dirname(path.dirname(path.abspath(__file__))) + if not path.exists(config_file): return None config = ConfigParser() - config.read('./config.ini') + config.read(config_file) if('MapQuest' not in config.sections()): return None
678a6ed9e24b8f91933584ab211ee9f3b2643aad
nipy/modalities/fmri/__init__.py
nipy/modalities/fmri/__init__.py
""" TODO """ __docformat__ = 'restructuredtext' import fmri, hrf, utils import fmristat from nipy.testing import Tester test = Tester().test bench = Tester().bench
""" TODO """ __docformat__ = 'restructuredtext' import fmri, hrf, utils, formula import fmristat from nipy.testing import Tester test = Tester().test bench = Tester().bench
Fix missing import of formula in fmri.
Fix missing import of formula in fmri.
Python
bsd-3-clause
alexis-roche/nipy,alexis-roche/nireg,arokem/nipy,arokem/nipy,bthirion/nipy,arokem/nipy,alexis-roche/nipy,nipy/nireg,alexis-roche/register,nipy/nipy-labs,alexis-roche/nipy,alexis-roche/niseg,alexis-roche/nireg,bthirion/nipy,arokem/nipy,bthirion/nipy,alexis-roche/nipy,alexis-roche/register,bthirion/nipy,alexis-roche/niseg,alexis-roche/register,nipy/nipy-labs,nipy/nireg
--- +++ @@ -4,7 +4,7 @@ __docformat__ = 'restructuredtext' -import fmri, hrf, utils +import fmri, hrf, utils, formula import fmristat from nipy.testing import Tester
4fd47cf73d59cb9e9d83cea12026878f65df858a
numscons/core/allow_undefined.py
numscons/core/allow_undefined.py
import os from subprocess import Popen, PIPE def get_darwin_version(): p = Popen(["sw_vers", "-productVersion"], stdout = PIPE, stderr = PIPE) st = p.wait() if st: raise RuntimeError( "Could not execute sw_vers -productVersion to get version") verstring = p.stdout.next() a, b, c = verstring.split(".") try: major = int(a) minor = int(b) micro = int(c) return major, minor, micro except ValueError: raise ValueError("Could not parse version string %s" % verstring) def get_darwin_allow_undefined(): """Return the list of flags to allow undefined symbols in a shared library. On MAC OS X, takes MACOSX_DEPLOYMENT_TARGET into account.""" major, minor, micro = get_darwin_version() if major == 10: if minor < 3: flag = ["-Wl,-undefined", "-Wl,suppress"] else: try: deptarget = os.environ['MACOSX_DEPLOYMENT_TARGET'] ma, mi = deptarget.split(".") if mi < 3: flag = ['-Wl,-flat_namespace', '-Wl,-undefined', '-Wl,suppress'] else: flag = ['-Wl,-undefined', '-Wl,dynamic_lookup'] except KeyError: flag = ['-Wl,-flat_namespace', '-Wl,-undefined', '-Wl,suppress'] else: # Non existing mac os x ? Just set to empty list flag = [] return flag
"""This module handle platform specific link options to allow undefined symbols in shared libraries and dynamically loaded libraries.""" import os from subprocess import Popen, PIPE def get_darwin_version(): p = Popen(["sw_vers", "-productVersion"], stdout = PIPE, stderr = PIPE) st = p.wait() if st: raise RuntimeError( "Could not execute sw_vers -productVersion to get version") verstring = p.stdout.next() a, b, c = verstring.split(".") try: major = int(a) minor = int(b) micro = int(c) return major, minor, micro except ValueError: raise ValueError("Could not parse version string %s" % verstring) def get_darwin_allow_undefined(): """Return the list of flags to allow undefined symbols in a shared library. On MAC OS X, takes MACOSX_DEPLOYMENT_TARGET into account.""" major, minor, micro = get_darwin_version() if major == 10: if minor < 3: flag = ["-Wl,-undefined", "-Wl,suppress"] else: try: deptarget = os.environ['MACOSX_DEPLOYMENT_TARGET'] ma, mi = deptarget.split(".") if mi < 3: flag = ['-Wl,-flat_namespace', '-Wl,-undefined', '-Wl,suppress'] else: flag = ['-Wl,-undefined', '-Wl,dynamic_lookup'] except KeyError: flag = ['-Wl,-flat_namespace', '-Wl,-undefined', '-Wl,suppress'] else: # Non existing mac os x ? Just set to empty list flag = [] return flag
Add docstring + fix missing import in allow_udnefined module.
Add docstring + fix missing import in allow_udnefined module.
Python
bsd-3-clause
cournape/numscons,cournape/numscons,cournape/numscons
--- +++ @@ -1,5 +1,6 @@ +"""This module handle platform specific link options to allow undefined symbols +in shared libraries and dynamically loaded libraries.""" import os - from subprocess import Popen, PIPE def get_darwin_version(): @@ -33,7 +34,8 @@ deptarget = os.environ['MACOSX_DEPLOYMENT_TARGET'] ma, mi = deptarget.split(".") if mi < 3: - flag = ['-Wl,-flat_namespace', '-Wl,-undefined', '-Wl,suppress'] + flag = ['-Wl,-flat_namespace', '-Wl,-undefined', + '-Wl,suppress'] else: flag = ['-Wl,-undefined', '-Wl,dynamic_lookup'] except KeyError:
4e0ec0fdf791fc9af1e83171b54054bd53d5536b
django_evolution/compat/apps.py
django_evolution/compat/apps.py
try: from django.apps.registry import apps get_apps = apps.get_apps cache = None except ImportError: from django.db.models.loading import cache get_apps = cache.get_apps apps = None def get_app(app_label, emptyOK=False): """Return the app with the given label. This returns the app from the app registry on Django >= 1.7, and from the old-style cache on Django < 1.7. The ``emptyOK`` argument is ignored for Django >= 1.7. """ if apps: return apps.get_app(app_label) else: return apps.get_app(app_label, emptyOK) __all__ = ['get_app', 'get_apps']
try: from django.apps.registry import apps # Django >= 1.7 get_apps = apps.get_apps cache = None except ImportError: from django.db.models.loading import cache # Django < 1.7 get_apps = cache.get_apps apps = None def get_app(app_label, emptyOK=False): """Return the app with the given label. This returns the app from the app registry on Django >= 1.7, and from the old-style cache on Django < 1.7. The ``emptyOK`` argument is ignored for Django >= 1.7. """ if apps: return get_app(app_label) else: return get_app(app_label, emptyOK) __all__ = ['get_app', 'get_apps']
Fix the new get_app compatibility function.
Fix the new get_app compatibility function. The get_app compatibility function was trying to call get_apps() on the apps variable, instead of calling the extracted version that was pre-computed. Now it uses the correct versions.
Python
bsd-3-clause
beanbaginc/django-evolution
--- +++ @@ -1,11 +1,13 @@ try: from django.apps.registry import apps + # Django >= 1.7 get_apps = apps.get_apps cache = None except ImportError: from django.db.models.loading import cache + # Django < 1.7 get_apps = cache.get_apps apps = None @@ -19,9 +21,9 @@ The ``emptyOK`` argument is ignored for Django >= 1.7. """ if apps: - return apps.get_app(app_label) + return get_app(app_label) else: - return apps.get_app(app_label, emptyOK) + return get_app(app_label, emptyOK) __all__ = ['get_app', 'get_apps']
7e9db30ee426993b881357b0158d243d0a4c15c9
djlint/analyzers/db_backends.py
djlint/analyzers/db_backends.py
import ast from .base import BaseAnalyzer, Result class DB_BackendsVisitor(ast.NodeVisitor): def __init__(self): self.found = [] removed_items = { 'django.db.backends.postgresql': 'django.db.backends.postgresql_psycopg2', } def visit_Str(self, node): if node.s in self.removed_items.keys(): self.found.append((node.s, node)) class DB_BackendsAnalyzer(BaseAnalyzer): def analyze_file(self, filepath, code): if not isinstance(code, ast.AST): return visitor = DB_BackendsVisitor() visitor.visit(code) for name, node in visitor.found: propose = visitor.removed_items[name] result = Result( description = ( '%r database backend has beed deprecated in Django 1.3 ' 'and removed in 1.4. Use %r instead.' % (name, propose) ), path = filepath, line = node.lineno) lines = self.get_file_lines(filepath, node.lineno, node.lineno) for lineno, important, text in lines: result.source.add_line(lineno, text, important) result.solution.add_line(lineno, text.replace(name, propose), important) yield result
import ast from .base import BaseAnalyzer, Result class DB_BackendsVisitor(ast.NodeVisitor): def __init__(self): self.found = [] removed_items = { 'django.db.backends.postgresql': 'django.db.backends.postgresql_psycopg2', } def visit_Str(self, node): if node.s in self.removed_items.keys(): self.found.append((node.s, node)) class DB_BackendsAnalyzer(BaseAnalyzer): def analyze_file(self, filepath, code): if not isinstance(code, ast.AST): return visitor = DB_BackendsVisitor() visitor.visit(code) for name, node in visitor.found: propose = visitor.removed_items[name] result = Result( description = ( '%r database backend has beed deprecated in Django 1.2 ' 'and removed in 1.4. Use %r instead.' % (name, propose) ), path = filepath, line = node.lineno) lines = self.get_file_lines(filepath, node.lineno, node.lineno) for lineno, important, text in lines: result.source.add_line(lineno, text, important) result.solution.add_line(lineno, text.replace(name, propose), important) yield result
Fix database backends analyzer: 'postgresql' backend has been deprecated in 1.2
Fix database backends analyzer: 'postgresql' backend has been deprecated in 1.2
Python
isc
alfredhq/djlint
--- +++ @@ -30,7 +30,7 @@ propose = visitor.removed_items[name] result = Result( description = ( - '%r database backend has beed deprecated in Django 1.3 ' + '%r database backend has beed deprecated in Django 1.2 ' 'and removed in 1.4. Use %r instead.' % (name, propose) ), path = filepath,
82ae5e5cf3da57af771aa688ec7d951879423578
big_o/test/test_complexities.py
big_o/test/test_complexities.py
import unittest import numpy as np from numpy.testing import assert_array_almost_equal from big_o import complexities class TestComplexities(unittest.TestCase): def test_compute(self): x = np.linspace(10, 100, 100) y = 3.0 * x + 2.0 linear = complexities.Linear() linear.fit(x, y) assert_array_almost_equal(linear.compute(x), y, 10) def test_not_fitted(self): linear = complexities.Linear() self.assertRaises(complexities.NotFittedError, linear.compute, 100) def test_str_includes_units(self): x = np.linspace(10, 100, 100) y = 3.0 * x + 2.0 linear = complexities.Linear() linear.fit(x, y) linear_str = str(linear) assert '(sec)' in linear_str
import unittest import numpy as np from numpy.testing import assert_array_almost_equal from big_o import complexities class TestComplexities(unittest.TestCase): def test_compute(self): desired = [ (lambda x: 2.+x*0., complexities.Constant), (lambda x: 5.*x+3., complexities.Linear), (lambda x: 8.1*x**2.+0.9, complexities.Quadratic), (lambda x: 1.0*x**3+11.0, complexities.Cubic), (lambda x: 5.2*x**2.5, complexities.Polynomial), (lambda x: 8.5*np.log(x)+99.0, complexities.Logarithmic), (lambda x: 1.7*x*np.log(x)+2.74, complexities.Linearithmic), (lambda x: 3.14**x, complexities.Exponential) ] x = np.linspace(10, 100, 100) for f, class_ in desired: y = f(x) complexity = class_() complexity.fit(x, y) assert_array_almost_equal(complexity.compute(x), y, 10, "compute() failed to match expected values for class %r" % class_) def test_not_fitted(self): linear = complexities.Linear() self.assertRaises(complexities.NotFittedError, linear.compute, 100) def test_str_includes_units(self): x = np.linspace(10, 100, 100) y = 3.0 * x + 2.0 linear = complexities.Linear() linear.fit(x, y) linear_str = str(linear) assert '(sec)' in linear_str
Add compute test cases for all complexity classes
Add compute test cases for all complexity classes
Python
bsd-3-clause
pberkes/big_O
--- +++ @@ -8,11 +8,23 @@ class TestComplexities(unittest.TestCase): def test_compute(self): + desired = [ + (lambda x: 2.+x*0., complexities.Constant), + (lambda x: 5.*x+3., complexities.Linear), + (lambda x: 8.1*x**2.+0.9, complexities.Quadratic), + (lambda x: 1.0*x**3+11.0, complexities.Cubic), + (lambda x: 5.2*x**2.5, complexities.Polynomial), + (lambda x: 8.5*np.log(x)+99.0, complexities.Logarithmic), + (lambda x: 1.7*x*np.log(x)+2.74, complexities.Linearithmic), + (lambda x: 3.14**x, complexities.Exponential) + ] + x = np.linspace(10, 100, 100) - y = 3.0 * x + 2.0 - linear = complexities.Linear() - linear.fit(x, y) - assert_array_almost_equal(linear.compute(x), y, 10) + for f, class_ in desired: + y = f(x) + complexity = class_() + complexity.fit(x, y) + assert_array_almost_equal(complexity.compute(x), y, 10, "compute() failed to match expected values for class %r" % class_) def test_not_fitted(self): linear = complexities.Linear()
15551db6af7e32ec4cc13fee4b73cb95b5a3a774
runtests.py
runtests.py
import sys try: from django.conf import settings settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ "default": { "ENGINE": "django.db.backends.sqlite3", } }, ROOT_URLCONF="holonet.urls", INSTALLED_APPS=[ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sites", "holonet_django", ], SITE_ID=1, NOSE_ARGS=['-s'], ) try: import django setup = django.setup except AttributeError: pass else: setup() from django_nose import NoseTestSuiteRunner except ImportError: import traceback traceback.print_exc() raise ImportError("To fix this error, run: pip install -r requirements-test.txt") def run_tests(*test_args): if not test_args: test_args = ['tests'] # Run tests test_runner = NoseTestSuiteRunner(verbosity=1) failures = test_runner.run_tests(test_args) if failures: sys.exit(failures) if __name__ == '__main__': run_tests(*sys.argv[1:])
import sys try: from django.conf import settings settings.configure( DEBUG=True, USE_TZ=True, DATABASES={ "default": { "ENGINE": "django.db.backends.sqlite3", } }, ROOT_URLCONF="holonet.urls", INSTALLED_APPS=[ "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sites", "holonet_django", ], MIDDLEWARE_CLASSES=[], SITE_ID=1, NOSE_ARGS=['-s'], ) try: import django setup = django.setup except AttributeError: pass else: setup() from django_nose import NoseTestSuiteRunner except ImportError: import traceback traceback.print_exc() raise ImportError("To fix this error, run: pip install -r requirements-test.txt") def run_tests(*test_args): if not test_args: test_args = ['tests'] # Run tests test_runner = NoseTestSuiteRunner(verbosity=1) failures = test_runner.run_tests(test_args) if failures: sys.exit(failures) if __name__ == '__main__': run_tests(*sys.argv[1:])
Add MIDDLEWARE_CLASSES to settings, remove warnings
Add MIDDLEWARE_CLASSES to settings, remove warnings
Python
mit
webkom/django-holonet
--- +++ @@ -18,6 +18,7 @@ "django.contrib.sites", "holonet_django", ], + MIDDLEWARE_CLASSES=[], SITE_ID=1, NOSE_ARGS=['-s'], )
219c474860ca7674070ef19fa95f0282b7c92399
mpages/admin.py
mpages/admin.py
from django.contrib import admin from .models import Page, PageRead, Tag class PageAdmin(admin.ModelAdmin): search_fields = ["title"] list_display = ["title", "parent", "updated"] prepopulated_fields = {"slug": ("title",)} readonly_fields = ["updated"] ordering = ["parent", "title"] filter_horizontal = ("tags",) save_on_top = True fieldsets = ( ( None, { "fields": ( ("content",), ("title", "parent"), ("slug", "updated"), ("tags",), ) }, ), ) admin.site.register(Page, PageAdmin) admin.site.register(PageRead) admin.site.register(Tag)
from django.contrib import admin from .models import Page, PageRead, Tag class PageAdmin(admin.ModelAdmin): search_fields = ["title"] list_display = ["title", "parent", "updated"] prepopulated_fields = {"slug": ("title",)} readonly_fields = ["updated"] ordering = ["parent", "title"] filter_horizontal = ("tags",) save_on_top = True fieldsets = ( ( None, { "fields": ( ("content",), ("title", "parent"), ("slug", "updated"), ("tags",), ) }, ), ) def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "parent": kwargs["queryset"] = Page.objects.order_by("title") return super(PageAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) admin.site.register(Page, PageAdmin) admin.site.register(PageRead) admin.site.register(Tag)
Order parents in Admin select field
Order parents in Admin select field
Python
bsd-3-clause
ahernp/DMCM,ahernp/DMCM,ahernp/DMCM
--- +++ @@ -25,6 +25,11 @@ ), ) + def formfield_for_foreignkey(self, db_field, request, **kwargs): + if db_field.name == "parent": + kwargs["queryset"] = Page.objects.order_by("title") + return super(PageAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) + admin.site.register(Page, PageAdmin) admin.site.register(PageRead)
f76783ddb616c74e22feb003cb12952375cad658
corehq/apps/hqwebapp/encoders.py
corehq/apps/hqwebapp/encoders.py
import json import datetime from django.utils.encoding import force_unicode from django.utils.functional import Promise class LazyEncoder(json.JSONEncoder): """Taken from https://github.com/tomchristie/django-rest-framework/issues/87 This makes sure that ugettext_lazy refrences in a dict are properly evaluated """ def default(self, obj): if isinstance(obj, Promise): return force_unicode(obj) return super(LazyEncoder, self).default(obj)
import json import datetime from decimal import Decimal from django.utils.encoding import force_unicode from django.utils.functional import Promise class DecimalEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, Decimal): return str(obj) return super(DecimalEncoder, self).default(obj) class LazyEncoder(DecimalEncoder): """Taken from https://github.com/tomchristie/django-rest-framework/issues/87 This makes sure that ugettext_lazy refrences in a dict are properly evaluated """ def default(self, obj): if isinstance(obj, Promise): return force_unicode(obj) return super(LazyEncoder, self).default(obj)
Fix for json encoding Decimal values
Fix for json encoding Decimal values
Python
bsd-3-clause
SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
--- +++ @@ -1,10 +1,18 @@ import json import datetime +from decimal import Decimal from django.utils.encoding import force_unicode from django.utils.functional import Promise -class LazyEncoder(json.JSONEncoder): +class DecimalEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, Decimal): + return str(obj) + return super(DecimalEncoder, self).default(obj) + + +class LazyEncoder(DecimalEncoder): """Taken from https://github.com/tomchristie/django-rest-framework/issues/87 This makes sure that ugettext_lazy refrences in a dict are properly evaluated """
991973e554758e7a9881453d7668925902e610b9
tests.py
tests.py
#!/usr/bin/env python import unittest import git_mnemonic as gm class GitMnemonicTests(unittest.TestCase): def test_encode(self): self.assertTrue(gm.encode("master")) def test_decode(self): self.assertTrue(gm.decode("bis alo ama aha")) def test_invertible(self): once = gm.encode("master") self.assertEquals(gm.encode(gm.decode(once)), once) if __name__ == '__main__': unittest.main(verbosity=2)
#!/usr/bin/env python import unittest import git_mnemonic as gm class GitMnemonicTests(unittest.TestCase): def test_encode(self): self.assertTrue(gm.encode("master")) def test_decode(self): self.assertTrue(gm.decode("bis alo ama aha")) def test_invertible(self): once = gm.encode("master") self.assertEquals(gm.encode(gm.decode(once)), once) if __name__ == '__main__': suite = unittest.TestLoader().loadTestsFromTestCase(GitMnemonicTests) results = unittest.TextTestRunner(verbosity=2).run(suite) if not results.wasSuccessful(): import sys sys.exit(1)
Make unittest test runner work in older pythons
Make unittest test runner work in older pythons
Python
mit
glenjamin/git-mnemonic
--- +++ @@ -16,4 +16,8 @@ self.assertEquals(gm.encode(gm.decode(once)), once) if __name__ == '__main__': - unittest.main(verbosity=2) + suite = unittest.TestLoader().loadTestsFromTestCase(GitMnemonicTests) + results = unittest.TextTestRunner(verbosity=2).run(suite) + if not results.wasSuccessful(): + import sys + sys.exit(1)
cb08d25f49b8b4c5177c8afdd9a69330992ee854
tests/replay/test_replay.py
tests/replay/test_replay.py
# -*- coding: utf-8 -*- """ test_replay ----------- """ import pytest from cookiecutter import replay, main, exceptions def test_get_replay_file_name(): """Make sure that replay.get_file_name generates a valid json file path.""" assert replay.get_file_name('foo', 'bar') == 'foo/bar.json' @pytest.fixture(params=[ {'no_input': True}, {'extra_context': {}}, {'no_input': True, 'extra_context': {}}, ]) def invalid_kwargs(request): return request.param def test_raise_on_invalid_mode(invalid_kwargs): with pytest.raises(exceptions.InvalidModeException): main.cookiecutter('foo', replay=True, **invalid_kwargs)
# -*- coding: utf-8 -*- """ test_replay ----------- """ import pytest from cookiecutter import replay, main, exceptions def test_get_replay_file_name(): """Make sure that replay.get_file_name generates a valid json file path.""" assert replay.get_file_name('foo', 'bar') == 'foo/bar.json' @pytest.fixture(params=[ {'no_input': True}, {'extra_context': {}}, {'no_input': True, 'extra_context': {}}, ]) def invalid_kwargs(request): return request.param def test_raise_on_invalid_mode(invalid_kwargs): with pytest.raises(exceptions.InvalidModeException): main.cookiecutter('foo', replay=True, **invalid_kwargs) def test_main_does_not_invoke_dump_but_load(mocker): mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config') mock_gen_context = mocker.patch('cookiecutter.main.generate_context') mock_gen_files = mocker.patch('cookiecutter.main.generate_files') mock_replay_dump = mocker.patch('cookiecutter.main.dump') mock_replay_load = mocker.patch('cookiecutter.main.load') main.cookiecutter('foobar', replay=True) assert not mock_prompt.called assert not mock_gen_context.called assert not mock_replay_dump.called assert mock_replay_load.called assert mock_gen_files.called def test_main_does_not_invoke_load_but_dump(mocker): mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config') mock_gen_context = mocker.patch('cookiecutter.main.generate_context') mock_gen_files = mocker.patch('cookiecutter.main.generate_files') mock_replay_dump = mocker.patch('cookiecutter.main.dump') mock_replay_load = mocker.patch('cookiecutter.main.load') main.cookiecutter('foobar', replay=False) assert mock_prompt.called assert mock_gen_context.called assert mock_replay_dump.called assert not mock_replay_load.called assert mock_gen_files.called
Add tests for a correct behaviour in cookiecutter.main for replay
Add tests for a correct behaviour in cookiecutter.main for replay
Python
bsd-3-clause
christabor/cookiecutter,luzfcb/cookiecutter,hackebrot/cookiecutter,cguardia/cookiecutter,pjbull/cookiecutter,dajose/cookiecutter,michaeljoseph/cookiecutter,moi65/cookiecutter,terryjbates/cookiecutter,takeflight/cookiecutter,terryjbates/cookiecutter,luzfcb/cookiecutter,agconti/cookiecutter,cguardia/cookiecutter,christabor/cookiecutter,audreyr/cookiecutter,stevepiercy/cookiecutter,willingc/cookiecutter,venumech/cookiecutter,stevepiercy/cookiecutter,takeflight/cookiecutter,pjbull/cookiecutter,benthomasson/cookiecutter,agconti/cookiecutter,benthomasson/cookiecutter,Springerle/cookiecutter,ramiroluz/cookiecutter,audreyr/cookiecutter,moi65/cookiecutter,dajose/cookiecutter,hackebrot/cookiecutter,michaeljoseph/cookiecutter,Springerle/cookiecutter,ramiroluz/cookiecutter,venumech/cookiecutter,willingc/cookiecutter
--- +++ @@ -27,3 +27,35 @@ def test_raise_on_invalid_mode(invalid_kwargs): with pytest.raises(exceptions.InvalidModeException): main.cookiecutter('foo', replay=True, **invalid_kwargs) + + +def test_main_does_not_invoke_dump_but_load(mocker): + mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config') + mock_gen_context = mocker.patch('cookiecutter.main.generate_context') + mock_gen_files = mocker.patch('cookiecutter.main.generate_files') + mock_replay_dump = mocker.patch('cookiecutter.main.dump') + mock_replay_load = mocker.patch('cookiecutter.main.load') + + main.cookiecutter('foobar', replay=True) + + assert not mock_prompt.called + assert not mock_gen_context.called + assert not mock_replay_dump.called + assert mock_replay_load.called + assert mock_gen_files.called + + +def test_main_does_not_invoke_load_but_dump(mocker): + mock_prompt = mocker.patch('cookiecutter.main.prompt_for_config') + mock_gen_context = mocker.patch('cookiecutter.main.generate_context') + mock_gen_files = mocker.patch('cookiecutter.main.generate_files') + mock_replay_dump = mocker.patch('cookiecutter.main.dump') + mock_replay_load = mocker.patch('cookiecutter.main.load') + + main.cookiecutter('foobar', replay=False) + + assert mock_prompt.called + assert mock_gen_context.called + assert mock_replay_dump.called + assert not mock_replay_load.called + assert mock_gen_files.called
9547988a1a9ef8faf22d9bfa881f4e542637fd46
utils.py
utils.py
import xmlrpclib import cPickle import subprocess from time import sleep p = None s = None def start_plot_server(): global p if p is None: p = subprocess.Popen(["python", "plot_server.py"]) def stop_plot_server(): if p is not None: p.terminate() sleep(0.01) p.kill() def plot_server_alive(): global s try: s.alive() except Exception, e: if str(e).endswith("Connection refused"): return False else: raise return True def establish_connection(): global s s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True) if not plot_server_alive(): start_plot_server() print "waiting for the plot server to start up..." while not plot_server_alive(): sleep(0.05) print " done." def plot(vert, triangles): print "plotting using mayavi..." v = cPickle.dumps(vert) t = cPickle.dumps(triangles) s.plot(v, t) print " done." establish_connection()
import xmlrpclib import cPickle import subprocess from time import sleep p = None s = None def start_plot_server(): global p if p is None: p = subprocess.Popen(["python", "plot_server.py"]) def stop_plot_server(): if p is not None: p.terminate() sleep(0.01) p.kill() def plot_server_alive(): global s try: s.alive() except Exception, e: if str(e).endswith("Connection refused"): return False else: raise return True def establish_connection(): global s if s is not None: return s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True) if not plot_server_alive(): start_plot_server() print "waiting for the plot server to start up..." while not plot_server_alive(): sleep(0.05) print " done." def plot(vert, triangles): establish_connection() print "plotting using mayavi..." v = cPickle.dumps(vert) t = cPickle.dumps(triangles) s.plot(v, t) print " done."
Establish connection only when needed
Establish connection only when needed
Python
bsd-3-clause
certik/mhd-hermes,certik/mhd-hermes
--- +++ @@ -31,6 +31,8 @@ def establish_connection(): global s + if s is not None: + return s = xmlrpclib.ServerProxy("http://localhost:8000/", allow_none=True) if not plot_server_alive(): start_plot_server() @@ -40,10 +42,10 @@ print " done." def plot(vert, triangles): + establish_connection() print "plotting using mayavi..." v = cPickle.dumps(vert) t = cPickle.dumps(triangles) s.plot(v, t) print " done." -establish_connection()
f3b9cc6392e4c271ae11417357ecdc196f1c3ae7
python_scripts/extractor_python_readability_server.py
python_scripts/extractor_python_readability_server.py
#!/usr/bin/python import sys import os import glob #sys.path.append(os.path.join(os.path.dirname(__file__), "gen-py")) sys.path.append(os.path.join(os.path.dirname(__file__),"gen-py/thrift_solr/")) sys.path.append(os.path.dirname(__file__) ) from thrift.transport import TSocket from thrift.server import TServer #import thrift_solr import ExtractorService import sys import readability import readability def extract_with_python_readability( raw_content ): doc = readability.Document( raw_content ) return [ u'' + doc.short_title(), u'' + doc.summary() ] class ExtractorHandler: def extract_html( self, raw_html ): #print raw_html #raw_html = raw_html.encode( 'utf-8' ) ret = extract_with_python_readability( raw_html ) #print ret[1] return ret handler = ExtractorHandler() processor = ExtractorService.Processor(handler) listening_socket = TSocket.TServerSocket(port=9090) server = TServer.TThreadPoolServer(processor, listening_socket) print ("[Server] Started") server.serve()
#!/usr/bin/python import sys import os import glob #sys.path.append(os.path.join(os.path.dirname(__file__), "gen-py")) sys.path.append(os.path.join(os.path.dirname(__file__),"gen-py/thrift_solr/")) sys.path.append(os.path.dirname(__file__) ) from thrift.transport import TSocket from thrift.transport import TTransport from thrift.protocol import TBinaryProtocol from thrift.server import TServer from thrift.protocol.TBinaryProtocol import TBinaryProtocolAccelerated #import thrift_solr import ExtractorService import sys import readability import readability def extract_with_python_readability( raw_content ): doc = readability.Document( raw_content ) return [ u'' + doc.short_title(), u'' + doc.summary() ] class ExtractorHandler: def extract_html( self, raw_html ): #print raw_html #raw_html = raw_html.encode( 'utf-8' ) ret = extract_with_python_readability( raw_html ) #print ret[1] return ret handler = ExtractorHandler() processor = ExtractorService.Processor(handler) listening_socket = TSocket.TServerSocket(port=9090) tfactory = TTransport.TBufferedTransportFactory() #pfactory = TBinaryProtocol.TBinaryProtocolFactory() pfactory = TBinaryProtocol.TBinaryProtocolAcceleratedFactory() server = TServer.TThreadPoolServer(processor, listening_socket, tfactory, pfactory) print ("[Server] Started") server.serve()
Use the TBinaryProtocolAccelerated protocol instead of TBinaryProtocol to improve performance.
Use the TBinaryProtocolAccelerated protocol instead of TBinaryProtocol to improve performance.
Python
agpl-3.0
AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud
--- +++ @@ -8,7 +8,11 @@ sys.path.append(os.path.dirname(__file__) ) from thrift.transport import TSocket +from thrift.transport import TTransport +from thrift.protocol import TBinaryProtocol from thrift.server import TServer +from thrift.protocol.TBinaryProtocol import TBinaryProtocolAccelerated + #import thrift_solr @@ -40,7 +44,11 @@ handler = ExtractorHandler() processor = ExtractorService.Processor(handler) listening_socket = TSocket.TServerSocket(port=9090) -server = TServer.TThreadPoolServer(processor, listening_socket) +tfactory = TTransport.TBufferedTransportFactory() +#pfactory = TBinaryProtocol.TBinaryProtocolFactory() +pfactory = TBinaryProtocol.TBinaryProtocolAcceleratedFactory() + +server = TServer.TThreadPoolServer(processor, listening_socket, tfactory, pfactory) print ("[Server] Started") server.serve()
a6034ffa4d81bb57c5d86876ee72e0436426e6d2
imhotep_foodcritic/plugin.py
imhotep_foodcritic/plugin.py
from imhotep.tools import Tool from collections import defaultdict import json import os import logging log = logging.getLogger(__name__) class FoodCritic(Tool): def invoke(self, dirname, filenames=set(), config_file=None, file_list=None): retval = defaultdict(lambda: defaultdict(list)) if file_list is None: cmd = "find %s/cookbooks -type d -maxdepth 1 ! -path %s/cookbooks | xargs foodcritic" % (dirname, dirname) else: cmd = "foodcritic %s" % (" ".join(file_list)) log.debug("Command: %s", cmd) try: output = self.executor(cmd) for line in output.split('\n'): rule, message, file_name, line_number = line.split(':') file_name = file_name.lstrip() file_name = file_name.replace(dirname, "")[1:] message = "[%s](http://acrmp.github.io/foodcritic/#%s): %s" % (rule, rule, message) retval[file_name][line_number].append(message) except: pass return retval
from imhotep.tools import Tool from collections import defaultdict import json import os import logging log = logging.getLogger(__name__) class FoodCritic(Tool): def invoke(self, dirname, filenames=set(), config_file=None): retval = defaultdict(lambda: defaultdict(list)) if len(filenames) == 0: cmd = "find %s/cookbooks -type d -maxdepth 1 ! -path %s/cookbooks | xargs foodcritic" % (dirname, dirname) else: filenames = ["%s/%s" % (dirname, "/".join(filename.split('/')[:2])) for filename in filenames] cmd = "foodcritic %s" % (" ".join(filenames)) log.debug("Command: %s", cmd) try: output = self.executor(cmd) for line in output.split('\n'): rule, message, file_name, line_number = line.split(':') file_name = file_name.lstrip() file_name = file_name.replace(dirname, "")[1:] message = "[%s](http://acrmp.github.io/foodcritic/#%s): %s" % (rule, rule, message) retval[file_name][line_number].append(message) except: pass return retval
Update support for passing in filenames.
Update support for passing in filenames. This shit is gross
Python
mit
scottjab/imhotep_foodcritic
--- 
+++ 
@@ -12,13 +12,13 @@
     def invoke(self,
                dirname,
                filenames=set(),
-               config_file=None,
-               file_list=None):
+               config_file=None):
         retval = defaultdict(lambda: defaultdict(list))
-        if file_list is None:
+        if len(filenames) == 0:
             cmd = "find %s/cookbooks -type d -maxdepth 1 ! -path %s/cookbooks | xargs foodcritic" % (dirname, dirname)
         else:
-            cmd = "foodcritic %s" % (" ".join(file_list))
+            filenames = ["%s/%s" % (dirname, "/".join(filename.split('/')[:2])) for filename in filenames]
+            cmd = "foodcritic %s" % (" ".join(filenames))
         log.debug("Command: %s", cmd)
         try:
             output = self.executor(cmd)
3ce1b928f36c314ab07c334843b2db96626f469e
kyokai/asphalt.py
kyokai/asphalt.py
""" Asphalt framework mixin for Kyokai. """ import logging import asyncio from functools import partial from typing import Union from asphalt.core import Component, resolve_reference, Context from typeguard import check_argument_types from kyokai.app import Kyokai from kyokai.protocol import KyokaiProtocol from kyokai.context import HTTPRequestContext logger = logging.getLogger("Kyokai") class KyoukaiComponent(Component): def __init__(self, app: Union[str, Kyokai], ip: str = '0.0.0.0', port: int = 4444, **cfg): assert check_argument_types() if not isinstance(app, Kyokai): self.app = resolve_reference(app) else: self.app = app self.ip = ip self.port = port self._extra_cfg = cfg # Set HTTPRequestContext's `cfg` val to the extra config. HTTPRequestContext.cfg = self._extra_cfg self.server = None self.app.reconfigure(cfg) def get_protocol(self, ctx: Context): return KyokaiProtocol(self.app, ctx) async def start(self, ctx: Context): """ Starts a Kyokai server. """ protocol = self.get_protocol(ctx) self.server = await asyncio.get_event_loop().create_server(protocol, self.ip, self.port) logger.info("Kyokai serving on {}:{}.".format(self.ip, self.port))
""" Asphalt framework mixin for Kyokai. """ import logging import asyncio from functools import partial from typing import Union from asphalt.core import Component, resolve_reference, Context from typeguard import check_argument_types from kyokai.app import Kyokai from kyokai.protocol import KyokaiProtocol from kyokai.context import HTTPRequestContext logger = logging.getLogger("Kyokai") class KyoukaiComponent(Component): def __init__(self, app: Union[str, Kyokai], ip: str = '0.0.0.0', port: int = 4444, **cfg): assert check_argument_types() if not isinstance(app, Kyokai): self.app = resolve_reference(app) else: self.app = app self.ip = ip self.port = port self._extra_cfg = cfg # Set HTTPRequestContext's `cfg` val to the extra config. HTTPRequestContext.cfg = self._extra_cfg self.server = None self.app.reconfigure(cfg) def get_protocol(self, ctx: Context): return KyokaiProtocol(self.app, ctx) async def start(self, ctx: Context): """ Starts a Kyokai server. """ protocol = partial(self.get_protocol, ctx) self.server = await asyncio.get_event_loop().create_server(protocol, self.ip, self.port) logger.info("Kyokai serving on {}:{}.".format(self.ip, self.port))
Make this into a partial to get the protocol correctly.
Make this into a partial to get the protocol correctly.
Python
mit
SunDwarf/Kyoukai
--- 
+++ 
@@ -42,6 +42,6 @@
         """
         Starts a Kyokai server.
         """
-        protocol = self.get_protocol(ctx)
+        protocol = partial(self.get_protocol, ctx)
         self.server = await asyncio.get_event_loop().create_server(protocol, self.ip, self.port)
         logger.info("Kyokai serving on {}:{}.".format(self.ip, self.port))
b352c3e1f5e8812d29f2e8a1bca807bea5da8cc4
test/test_hx_launcher.py
test/test_hx_launcher.py
import pytest_twisted

from hendrix.ux import main
from hendrix.options import HendrixOptionParser


def test_no_arguments_gives_help_text(mocker):

    class MockFile(object):
        @classmethod
        def write(cls, whatever):
            cls.things_written = whatever

    class MockStdOut(object):
        @classmethod
        def write(cls, whatever):
            HendrixOptionParser.print_help(MockFile)
            assert MockFile.things_written == whatever

    mocker.patch('sys.stdout', new=MockStdOut)
    main([])
from hendrix.options import HendrixOptionParser
from hendrix.ux import main


def test_no_arguments_gives_help_text(mocker):
    class MockFile(object):
        @classmethod
        def write(cls, whatever):
            cls.things_written = whatever

    class MockStdOut(object):
        @classmethod
        def write(cls, whatever):
            HendrixOptionParser.print_help(MockFile)
            assert MockFile.things_written == whatever

    mocker.patch('sys.stdout', new=MockStdOut)
    main([])
Test for the hx launcher.
Test for the hx launcher.
Python
mit
hangarunderground/hendrix,hendrix/hendrix,hangarunderground/hendrix,hendrix/hendrix,jMyles/hendrix,hendrix/hendrix,jMyles/hendrix,hangarunderground/hendrix,hangarunderground/hendrix,jMyles/hendrix
--- 
+++ 
@@ -1,11 +1,8 @@
-import pytest_twisted
-
+from hendrix.options import HendrixOptionParser
 from hendrix.ux import main
-from hendrix.options import HendrixOptionParser
 
 
 def test_no_arguments_gives_help_text(mocker):
-
     class MockFile(object):
         @classmethod
         def write(cls, whatever):
ad21c9255f6246944cd032ad50082c0aca46fcb3
neurokernel/tools/mpi.py
neurokernel/tools/mpi.py
#!/usr/bin/env python

"""
MPI utilities.
"""

from mpi4py import MPI
import twiggy


class MPIOutput(twiggy.outputs.Output):
    """
    Output messages to a file via MPI I/O.
    """

    def __init__(self, name, format, comm,
                 mode=MPI.MODE_CREATE | MPI.MODE_WRONLY,
                 close_atexit=True):
        self.filename = name
        self._format = format if format is not None else self._noop_format
        self.comm = comm
        self.mode = mode
        super(MPIOutput, self).__init__(format, close_atexit)

    def _open(self):
        self.file = MPI.File.Open(self.comm, self.filename,
                                  self.mode)

    def _close(self):
        self.file.Close()

    def _write(self, x):
        self.file.Iwrite_shared(x)
#!/usr/bin/env python

"""
MPI utilities.
"""

from mpi4py import MPI
import twiggy


class MPIOutput(twiggy.outputs.Output):
    """
    Output messages to a file via MPI I/O.
    """

    def __init__(self, name, format, comm,
                 mode=MPI.MODE_CREATE | MPI.MODE_WRONLY,
                 close_atexit=True):
        self.filename = name
        self._format = format if format is not None else self._noop_format
        self.comm = comm
        self.mode = mode
        super(MPIOutput, self).__init__(format, close_atexit)

    def _open(self):
        self.file = MPI.File.Open(self.comm, self.filename,
                                  self.mode)

    def _close(self):
        self.file.Close()

    def _write(self, x):
        self.file.Iwrite_shared(x)
        # This seems to be necessary to prevent some log lines from being lost:
        self.file.Sync()
Call MPIOutput.file.Sync() in MPIOutput.file._write() to prevent log lines from intermittently being lost.
Call MPIOutput.file.Sync() in MPIOutput.file._write() to prevent log lines from intermittently being lost.
Python
bsd-3-clause
cerrno/neurokernel
--- 
+++ 
@@ -31,3 +31,5 @@
 
     def _write(self, x):
         self.file.Iwrite_shared(x)
+        # This seems to be necessary to prevent some log lines from being lost:
+        self.file.Sync()