column        type           min    max
commit        stringlengths  40     40
old_file      stringlengths  4      264
new_file      stringlengths  4      264
old_contents  stringlengths  0      3.26k
new_contents  stringlengths  1      4.43k
subject       stringlengths  15     624
message       stringlengths  15     4.7k
lang          stringclasses  3 values
license       stringclasses  13 values
repos         stringlengths  5      91.5k
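Each row below is one commit-edit record: the commit hash, the file path before and after the change, the full pre- and post-commit file contents, the commit subject and message, the source language, the license, and the repositories containing the commit. A minimal sketch of how such a dataset could be loaded and filtered with the Hugging Face `datasets` library follows; the dataset identifier and split name are placeholders, not values taken from this preview.

from datasets import load_dataset

# "your-org/your-commit-dataset" is a hypothetical identifier; substitute the
# actual repo id of this dataset. "train" is assumed to be the split name.
ds = load_dataset("your-org/your-commit-dataset", split="train")

# Inspect one record: each carries pre-/post-commit contents plus metadata.
example = ds[0]
print(example["subject"])   # one-line commit subject
print(example["lang"])      # source language (one of 3 classes per the stats above)
print(example["repos"])     # comma-separated list of repos containing the commit

# Keep only Python records under the MIT license.
python_mit = ds.filter(lambda r: r["lang"] == "Python" and r["license"] == "mit")
print(len(python_mit))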
5edb070308e2597047f82ecb44cb84b314b488c9
qotr/handlers/base.py
qotr/handlers/base.py
import logging from fnmatch import fnmatch from tornado import web from qotr.config import config L = logging.getLogger(__name__) ALLOWED_ORIGINS = [o.strip() for o in config.allowed_origin.split(',')] def set_cors_headers(handler): ''' Given a handler, set the CORS headers on it. ''' origin = handler.request.headers.get('Origin') L.debug('Setting CORS headers for: %s based on %s', origin, ALLOWED_ORIGINS) if origin in ALLOWED_ORIGINS or any(fnmatch(origin, o) for o in ALLOWED_ORIGINS): handler.set_header("Access-Control-Allow-Origin", origin) handler.set_header("Access-Control-Allow-Headers", "Content-Type") # pylint: disable=W0223 class Base(web.RequestHandler): ''' A base request handler. ''' def prepare(self): protocol = self.request.headers.get('x-forwarded-proto') if config.redirect_to_https and \ self.request.method == 'GET' and \ protocol == 'http': self.redirect('https://{}{}'.format( self.request.host.split(':')[0], self.request.path ), permanent=True)
import logging from fnmatch import fnmatch from tornado import web from qotr.config import config L = logging.getLogger(__name__) ALLOWED_ORIGINS = [o.strip() for o in config.allowed_origin.split(',')] def set_cors_headers(handler): ''' Given a handler, set the CORS headers on it. ''' origin = handler.request.headers.get('Origin', '') L.debug('Setting CORS headers for: %s based on %s', origin, ALLOWED_ORIGINS) if origin in ALLOWED_ORIGINS or any(fnmatch(origin, o) for o in ALLOWED_ORIGINS): handler.set_header("Access-Control-Allow-Origin", origin) handler.set_header("Access-Control-Allow-Headers", "Content-Type") # pylint: disable=W0223 class Base(web.RequestHandler): ''' A base request handler. ''' def prepare(self): protocol = self.request.headers.get('x-forwarded-proto') if config.redirect_to_https and \ self.request.method == 'GET' and \ protocol == 'http': self.redirect('https://{}{}'.format( self.request.host.split(':')[0], self.request.path ), permanent=True)
Use blank string instead of None as default origin
Use blank string instead of None as default origin Signed-off-by: Rohan Jain <[email protected]>
Python
agpl-3.0
crodjer/qotr,sbuss/qotr,crodjer/qotr,sbuss/qotr,rmoorman/qotr,curtiszimmerman/qotr,curtiszimmerman/qotr,sbuss/qotr,rmoorman/qotr,crodjer/qotr,sbuss/qotr,curtiszimmerman/qotr,rmoorman/qotr,crodjer/qotr,curtiszimmerman/qotr,rmoorman/qotr
0adadcb3f04e2ecb98b5ca5de1afba2ba7208d23
spacy/tests/parser/test_beam_parse.py
spacy/tests/parser/test_beam_parse.py
import spacy import pytest @pytest.mark.models def test_beam_parse(): nlp = spacy.load('en_core_web_sm') doc = nlp(u'Australia is a country', disable=['ner']) ents = nlp.entity(doc, beam_width=2) print(ents)
# coding: utf8 from __future__ import unicode_literals import pytest @pytest.mark.models('en') def test_beam_parse(EN): doc = EN(u'Australia is a country', disable=['ner']) ents = EN.entity(doc, beam_width=2) print(ents)
Fix beam parse model test
Fix beam parse model test
Python
mit
aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy
1f2deb95ba543bf05dd78f1df2e9ee6d17a2c4c3
buffer/tests/test_profiles_manager.py
buffer/tests/test_profiles_manager.py
import json from nose.tools import eq_, raises from mock import MagicMock, patch from buffer.managers.profiles import Profiles from buffer.models.profile import PATHS mocked_response = { 'name': 'me', 'service': 'twiter', 'id': 1 } def test_profiles_manager_all_method(): ''' Test basic profiles retrieving ''' mocked_api = MagicMock() mocked_api.get.return_value = [{'a':'b'}] with patch('buffer.managers.profiles.Profile') as mocked_profile: mocked_profile.return_value = 1 profiles = Profiles(api=mocked_api).all() eq_(profiles, [1]) mocked_api.get.assert_called_once_with(url=PATHS['GET_PROFILES']) mocked_profile.assert_called_once_with(mocked_api, {'a': 'b'})
import json from nose.tools import eq_, raises from mock import MagicMock, patch from buffer.managers.profiles import Profiles from buffer.models.profile import Profile, PATHS mocked_response = { 'name': 'me', 'service': 'twiter', 'id': 1 } def test_profiles_manager_all_method(): ''' Test basic profiles retrieving ''' mocked_api = MagicMock() mocked_api.get.return_value = [{'a':'b'}] with patch('buffer.managers.profiles.Profile') as mocked_profile: mocked_profile.return_value = 1 profiles = Profiles(api=mocked_api).all() eq_(profiles, [1]) mocked_api.get.assert_called_once_with(url=PATHS['GET_PROFILES']) mocked_profile.assert_called_once_with(mocked_api, {'a': 'b'}) def test_profiles_manager_filter_method(): ''' Test basic profiles filtering based on some minimal criteria ''' mocked_api = MagicMock() profiles = Profiles(mocked_api, [{'a':'b'}, {'a': 'c'}]) eq_(profiles.filter(a='b'), [{'a': 'b'}]) def test_profiles_manager_filter_method_empty(): ''' Test basic profiles filtering when the manager is empty ''' mocked_api = MagicMock() mocked_api.get.return_value = [{'a':'b'}, {'a': 'c'}] profiles = Profiles(api=mocked_api) eq_(profiles.filter(a='b'), [Profile(mocked_api, {'a': 'b'})])
Test profiles manager filterting method
Test profiles manager filterting method
Python
mit
vtemian/buffpy,bufferapp/buffer-python
7469a750a7303b346a91376ae16dc42b69208c18
pm/utils/filesystem.py
pm/utils/filesystem.py
""" Filesystem utilities """ import contextlib import os RUN_RE = '\d{6}_[a-zA-Z\d]+_\d{4}_[AB][A-Z\d]{9}' @contextlib.contextmanager def chdir(new_dir): """Context manager to temporarily change to a new directory. """ cur_dir = os.getcwd() # This is weird behavior. I'm removing and and we'll see if anything breaks. #safe_makedir(new_dir) os.chdir(new_dir) try: yield finally: os.chdir(cur_dir)
""" Filesystem utilities """ import contextlib import os RUN_RE = '\d{6}_[a-zA-Z\d\-]+_\d{4}_[AB][A-Z\d]{9}' @contextlib.contextmanager def chdir(new_dir): """Context manager to temporarily change to a new directory. """ cur_dir = os.getcwd() # This is weird behavior. I'm removing and and we'll see if anything breaks. #safe_makedir(new_dir) os.chdir(new_dir) try: yield finally: os.chdir(cur_dir)
Support for X-Ten machine IDs
Support for X-Ten machine IDs
Python
mit
SciLifeLab/TACA,SciLifeLab/TACA,kate-v-stepanova/TACA,senthil10/TACA,b97pla/TACA,guillermo-carrasco/TACA,kate-v-stepanova/TACA,SciLifeLab/TACA,senthil10/TACA,vezzi/TACA,b97pla/TACA,guillermo-carrasco/TACA,vezzi/TACA
8d3931fd5effabf9c5d56cb03ae15630ae984963
postalcodes_mexico/cli.py
postalcodes_mexico/cli.py
# -*- coding: utf-8 -*- """Console script for postalcodes_mexico.""" import sys import click @click.command() def main(args=None): """Console script for postalcodes_mexico.""" click.echo("Replace this message by putting your code into " "postalcodes_mexico.cli.main") click.echo("See click documentation at http://click.pocoo.org/") return 0 if __name__ == "__main__": sys.exit(main()) # pragma: no cover
# -*- coding: utf-8 -*- """Console script for postalcodes_mexico.""" import sys import click from postalcodes_mexico import postalcodes_mexico @click.command() @click.argument('postalcode', type=str) def main(postalcode): """Console script for postalcodes_mexico.""" places = postalcodes_mexico.places(postalcode) click.echo(places) return 0 if __name__ == "__main__": sys.exit(main()) # pragma: no cover
Create simple CLI for the `places` function
Create simple CLI for the `places` function
Python
mit
FlowFX/postalcodes_mexico
006b645315190eb532ede9c36c77a7fbc4c27237
quotations/apps/api/v1.py
quotations/apps/api/v1.py
from tastypie.authorization import DjangoAuthorization from tastypie import fields from tastypie.resources import ModelResource, ALL_WITH_RELATIONS from quotations.apps.quotations import models as quotations_models from quotations.libs.auth import MethodAuthentication from quotations.libs.serializers import Serializer class BaseMeta(object): serializer = Serializer() authentication = MethodAuthentication() authorization = DjangoAuthorization() class AuthorResource(ModelResource): class Meta(BaseMeta): queryset = quotations_models.Author.objects.all() resource_name = 'authors' filtering = { 'name': ['exact', 'contains'] } class QuotationResource(ModelResource): author = fields.ForeignKey(AuthorResource, 'author', full=True) class Meta(BaseMeta): queryset = quotations_models.Quotation.objects.all() resource_name = 'quotations' filtering = { 'text': ['contains'], 'author': ALL_WITH_RELATIONS }
from tastypie.authorization import DjangoAuthorization from tastypie import fields from tastypie.resources import ModelResource, ALL_WITH_RELATIONS from quotations.apps.quotations import models as quotations_models from quotations.libs.auth import MethodAuthentication from quotations.libs.serializers import Serializer class BaseMeta(object): serializer = Serializer() authentication = MethodAuthentication() authorization = DjangoAuthorization() class AuthorResource(ModelResource): class Meta(BaseMeta): queryset = quotations_models.Author.objects.all() resource_name = 'authors' filtering = { 'name': ['exact', 'contains'] } class QuotationResource(ModelResource): author = fields.ForeignKey(AuthorResource, 'author', full=True) class Meta(BaseMeta): queryset = quotations_models.Quotation.objects.all() resource_name = 'quotations' filtering = { 'text': ['contains'], 'author': ALL_WITH_RELATIONS } def get_object_list(self, request): object_list = super(QuotationResource, self).get_object_list(request) if request.GET.get('random', False): object_list = object_list.order_by('?') return object_list
Allow retrieval of a random quote
Allow retrieval of a random quote
Python
mit
jessamynsmith/socialjusticebingo,jessamynsmith/underquoted,jessamynsmith/socialjusticebingo,jessamynsmith/underquoted,jessamynsmith/underquoted,jessamynsmith/socialjusticebingo,jessamynsmith/underquoted
8be6b576007f89fad50ea1dfacad46614c0a97c5
apps/domain/src/main/core/exceptions.py
apps/domain/src/main/core/exceptions.py
"""Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message)
"""Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) class EnvironmentNotFoundError(PyGridError): def __init__(self): message = "Environment not found!" super().__init__(message) class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message)
ADD new exception -> EnvironmentNotFound!
ADD new exception -> EnvironmentNotFound!
Python
apache-2.0
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
8c65f7e2e075bae468401a1eee799251c1d928df
raspberry_pi/config.py
raspberry_pi/config.py
#!/usr/bin/env python2 ''' More complex configuration, to run in addition to 'config.sh'. ''' if raw_input('Set USB sound card as default ? [y][N]') in 'y', 'Y', 'yes': original = open('/etc/modprobe.d/alsa-base.conf').read() modified = original.replace('options snd-usb-audio index=-2', 'options snd-usb-audio index=0') open('/etc/modprobe.d/alsa-base.conf', 'w').write(modified) print("Default sound card set to USB via '/etc/modprobe.d/alsa-base.conf'.")
#!/usr/bin/env python2 ''' More complex configuration, to run in addition to 'config.sh'. ''' if raw_input('Set USB sound card as default ? [y][N]') in 'y', 'Y', 'yes': original = open('/etc/modprobe.d/alsa-base.conf').read() modified = original.replace('options snd-usb-audio index=-2', 'options snd-usb-audio index=0') open('/etc/modprobe.d/alsa-base.conf', 'w').write(modified) print("Default sound card set to USB via '/etc/modprobe.d/alsa-base.conf'.") if raw_input('Keep crazy logs due to USB sound in /var/log/debug and kernel ? [y][N]') not in 'y', 'Y', 'yes': # Documentation from http://root42.blogspot.be/2013/04/delay-warnings-when-using-usb-audio-on.html open('/etc/modprobe.d/snd_usb_audio.conf', 'a').write('\noptions snd-usb-audio nrpacks=1\n') print("Anti-log option added.")
Fix for Delay warnings when using USB audio (untested)
Fix for Delay warnings when using USB audio (untested)
Python
agpl-3.0
oksome/Home,oksome/Home,oksome/Home
e0b82cf9ed24870cb313328e5539acc5fe7f6508
stock_awesome/levels/chock_a_block.py
stock_awesome/levels/chock_a_block.py
import time from stock_awesome.obj import market def main(): """ Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask price. """ m = market.StockAPI('WEB29978261', 'NOWUEX', 'BBCM') #collection of orders placed orders = {} filled = 0 upper_limit = 3300 #try to buy 100000 to_send = 1000 while to_send > 0: quote = m.quote() ask = quote.get('ask') if ask and ask < upper_limit: r = m.buy(quote['askSize'], quote['ask'], order_type='fill-or-kill') to_send -= 1 orders[r['id']] = r orders = update_orders(m, orders) filled += update_filled(orders) else: time.sleep(1) def update_orders(m, orders): """ update order status """ return {o: m.order_status(o) for o in orders} def update_filled(orders): """ Remove filled orders and update our count. """ closed = [o for o in orders if not orders[o]['open']] #remove and sum filled orders filled = sum(orders.pop(o)['totalFilled'] for o in closed) return filled if __name__ == '__main__': main()
import time from stock_awesome.obj import market def main(): """ Algorithm: Wait for an ask, then send a fill or kill for the quantity of the ask at the ask price. """ m = market.StockAPI('RAJ40214463', 'SSMCEX', 'IPSO') #collection of orders placed orders = {} filled = 0 upper_limit = 2450 #try to buy 100000 to_buy = 100000 while to_buy > 0: quote = m.quote() ask = quote.get('ask', 0) bid = quote.get('bid') if ask < upper_limit: r = m.buy(quote['askSize'], ask, order_type='fill-or-kill') to_buy -= r['totalFilled'] print("Bought {}, {} remaining".format(r['totalFilled'], to_buy)) else: time.sleep(1) print('done') def update_orders(m, orders): """ update order status """ return {o: m.order_status(o) for o in orders} def update_filled(orders): """ Remove filled orders and update our count. """ closed = [o for o in orders if not orders[o]['open']] #remove and sum filled orders filled = sum(orders.pop(o)['totalFilled'] for o in closed) return filled if __name__ == '__main__': main()
Add some (inefective) score maximizing attempts
Add some (inefective) score maximizing attempts
Python
mit
ForeverWintr/stock_awesome
89193a6571dd74501533160b409cad8835c51625
gcframe/tests/urls.py
gcframe/tests/urls.py
# -*- coding: utf-8 -*- """ Simple urls for use in testing the gcframe app. """ from __future__ import unicode_literals # The defaults module is deprecated in Django 1.5, but necessary to # support Django 1.3. drop ``.defaults`` when dropping 1.3 support. from django.conf.urls.defaults import patterns, url from .views import normal, framed, exempt urlpatterns = patterns('', url(r'normal/$', normal, name='gcframe-test-normal'), url(r'framed/$', framed, name='gcframe-test-framed'), url(r'exempt/$', exempt, name='gcframe-test-exempt'), )
# -*- coding: utf-8 -*- """ Simple urls for use in testing the gcframe app. """ from __future__ import unicode_literals try: from django.conf.urls import patterns, url except ImportError: # Django 1.3 from django.conf.urls.defaults import patterns, url from .views import normal, framed, exempt urlpatterns = patterns('', url(r'normal/$', normal, name='gcframe-test-normal'), url(r'framed/$', framed, name='gcframe-test-framed'), url(r'exempt/$', exempt, name='gcframe-test-exempt'), )
Handle a Django deprecation properly.
Handle a Django deprecation properly. Should have done this in commit cb4eae7b7.
Python
bsd-3-clause
benspaulding/django-gcframe
242b3bfe70d90044d2496cbc8109cd703b3bccab
greengraph/command.py
greengraph/command.py
from argparse import ArgumentParser from matplotlib import pyplot as plt from graph import Greengraph def process(): parser = ArgumentParser( description="Produce graph of green land between two locations") parser.add_argument("--start", required=True, help="The starting location ") parser.add_argument("--end", required=True, help="The ending location") parser.add_argument("--steps", required=True, help="The number of steps between the starting and ending locations") parser.add_argument("--out", required=True, help="The output filename") arguments = parser.parse_args() mygraph = Greengraph(arguments.start, arguments.end) data = mygraph.green_between(arguments.steps) plt.plot(data) # TODO add a title and axis labels to this graph plt.savefig(arguments.out) if __name__ == "__main__": process()
from argparse import ArgumentParser from matplotlib import pyplot as plt from graph import Greengraph def process(): parser = ArgumentParser( description="Produce graph quantifying the amount of green land between two locations") parser.add_argument("--start", required=True, help="The starting location ") parser.add_argument("--end", required=True, help="The ending location") parser.add_argument("--steps", help="The number of steps between the starting and ending locations, defaults to 10") parser.add_argument("--out", help="The output filename, defaults to graph.png") arguments = parser.parse_args() mygraph = Greengraph(arguments.start, arguments.end) if arguments.steps: data = mygraph.green_between(arguments.steps) else: data = mygraph.green_between(10) plt.plot(data) # TODO add a title and axis labels to this graph if arguments.out: plt.savefig(arguments.out) else: plt.savefig("graph.png") if __name__ == "__main__": process()
Make steps and out arguments optional and add defaults
Make steps and out arguments optional and add defaults
Python
mit
MikeVasmer/GreenGraphCoursework
6bfb23294c2cc445479f4c8098b8e62647cf01bd
test/test_notification_integration.py
test/test_notification_integration.py
import os import select import groundstation.fs_watcher as fs_watcher from groundstation.peer_socket import PeerSocket from integration_fixture import StationIntegrationFixture, \ TestListener, \ TestClient class StationFSWatcherIntegration(StationIntegrationFixture): def test_notifies_peer(self): read_sockets = [] write_sockets = [] def tick(): return select.select(read_sockets, write_sockets, [], 1) addr = os.path.join(self.dir, "listener") listener = TestListener(addr) client = TestClient(addr) peer = listener.accept(PeerSocket) watcher = fs_watcher.FSWatcher(self.stations[0].store.object_root) read_sockets.append(client) read_sockets.append(watcher) self.stations[0].write("trolololol") (sread, _, _) = tick() self.assertIn(watcher, sread) obj_name = watcher.read() client.notify_new_object(self.stations[0], obj_name) client.send() peer.recv() data = peer.packet_queue.pop() gizmo = self.stations[1].gizmo_factory.hydrate(data, peer) assert gizmo is not None, "gizmo_factory returned None" gizmo.process() watcher.kill()
import os import select import groundstation.fs_watcher as fs_watcher from groundstation.peer_socket import PeerSocket from groundstation.utils import path2id from integration_fixture import StationIntegrationFixture, \ TestListener, \ TestClient class StationFSWatcherIntegration(StationIntegrationFixture): def test_notifies_peer(self): read_sockets = [] write_sockets = [] def tick(): return select.select(read_sockets, write_sockets, [], 1) addr = os.path.join(self.dir, "listener") listener = TestListener(addr) client = TestClient(addr) peer = listener.accept(PeerSocket) watcher = fs_watcher.FSWatcher(self.stations[0].store.object_root) read_sockets.append(client) read_sockets.append(watcher) self.stations[0].write("trolololol") (sread, _, _) = tick() self.assertIn(watcher, sread) obj_name = path2id(watcher.read()) client.notify_new_object(self.stations[0], obj_name) client.send() peer.recv() data = peer.packet_queue.pop() gizmo = self.stations[1].gizmo_factory.hydrate(data, peer) assert gizmo is not None, "gizmo_factory returned None" gizmo.process() peer.send() client.recv() data = client.packet_queue.pop() gizmo = self.stations[0].gizmo_factory.hydrate(data, peer) assert gizmo is not None, "gizmo_factory returned None" self.assertEqual(gizmo.verb, "FETCHOBJECT") self.assertEqual(gizmo.payload, obj_name) gizmo.process() watcher.kill()
Validate that we can translate a NEWOBJECT into a FETCHOBJECT
Validate that we can translate a NEWOBJECT into a FETCHOBJECT
Python
mit
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
d6e87778c82eecc07b73a91d50cc2d9034a4428c
judicious/__init__.py
judicious/__init__.py
# -*- coding: utf-8 -*- """Top-level package for judicious.""" __author__ = """Jordan W. Suchow""" __email__ = '[email protected]' __version__ = '0.1.0' from judicious import ( BASE_URL, register, ) __all__ = ( "BASE_URL", "register", )
# -*- coding: utf-8 -*- """Top-level package for judicious.""" __author__ = """Jordan W. Suchow""" __email__ = '[email protected]' __version__ = '0.1.0' from .judicious import ( BASE_URL, register, ) __all__ = ( "BASE_URL", "register", )
Fix imports for Python 2 & 3 compatibility
Fix imports for Python 2 & 3 compatibility
Python
mit
suchow/judicious,suchow/judicious,suchow/judicious
6fe5a416ed229e7ec8efab9d6b3dac43f16515b6
corehq/apps/domain/__init__.py
corehq/apps/domain/__init__.py
from corehq.preindex import ExtraPreindexPlugin from django.conf import settings ExtraPreindexPlugin.register('domain', __file__, ( settings.NEW_USERS_GROUPS_DB, settings.NEW_FIXTURES_DB, 'meta'))
from corehq.preindex import ExtraPreindexPlugin from django.conf import settings ExtraPreindexPlugin.register('domain', __file__, ( settings.NEW_DOMAINS_DB, settings.NEW_USERS_GROUPS_DB, settings.NEW_FIXTURES_DB, 'meta', ))
Add the new domains db
Add the new domains db
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
1de53534c48d1eecc7fea5d2040977afd97dacb2
vispy/visuals/graphs/layouts/__init__.py
vispy/visuals/graphs/layouts/__init__.py
import inspect from .random import random from .circular import circular from .force_directed import fruchterman_reingold _layout_map = { 'random': random, 'circular': circular, 'force_directed': fruchterman_reingold, 'spring_layout': fruchterman_reingold } AVAILABLE_LAYOUTS = _layout_map.keys() def get_layout(name, *args, **kwargs): """ Retrieve a graph layout Some graph layouts accept extra options. Please refer to their documentation for more information. Parameters ---------- name : string The name of the layout. The variable `AVAILABLE_LAYOUTS` contains all available layouts. *args Positional arguments which are passed to the layout. **kwargs Keyword arguments which are passed to the layout. Returns ------- layout : callable The callable generator which will calculate the graph layout """ if name not in _layout_map: raise KeyError( "Graph layout '{}' not found. Should be one of {}".format( name, ", ".join(AVAILABLE_LAYOUTS) ) ) layout = _layout_map[name] if inspect.isclass(layout): layout = layout(*args, **kwargs) return layout
import inspect from .random import random from .circular import circular from .force_directed import fruchterman_reingold _layout_map = { 'random': random, 'circular': circular, 'force_directed': fruchterman_reingold, 'spring_layout': fruchterman_reingold } AVAILABLE_LAYOUTS = tuple(_layout_map.keys()) def get_layout(name, *args, **kwargs): """ Retrieve a graph layout Some graph layouts accept extra options. Please refer to their documentation for more information. Parameters ---------- name : string The name of the layout. The variable `AVAILABLE_LAYOUTS` contains all available layouts. *args Positional arguments which are passed to the layout. **kwargs Keyword arguments which are passed to the layout. Returns ------- layout : callable The callable generator which will calculate the graph layout """ if name not in _layout_map: raise KeyError( "Graph layout '{}' not found. Should be one of {}".format( name, ", ".join(AVAILABLE_LAYOUTS) ) ) layout = _layout_map[name] if inspect.isclass(layout): layout = layout(*args, **kwargs) return layout
Make sure AVAILABLE_LAYOUTS is a tuple
Make sure AVAILABLE_LAYOUTS is a tuple The .keys() function is a generator in Python 3.
Python
bsd-3-clause
ghisvail/vispy,drufat/vispy,drufat/vispy,Eric89GXL/vispy,ghisvail/vispy,michaelaye/vispy,Eric89GXL/vispy,drufat/vispy,michaelaye/vispy,ghisvail/vispy,Eric89GXL/vispy,michaelaye/vispy
e12432b0c97d1ddebf16df821fe6c77bb8b6a66b
wagtail/wagtailsites/wagtail_hooks.py
wagtail/wagtailsites/wagtail_hooks.py
from django.conf.urls import include, url from django.core import urlresolvers from django.utils.translation import ugettext_lazy as _ from wagtail.wagtailcore import hooks from wagtail.wagtailadmin.menu import MenuItem from wagtail.wagtailsites import urls def register_admin_urls(): return [ url(r'^sites/', include(urls)), ] hooks.register('register_admin_urls', register_admin_urls) def construct_main_menu(request, menu_items): if request.user.is_superuser: menu_items.append( MenuItem(_('Sites'), urlresolvers.reverse('wagtailsites_index'), classnames='icon icon-site', order=602) ) hooks.register('construct_main_menu', construct_main_menu)
from django.conf.urls import include, url from django.core import urlresolvers from django.utils.translation import ugettext_lazy as _ from wagtail.wagtailcore import hooks from wagtail.wagtailadmin.menu import MenuItem from wagtail.wagtailsites import urls @hooks.register('register_admin_urls') def register_admin_urls(): return [ url(r'^sites/', include(urls)), ] class SitesMenuItem(MenuItem): def is_shown(self, request): return request.user.is_superuser @hooks.register('register_settings_menu_item') def register_sites_menu_item(): return MenuItem(_('Sites'), urlresolvers.reverse('wagtailsites_index'), classnames='icon icon-site', order=602)
Move Sites to the settings menu (and use decorator syntax for hooks)
Move Sites to the settings menu (and use decorator syntax for hooks)
Python
bsd-3-clause
mixxorz/wagtail,wagtail/wagtail,KimGlazebrook/wagtail-experiment,gasman/wagtail,mayapurmedia/wagtail,kaedroho/wagtail,jnns/wagtail,serzans/wagtail,hanpama/wagtail,iho/wagtail,marctc/wagtail,kurtw/wagtail,nilnvoid/wagtail,nrsimha/wagtail,gasman/wagtail,jorge-marques/wagtail,Toshakins/wagtail,rsalmaso/wagtail,takeflight/wagtail,Tivix/wagtail,chimeno/wagtail,nilnvoid/wagtail,hanpama/wagtail,torchbox/wagtail,takeflight/wagtail,marctc/wagtail,hanpama/wagtail,chimeno/wagtail,iho/wagtail,taedori81/wagtail,benjaoming/wagtail,takeshineshiro/wagtail,bjesus/wagtail,hamsterbacke23/wagtail,wagtail/wagtail,Tivix/wagtail,torchbox/wagtail,Klaudit/wagtail,rv816/wagtail,m-sanders/wagtail,nealtodd/wagtail,kaedroho/wagtail,FlipperPA/wagtail,chrxr/wagtail,mayapurmedia/wagtail,nrsimha/wagtail,WQuanfeng/wagtail,zerolab/wagtail,KimGlazebrook/wagtail-experiment,benjaoming/wagtail,janusnic/wagtail,taedori81/wagtail,tangentlabs/wagtail,iho/wagtail,kurtrwall/wagtail,FlipperPA/wagtail,mephizzle/wagtail,thenewguy/wagtail,Pennebaker/wagtail,mikedingjan/wagtail,gasman/wagtail,nimasmi/wagtail,hamsterbacke23/wagtail,timorieber/wagtail,bjesus/wagtail,m-sanders/wagtail,stevenewey/wagtail,kurtrwall/wagtail,marctc/wagtail,darith27/wagtail,inonit/wagtail,mixxorz/wagtail,chrxr/wagtail,jorge-marques/wagtail,hamsterbacke23/wagtail,JoshBarr/wagtail,nutztherookie/wagtail,davecranwell/wagtail,mephizzle/wagtail,KimGlazebrook/wagtail-experiment,Tivix/wagtail,jordij/wagtail,janusnic/wagtail,mayapurmedia/wagtail,rjsproxy/wagtail,hamsterbacke23/wagtail,mixxorz/wagtail,davecranwell/wagtail,jorge-marques/wagtail,gasman/wagtail,davecranwell/wagtail,zerolab/wagtail,wagtail/wagtail,mixxorz/wagtail,dresiu/wagtail,chimeno/wagtail,kurtrwall/wagtail,mikedingjan/wagtail,timorieber/wagtail,nealtodd/wagtail,mjec/wagtail,dresiu/wagtail,serzans/wagtail,quru/wagtail,jnns/wagtail,nimasmi/wagtail,tangentlabs/wagtail,kaedroho/wagtail,bjesus/wagtail,takeshineshiro/wagtail,takeshineshiro/wagtail,nutztherookie/wagtail,dresiu/wagtail,rjsproxy/wagtail,torchbox/wagtail,kaedroho/wagtail,nutztherookie/wagtail,rv816/wagtail,kurtw/wagtail,kaedroho/wagtail,WQuanfeng/wagtail,timorieber/wagtail,quru/wagtail,Pennebaker/wagtail,kurtw/wagtail,jordij/wagtail,dresiu/wagtail,zerolab/wagtail,serzans/wagtail,Pennebaker/wagtail,thenewguy/wagtail,iho/wagtail,iansprice/wagtail,Toshakins/wagtail,gasman/wagtail,stevenewey/wagtail,m-sanders/wagtail,JoshBarr/wagtail,zerolab/wagtail,rsalmaso/wagtail,nilnvoid/wagtail,inonit/wagtail,thenewguy/wagtail,rjsproxy/wagtail,wagtail/wagtail,taedori81/wagtail,jorge-marques/wagtail,jnns/wagtail,chrxr/wagtail,Klaudit/wagtail,iansprice/wagtail,nealtodd/wagtail,takeflight/wagtail,Klaudit/wagtail,takeflight/wagtail,tangentlabs/wagtail,nutztherookie/wagtail,Klaudit/wagtail,FlipperPA/wagtail,Toshakins/wagtail,chimeno/wagtail,timorieber/wagtail,quru/wagtail,gogobook/wagtail,mixxorz/wagtail,rv816/wagtail,stevenewey/wagtail,nilnvoid/wagtail,jnns/wagtail,mikedingjan/wagtail,rjsproxy/wagtail,nimasmi/wagtail,darith27/wagtail,mayapurmedia/wagtail,takeshineshiro/wagtail,nealtodd/wagtail,wagtail/wagtail,bjesus/wagtail,mjec/wagtail,janusnic/wagtail,jordij/wagtail,thenewguy/wagtail,marctc/wagtail,rsalmaso/wagtail,zerolab/wagtail,janusnic/wagtail,kurtw/wagtail,quru/wagtail,inonit/wagtail,chrxr/wagtail,gogobook/wagtail,rv816/wagtail,FlipperPA/wagtail,darith27/wagtail,benjaoming/wagtail,taedori81/wagtail,davecranwell/wagtail,Pennebaker/wagtail,tangentlabs/wagtail,hanpama/wagtail,rsalmaso/wagtail,inonit/wagtail,nimasmi/wagtail,WQuanfeng/wagtail,stevenewey/wagtail,then
ewguy/wagtail,taedori81/wagtail,m-sanders/wagtail,Toshakins/wagtail,gogobook/wagtail,iansprice/wagtail,JoshBarr/wagtail,chimeno/wagtail,jordij/wagtail,nrsimha/wagtail,Tivix/wagtail,jorge-marques/wagtail,WQuanfeng/wagtail,darith27/wagtail,mephizzle/wagtail,mephizzle/wagtail,JoshBarr/wagtail,nrsimha/wagtail,gogobook/wagtail,iansprice/wagtail,benjaoming/wagtail,mjec/wagtail,mjec/wagtail,mikedingjan/wagtail,torchbox/wagtail,rsalmaso/wagtail,kurtrwall/wagtail,dresiu/wagtail,serzans/wagtail,KimGlazebrook/wagtail-experiment
a3004d2de9c15b9d7efebb98ea6533a1a6e07062
nhlstats/__init__.py
nhlstats/__init__.py
import logging from version import __version__ logger = logging.getLogger(__name__) logger.debug('Loading %s ver %s' % (__name__, __version__)) # Actions represents the available textual items that can be passed to main # to drive dispatch. These should be all lower case, no spaces or underscores. actions = [ 'collect', 'update', 'testignore', # Allows the bin app to be run without calling into here. ] def GetDataForGame(game): pass def GetDataForGames(games=[]): for game in games: GetDataForGame(game) def GetGames(updates=False): """ Return a tuple of games. Updates gets finished games to check for updated stats, if False (default) it returns active games. """ if updates: # Get a list of recently finished games to check for updates on pass else: # Get a list of active games. pass def main(action): """ The main entry point for the application """ GetDataForGames(GetGames(action))
import logging from version import __version__ logger = logging.getLogger(__name__) logger.debug('Loading %s ver %s' % (__name__, __version__)) # Actions represents the available textual items that can be passed to main # to drive dispatch. These should be all lower case, no spaces or underscores. actions = [ 'collect', 'update', 'testignore', # Allows the bin app to be run without calling into here. ] def GetDataForGame(game): pass def GetDataForGames(games=[]): for game in games: GetDataForGame(game) def GetGames(active=True): """ Return a tuple of games. Updates gets finished games to check for updated stats, if False (default) it returns active games. """ def main(action='collect'): """ The main entry point for the application """ logger.debug('Dispatching action %s' % action) # By default, we collect info on current games if action == 'collect': GetDataForGames(GetGames(active=True)) # Otherwise we can look to update finished games elif action == 'update': GetDataForGames(GetGames(active=False)) elif action in actions: raise NotImplementedError('Action "%s" is known, but not (yet?) implemented' % action) else: raise ValueError('Unknown action "%s"' % action)
Improve dispatch of actions in main
Improve dispatch of actions in main
Python
mit
fancystats/nhlstats
6689858b2364a668b362a5f00d4c86e57141dc37
numba/cuda/models.py
numba/cuda/models.py
from llvmlite import ir from numba.core.datamodel.registry import register_default from numba.core.extending import register_model, models from numba.core import types from numba.cuda.types import Dim3, GridGroup, CUDADispatcher @register_model(Dim3) class Dim3Model(models.StructModel): def __init__(self, dmm, fe_type): members = [ ('x', types.int32), ('y', types.int32), ('z', types.int32) ] super().__init__(dmm, fe_type, members) @register_model(GridGroup) class GridGroupModel(models.PrimitiveModel): def __init__(self, dmm, fe_type): be_type = ir.IntType(64) super().__init__(dmm, fe_type, be_type) @register_default(types.Float) class FloatModel(models.PrimitiveModel): def __init__(self, dmm, fe_type): if fe_type == types.float32: be_type = ir.FloatType() elif fe_type == types.float16: be_type = ir.IntType(16) elif fe_type == types.float64: be_type = ir.DoubleType() else: raise NotImplementedError(fe_type) super(FloatModel, self).__init__(dmm, fe_type, be_type) register_model(CUDADispatcher)(models.OpaqueModel)
from llvmlite import ir from numba.core.datamodel.registry import register_default from numba.core.extending import register_model, models from numba.core import types from numba.cuda.types import Dim3, GridGroup, CUDADispatcher @register_model(Dim3) class Dim3Model(models.StructModel): def __init__(self, dmm, fe_type): members = [ ('x', types.int32), ('y', types.int32), ('z', types.int32) ] super().__init__(dmm, fe_type, members) @register_model(GridGroup) class GridGroupModel(models.PrimitiveModel): def __init__(self, dmm, fe_type): be_type = ir.IntType(64) super().__init__(dmm, fe_type, be_type) @register_default(types.Float) class FloatModel(models.PrimitiveModel): def __init__(self, dmm, fe_type): if fe_type == types.float16: be_type = ir.IntType(16) elif fe_type == types.float32: be_type = ir.FloatType() elif fe_type == types.float64: be_type = ir.DoubleType() else: raise NotImplementedError(fe_type) super(FloatModel, self).__init__(dmm, fe_type, be_type) register_model(CUDADispatcher)(models.OpaqueModel)
Reorder FloatModel checks in ascending order
CUDA: Reorder FloatModel checks in ascending order
Python
bsd-2-clause
cpcloud/numba,numba/numba,numba/numba,seibert/numba,cpcloud/numba,cpcloud/numba,seibert/numba,seibert/numba,cpcloud/numba,numba/numba,IntelLabs/numba,numba/numba,IntelLabs/numba,cpcloud/numba,seibert/numba,IntelLabs/numba,IntelLabs/numba,seibert/numba,IntelLabs/numba,numba/numba
4a650922ee97b9cb54b203cab9709d511487d9ff
silver/tests/factories.py
silver/tests/factories.py
"""Factories for the silver app.""" # import factory # from .. import models
import factory from silver.models import Provider class ProviderFactory(factory.django.DjangoModelFactory): class Meta: model = Provider
Add factory for the Provider model
Add factory for the Provider model
Python
apache-2.0
PressLabs/silver,PressLabs/silver,PressLabs/silver
9a51358871f04e2a5552621b6ac2c9dbe1ee8345
main.py
main.py
#!/usr/bin/env python from pysnap import Snapchat import secrets s = Snapchat() s.login(secrets.USERNAME, secrets.PASSWORD) friends_to_add = [friend['name'] for friend in s.get_updates()['added_friends'] if friend['type'] == 1] for friend in friends_to_add: s.add_friend(friend) snaps = [snap['id'] for snap in s.get_snaps() if snap['status'] == 1 and snap['media_type'] == 0] for snap in snaps: with open('tmp.jpg', 'wb') as f: f.write(s.get_blob(snap)) media_id = s.upload('tmp.jpg') s.post_story(media_id, 5) s.mark_viewed(snap)
!/usr/bin/env python from pysnap import Snapchat import secrets s = Snapchat() s.login(secrets.USERNAME, secrets.PASSWORD) friends_to_add = [friend['name'] for friend in s.get_updates()['added_friends'] if friend['type'] == 1] for friend in friends_to_add: s.add_friend(friend) snaps = [snap['id'] for snap in s.get_snaps() if snap['status'] == 1 and snap['media_type'] == 0] for snap in snaps: with open('~/SnapchatBot/tmp.jpg', 'wb') as f: f.write(s.get_blob(snap)) media_id = s.upload('~/SnapchatBot/tmp.jpg') s.post_story(media_id, 5) s.mark_viewed(snap)
Save temporary pictures to local directory
Save temporary pictures to local directory
Python
mit
jollex/SnapchatBot
565861256c9cf0f41217df13c4244315b4ebd74d
slybot/slybot/settings.py
slybot/slybot/settings.py
SPIDER_MANAGER_CLASS = 'slybot.spidermanager.SlybotSpiderManager' EXTENSIONS = {'slybot.closespider.SlybotCloseSpider': 1} ITEM_PIPELINES = ['slybot.dupefilter.DupeFilterPipeline'] SPIDER_MIDDLEWARES = {'slybot.spiderlets.SpiderletsMiddleware': 999} # as close as possible to spider output SLYDUPEFILTER_ENABLED = True PROJECT_DIR = 'slybot-project' try: from local_slybot_settings import * except ImportError: pass
SPIDER_MANAGER_CLASS = 'slybot.spidermanager.SlybotSpiderManager' EXTENSIONS = {'slybot.closespider.SlybotCloseSpider': 1} ITEM_PIPELINES = {'slybot.dupefilter.DupeFilterPipeline': 1} SPIDER_MIDDLEWARES = {'slybot.spiderlets.SpiderletsMiddleware': 999} # as close as possible to spider output SLYDUPEFILTER_ENABLED = True PROJECT_DIR = 'slybot-project' try: from local_slybot_settings import * except ImportError: pass
Remove deprecation warning by using new style item pipeline definition
Remove deprecation warning by using new style item pipeline definition
Python
bsd-3-clause
verylasttry/portia,amikey/portia,pombredanne/portia,chennqqi/portia,NoisyText/portia,NoisyText/portia,hmilywb/portia,flip111/portia,NoisyText/portia,nju520/portia,asa1253/portia,NicoloPernigo/portia,amikey/portia,CENDARI/portia,naveenvprakash/portia,PrasannaVenkadesh/portia,CENDARI/portia,lodow/portia-proxy,nju520/portia,flip111/portia,Youwotma/portia,Youwotma/portia,NoisyText/portia,Suninus/portia,SouthStar/portia,sntran/portia,verylasttry/portia,naveenvprakash/portia,asa1253/portia,chennqqi/portia,amikey/portia,hmilywb/portia,hanicker/portia,PrasannaVenkadesh/portia,hanicker/portia,livepy/portia,flip111/portia,hmilywb/portia,livepy/portia,pombredanne/portia,anjuncc/portia,Youwotma/portia,anjuncc/portia,amikey/portia,CENDARI/portia,asa1253/portia,PrasannaVenkadesh/portia,lodow/portia-proxy,Suninus/portia,SouthStar/portia,sntran/portia,naveenvprakash/portia,flip111/portia,hanicker/portia,anjuncc/portia,naveenvprakash/portia,livepy/portia,NicoloPernigo/portia,SouthStar/portia,verylasttry/portia,pombredanne/portia,livepy/portia,asa1253/portia,PrasannaVenkadesh/portia,CENDARI/portia,SouthStar/portia,nju520/portia,pombredanne/portia,verylasttry/portia,sntran/portia,lodow/portia-proxy,sntran/portia,hmilywb/portia,anjuncc/portia,chennqqi/portia,Suninus/portia,hanicker/portia,Suninus/portia,NicoloPernigo/portia,nju520/portia,NicoloPernigo/portia,chennqqi/portia,Youwotma/portia
d0a907872749f1bb54d6e8e160ea170059289623
source/custom/combo.py
source/custom/combo.py
# -*- coding: utf-8 -*- ## \package custom.combo # MIT licensing # See: LICENSE.txt import wx from wx.combo import OwnerDrawnComboBox class ComboBox(OwnerDrawnComboBox): def __init__(self, parent, win_id, value=wx.EmptyString, pos=wx.DefaultPosition, size=wx.DefaultSize, choices=[], style=0, validator=wx.DefaultValidator, name=wx.ComboBoxNameStr): OwnerDrawnComboBox.__init__(self, parent, win_id, value, pos, size, choices, style, validator, name) self.Default = self.GetLabel() self.Priority = [] ## Resets ComboBox to defaults def Reset(self): if not self.Count: self.SetValue(self.Default) return self.Value == self.Default return False
# -*- coding: utf-8 -*- ## \package custom.combo # MIT licensing # See: LICENSE.txt import wx from wx.combo import OwnerDrawnComboBox class ComboBox(OwnerDrawnComboBox): def __init__(self, parent, win_id=wx.ID_ANY, value=wx.EmptyString, pos=wx.DefaultPosition, size=wx.DefaultSize, choices=[], style=0, validator=wx.DefaultValidator, name=wx.ComboBoxNameStr): OwnerDrawnComboBox.__init__(self, parent, win_id, value, pos, size, choices, style, validator, name) self.Default = self.GetLabel() self.Priority = [] ## Resets ComboBox to defaults def Reset(self): if not self.Count: self.SetValue(self.Default) return self.Value == self.Default return False
Set ComboBox class default ID to wx.ID_ANY
Set ComboBox class default ID to wx.ID_ANY
Python
mit
AntumDeluge/desktop_recorder,AntumDeluge/desktop_recorder
2560ca287e81cbefb6037e5688bfa4ef74d85149
clock.py
clock.py
from __future__ import print_function from apscheduler.schedulers.blocking import BlockingScheduler import logging import subprocess logging.basicConfig() scheduler = BlockingScheduler() @scheduler.scheduled_job('interval', minutes=1) def timed_job_min1(): print("Run notifier") subprocess.run( "notifier -concurrency=5 -fetcher-cache=true -notification-interval=1 && curl -sS https://nosnch.in/c411a3a685", shell=True, check=True) # @scheduler.scheduled_job('interval', minutes=10) # def timed_job_min10(): # print("Run notifier") # subprocess.run( # "notifier -concurrency=5 -fetcher-cache=true -notification-interval=10 && curl -sS https://nosnch.in/c411a3a685", # shell=True, # check=True) scheduler.start()
from __future__ import print_function from apscheduler.schedulers.blocking import BlockingScheduler import logging import subprocess logging.basicConfig() scheduler = BlockingScheduler() @scheduler.scheduled_job('interval', minutes=1) def timed_job_min1(): print("Run notifier") subprocess.check_call( "notifier -concurrency=5 -fetcher-cache=true -notification-interval=1 && curl -sS https://nosnch.in/c411a3a685", shell=True) # @scheduler.scheduled_job('interval', minutes=10) # def timed_job_min10(): # print("Run notifier") # subprocess.run( # "notifier -concurrency=5 -fetcher-cache=true -notification-interval=10 && curl -sS https://nosnch.in/c411a3a685", # shell=True, # check=True) scheduler.start()
Change call method for Python2.7
Change call method for Python2.7
Python
mit
oinume/lekcije,oinume/dmm-eikaiwa-fft,oinume/lekcije,oinume/dmm-eikaiwa-fft,oinume/lekcije,oinume/dmm-eikaiwa-fft,oinume/lekcije,oinume/lekcije,oinume/lekcije,oinume/dmm-eikaiwa-fft
6bb9a4ed50ad879c56cdeae0dedb49bba6780780
matchers/volunteer.py
matchers/volunteer.py
import random from base import BaseMatcher class VolunteerMatcher(BaseMatcher): dev_text = "volunteer someone" all_text = "volunteer a dev" dev_candidates = ['Steve', 'Arthur', 'Honza', 'Fernando', 'Nick'] all_candidates = dev_candidates + ['Craig', 'Evan'] def respond(self, message, user=None): if self.dev_text in message.lower(): victim = random.choice(self.dev_candidates) self.speak('%s is it' % victim) elif self.all_text in message.lower(): victim = random.choice(self.all_candidates) self.speak('%s is it' % victim)
import random from base import BaseMatcher class VolunteerMatcher(BaseMatcher): dev_text = "volunteer someone" all_text = "volunteer a dev" dev_candidates = ['sjl', 'arthurdebert', 'honza', 'fernandotakai', 'nicksergeant'] all_candidates = dev_candidates + ['cz', 'ehazlett'] def respond(self, message, user=None): if self.dev_text in message.lower(): victim = random.choice(self.dev_candidates) self.speak('%s is it' % victim) elif self.all_text in message.lower(): victim = random.choice(self.all_candidates) self.speak('%s is it' % victim)
Use IRC Nicks instead of real names.
Use IRC Nicks instead of real names.
Python
bsd-2-clause
honza/nigel
b24083b0991157a1e0d8a533fc1cac3aa2e4523c
similarities/utils.py
similarities/utils.py
import echonest from artists.models import Artist from echonest.models import SimilarResponse from users.models import User from .models import (GeneralArtist, UserSimilarity, Similarity, update_similarities) def add_new_similarities(artist, force_update=False): similarities = [] responses = SimilarResponse.objects.filter( normalized_name=artist.normalized_name) if responses.exists() and not force_update: return # Echo Nest similarities already added user = User.objects.get(email='echonest') artist_names = echonest.get_similar(artist.name) cc_artists = Artist.objects.filter(name__in=artist_names) for cc_artist in cc_artists: kwargs = dict( cc_artist=cc_artist, other_artist=artist, ) UserSimilarity.objects.get_or_create(defaults={'weight': 1}, user=user, **kwargs) similarities.append(Similarity.objects.get_or_create(**kwargs)[0]) update_similarities(similarities) def get_similar(name): artist, _ = GeneralArtist.objects.get_or_create( normalized_name=name.upper(), defaults={'name': name}) add_new_similarities(artist) return Artist.objects.filter(similarity__other_artist=artist, similarity__weight__gt=0)
from django.db.models import Q import echonest from artists.models import Artist from echonest.models import SimilarResponse from users.models import User from .models import (GeneralArtist, UserSimilarity, Similarity, update_similarities) def add_new_similarities(artist, force_update=False): similarities = [] responses = SimilarResponse.objects.filter( normalized_name=artist.normalized_name) if responses.exists() and not force_update: return # Echo Nest similarities already added user = User.objects.get(email='echonest') artist_names = echonest.get_similar(artist.name) cc_artists = Artist.objects.filter(name__in=artist_names) for cc_artist in cc_artists: kwargs = dict( cc_artist=cc_artist, other_artist=artist, ) UserSimilarity.objects.get_or_create(defaults={'weight': 1}, user=user, **kwargs) similarities.append(Similarity.objects.get_or_create(**kwargs)[0]) update_similarities(similarities) def get_similar(name): artist, _ = GeneralArtist.objects.get_or_create( normalized_name=name.upper(), defaults={'name': name}) add_new_similarities(artist) similar = Q(similarity__other_artist=artist, similarity__weight__gt=0) return Artist.objects.filter(similar).order_by('-similarity__weight')
Order similar artist results properly
Order similar artist results properly
Python
bsd-3-clause
FreeMusicNinja/api.freemusic.ninja
5cac0d8b336cb8efe7d819d47abf46ccadea7b29
generic_utils/templatetags.py
generic_utils/templatetags.py
from django import template class InvalidParamsError(template.TemplateSyntaxError): ''' Custom exception class to distinguish usual TemplateSyntaxErrors and validation errors for templatetags introduced by ``validate_params`` function''' pass def validate_params(bits, arguments_count, keyword_positions): ''' Raises exception if passed params (`bits`) do not match signature. Signature is defined by `arguments_count` (acceptible number of params) and keyword_positions (dictionary with positions in keys and keywords in values, for ex. {2:'by', 4:'of', 5:'type', 7:'as'}). ''' if len(bits) != arguments_count+1: raise InvalidTagParamsError("'%s' tag takes %d arguments" % (bits[0], arguments_count,)) for pos in keyword_positions: value = keyword_positions[pos] if bits[pos] != value: raise InvalidTagParamsError("argument #%d to '%s' tag must be '%s'" % (pos, bits[0], value))
from django import template class InvalidParamsError(template.TemplateSyntaxError): ''' Custom exception class to distinguish usual TemplateSyntaxErrors and validation errors for templatetags introduced by ``validate_params`` function''' pass def validate_params(bits, arguments_count, keyword_positions): ''' Raises exception if passed params (`bits`) do not match signature. Signature is defined by `arguments_count` (acceptible number of params) and keyword_positions (dictionary with positions in keys and keywords in values, for ex. {2:'by', 4:'of', 5:'type', 7:'as'}). ''' if len(bits) != arguments_count+1: raise InvalidParamsError("'%s' tag takes %d arguments" % (bits[0], arguments_count,)) for pos in keyword_positions: value = keyword_positions[pos] if bits[pos] != value: raise InvalidParamsError("argument #%d to '%s' tag must be '%s'" % (pos, bits[0], value))
Fix typo/bug in validate_params function
Fix typo/bug in validate_params function
Python
mit
kmike/django-generic-images,kmike/django-generic-images,kmike/django-generic-images
7016b7bb026e0fe557ca06efa81dace9999e526d
hubbot/Modules/Healthcheck.py
hubbot/Modules/Healthcheck.py
from twisted.internet import reactor, protocol from hubbot.moduleinterface import ModuleInterface class Echo(protocol.Protocol): """This is just about the simplest possible protocol""" def dataReceived(self, data): """As soon as any data is received, write it back.""" self.transport.write(data) class Healthcheck(ModuleInterface): port = 9999 def __init__(self, bot): self.healthcheck_server = protocol.ServerFactory() self.healthcheck_server.protocol = Echo super().__init__(bot) def on_load(self): reactor.listenTCP(self.port, self.healthcheck_server) def on_unload(self): reactor.stopListening(self.port) def help(self, message): return f"Hosts an HTTP healthcheck server on port {self.port}."
from twisted.protocols import basic from twisted.internet import protocol, reactor from hubbot.moduleinterface import ModuleInterface class HealthcheckProtocol(basic.LineReceiver): def lineReceived(self, line): response_body = "All is well. Ish." self.sendLine("HTTP/1.0 200 OK".encode("UTF-8")) self.sendLine("Content-Type: text/plain".encode("UTF-8")) self.sendLine(f"Content-Length: {len(response_body)}\n".encode("UTF-8")) self.transport.write(response_body) self.transport.loseConnection() class Healthcheck(ModuleInterface): port = 9999 def __init__(self, bot): self.healthcheck_server = protocol.ServerFactory() self.healthcheck_server.protocol = HealthcheckProtocol super().__init__(bot) def on_load(self): reactor.listenTCP(self.port, self.healthcheck_server) def on_unload(self): reactor.stopListening(self.port) def help(self, message): return f"Hosts an HTTP healthcheck server on port {self.port}."
Write a slightly less dumb protocol?
Write a slightly less dumb protocol?
Python
mit
HubbeKing/Hubbot_Twisted
1704e66caa06524d9b595c312d3a5f5d93683261
app/models/cnes_bed.py
app/models/cnes_bed.py
from sqlalchemy import Column, Integer, String, func from app import db class CnesBed(db.Model): __tablename__ = 'cnes_bed' year = Column(Integer, primary_key=True) region = Column(String(1), primary_key=True) mesoregion = Column(String(4), primary_key=True) microregion = Column(String(5), primary_key=True) state = Column(String(2), primary_key=True) municipality = Column(String(7), primary_key=True) cnes = Column(String(7), primary_key=True) @classmethod def dimensions(cls): return [ 'year', 'region', 'mesoregion', 'microregion', 'state', 'municipality', ] @classmethod def aggregate(cls, value): return { 'beds': func.count(cls.cnes) }[value] @classmethod def values(cls): return ['beds']
from sqlalchemy import Column, Integer, String, func from app import db class CnesBed(db.Model): __tablename__ = 'cnes_bed' year = Column(Integer, primary_key=True) region = Column(String(1), primary_key=True) mesoregion = Column(String(4), primary_key=True) microregion = Column(String(5), primary_key=True) state = Column(String(2), primary_key=True) municipality = Column(String(7), primary_key=True) cnes = Column(String(7), primary_key=True) bed_type = Column(String(7), primary_key=True) @classmethod def dimensions(cls): return [ 'year', 'region', 'mesoregion', 'microregion', 'state', 'municipality', ] @classmethod def aggregate(cls, value): return { 'beds': func.count() }[value] @classmethod def values(cls): return ['beds']
Add bed_type to cnes_establishment model
Add bed_type to cnes_establishment model
Python
mit
DataViva/dataviva-api,daniel1409/dataviva-api
0eaff91695eefcf289e31d8ca93d19ab5bbd392d
katana/expr.py
katana/expr.py
import re class Expr(object): def __init__(self, name, regex): self.name = name self.regex = regex def on_match(self, string): return [self.name, string] def callback(self, _, string): return self.on_match(string) class Scanner(object): def __init__(self, exprs): self.scanner = re.Scanner([ (e.regex, e.callback) for e in exprs ]) def match(self, string): tokens, extra = self.scanner.scan(string) if extra: raise ValueError return tokens
import re class Expr(object): def __init__(self, name, regex): self.name = name self.regex = regex def __iter__(self): yield self.regex yield lambda _, token: self.on_match(token) def on_match(self, string): return [self.name, string] class Scanner(object): def __init__(self, exprs): self.scanner = re.Scanner([ tuple(e) for e in exprs ]) def match(self, string): tokens, extra = self.scanner.scan(string) if extra: raise ValueError return tokens
Refactor Expr object to be more self contained
Refactor Expr object to be more self contained
Python
mit
eugene-eeo/katana
7a427df2157948b5afb5ca3a1d22df72e51f7a89
ckanext/syndicate/tests/test_plugin.py
ckanext/syndicate/tests/test_plugin.py
from mock import patch import unittest import ckan.model as model from ckan.model.domain_object import DomainObjectOperation from ckanext.syndicate.plugin import SyndicatePlugin class TestPlugin(unittest.TestCase): def test_notify_syndicates_task(self): entity = model.Package() entity.extras = {'syndicate': 'true'} with patch('ckanext.syndicate.plugin.syndicate_task') as mock_syndicate: plugin = SyndicatePlugin() plugin.notify(entity, DomainObjectOperation.new) mock_syndicate.assert_called_with(entity.id, 'dataset/create')
Add test for notify dataset/create
Add test for notify dataset/create
Python
agpl-3.0
aptivate/ckanext-syndicate,aptivate/ckanext-syndicate,sorki/ckanext-redmine-autoissues,sorki/ckanext-redmine-autoissues
ce2e5b0dc3ddafe931a902cb7aa24c3adbc246b7
fireplace/cards/wog/neutral_legendary.py
fireplace/cards/wog/neutral_legendary.py
from ..utils import * ## # Minions
from ..utils import * ## # Minions class OG_122: "Mukla, Tyrant of the Vale" play = Give(CONTROLLER, "EX1_014t") * 2 class OG_318: "Hogger, Doom of Elwynn" events = SELF_DAMAGE.on(Summon(CONTROLLER, "OG_318t")) class OG_338: "Nat, the Darkfisher" events = BeginTurn(OPPONENT).on(COINFLIP & Draw(OPPONENT))
Implement corrupted Mukla, Hogger and Nat
Implement corrupted Mukla, Hogger and Nat
Python
agpl-3.0
beheh/fireplace,NightKev/fireplace,jleclanche/fireplace
5ed9e43ec451aca9bdca4391bd35934e5fe4aea3
huts/management/commands/dumphutsjson.py
huts/management/commands/dumphutsjson.py
from django.core.management.base import BaseCommand from huts.utils import export class Command(BaseCommand): args = '' help = 'Dumps the huts, agencies, and regions in the json api format.' def handle(self, *args, **options): print(export.db_as_json().encode('utf-8'))
from optparse import make_option from django.core.management.base import BaseCommand from huts.utils import export class Command(BaseCommand): option_list = BaseCommand.option_list + ( make_option( '--file', help='Write to file instead of stdout' ), ) help = 'Dumps the huts, agencies, and regions in the json api format.' def handle(self, *args, **options): out = options['file'] or self.stdout out.write(export.db_as_json().encode('utf-8'))
Update command to take file argument
Update command to take file argument
Python
mit
dylanfprice/hutmap,dylanfprice/hutmap,dylanfprice/hutmap,muescha/hutmap,muescha/hutmap,dylanfprice/hutmap,muescha/hutmap,muescha/hutmap
f79644e88d64d387ea653a19fef0c2aa0772195f
swaggertester.py
swaggertester.py
import logging import hypothesis from client import SwaggerClient from templates import APITemplate from strategies import hypothesize_parameters log = logging.getLogger(__name__) def validate_schema(schema_path): client = SwaggerClient(schema_path) api_template = APITemplate(client) log.debug("Expanded endpoints as: %r", api_template) for operation in api_template.iter_template_operations(): validate_operation(client, operation) def validate_operation(client, operation): strategy = hypothesize_parameters(operation.parameters) @hypothesis.settings(max_examples=20, suppress_health_check=[hypothesis.HealthCheck.too_slow]) @hypothesis.given(strategy) def single_operation_test(client, params): log.info("Testing with params: %r", params) result = client.request(operation, params) assert result.status in operation.response_codes, \ "{} not in {}".format(result.status, operation.response_codes) single_operation_test(client) if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) validate_schema('http://127.0.0.1:5000/api/schema')
import logging import hypothesis from client import SwaggerClient from templates import APITemplate from strategies import hypothesize_parameters log = logging.getLogger(__name__) def validate_schema(schema_path): client = SwaggerClient(schema_path) api_template = APITemplate(client) log.debug("Expanded endpoints as: %r", api_template) for operation in api_template.iter_template_operations(): validate_operation(client, operation) def validate_operation(client, operation): strategy = hypothesize_parameters(operation.parameters) @hypothesis.settings(max_examples=20) @hypothesis.given(strategy) def single_operation_test(client, operation, params): """Test an operation fully. :param client: The client to use to access the API. :type client: SwaggerClient :param operation: The operation to test. :type operation: OperationTemplate :param params: The dictionary of parameters to use on the operation. :type params: dict """ log.info("Testing with params: %r", params) result = client.request(operation, params) assert result.status in operation.response_codes, \ "{} not in {}".format(result.status, operation.response_codes) assert 'application/json' in result.header['Content-Type'], \ "application/json not in {}".format(result.header['Content-Type']) single_operation_test(client, operation) if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) validate_schema('http://127.0.0.1:5000/api/schema')
Validate returned content-type is application/json
Validate returned content-type is application/json
Python
mit
olipratt/swagger-conformance
a9bc2a097516b36580946518978c1448df4ded6d
apt/get.py
apt/get.py
import os import subprocess def install(*packages): env = os.environ.copy() env['DEBIAN_FRONTEND'] = "noninteractive" subprocess.call(['sudo', '-E', 'apt-get', '-y', 'install'] + packages, env=env) def update(): env = os.environ.copy() env['DEBIAN_FRONTEND'] = "noninteractive" subprocess.call(['sudo', '-E', 'apt-get', 'update'], env=env) def upgrade(): env = os.environ.copy() env['DEBIAN_FRONTEND'] = "noninteractive" subprocess.call(['sudo', '-E', 'apt-get', 'upgrade'], env=env)
import os import subprocess def install(*packages): env = os.environ.copy() env['DEBIAN_FRONTEND'] = "noninteractive" subprocess.call(['sudo', '-E', 'apt-get', '-y', 'install'] + list(packages), env=env) def update(): env = os.environ.copy() env['DEBIAN_FRONTEND'] = "noninteractive" subprocess.call(['sudo', '-E', 'apt-get', 'update'], env=env) def upgrade(): env = os.environ.copy() env['DEBIAN_FRONTEND'] = "noninteractive" subprocess.call(['sudo', '-E', 'apt-get', 'upgrade'], env=env)
Convert tuple to list for concatination
Convert tuple to list for concatination
Python
mit
hatchery/genepool,hatchery/Genepool2
509d1af832ac31d2b6334b82c59c44eb00c0e434
salt/grains/extra.py
salt/grains/extra.py
# -*- coding: utf-8 -*- from __future__ import absolute_import # Import python libs import os # Import third party libs import yaml import logging # Import salt libs import salt.utils log = logging.getLogger(__name__) def shell(): ''' Return the default shell to use on this system ''' # Provides: # shell return {'shell': os.environ.get('SHELL', '/bin/sh')} def config(): ''' Return the grains set in the grains file ''' if 'conf_file' not in __opts__: return {} if os.path.isdir(__opts__['conf_file']): gfn = os.path.join( __opts__['conf_file'], 'grains' ) else: gfn = os.path.join( os.path.dirname(__opts__['conf_file']), 'grains' ) if os.path.isfile(gfn): with salt.utils.fopen(gfn, 'rb') as fp_: try: return yaml.safe_load(fp_.read()) except Exception: log.warning("Bad syntax in grains file! Skipping.") return {} return {}
# -*- coding: utf-8 -*- from __future__ import absolute_import # Import python libs import os # Import third party libs import yaml import logging # Import salt libs import salt.utils __proxyenabled__ = ['*'] log = logging.getLogger(__name__) def shell(): ''' Return the default shell to use on this system ''' # Provides: # shell return {'shell': os.environ.get('SHELL', '/bin/sh')} def config(): ''' Return the grains set in the grains file ''' if 'conf_file' not in __opts__: return {} if os.path.isdir(__opts__['conf_file']): if salt.utils.is_proxy(): gfn = os.path.join( __opts__['conf_file'], 'proxy.d', __opts__['id'], 'grains' ) else: gfn = os.path.join( __opts__['conf_file'], 'grains' ) else: if salt.utils.is_proxy(): gfn = os.path.join( os.path.dirname(__opts__['conf_file']), 'proxy.d', __opts__['id'], 'grains' ) else: gfn = os.path.join( os.path.dirname(__opts__['conf_file']), 'grains' ) if os.path.isfile(gfn): log.debug('Loading static grains from %s', gfn) with salt.utils.fopen(gfn, 'rb') as fp_: try: return yaml.safe_load(fp_.read()) except Exception: log.warning("Bad syntax in grains file! Skipping.") return {} return {}
Allow proxy minions to load static grains
Allow proxy minions to load static grains Add the `__proxyenabled__` global var so the extra grains are loaded. Inside the `config` function of the extra grains check if the minion is a proxy, then try loading from <conf_file>/proxy.d/<proxy id>/grains.
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
4f9db35566332778853e993f7791116d66c49dd4
grako/rendering.py
grako/rendering.py
# -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import, unicode_literals import itertools from .util import trim def render(item, **fields): """ Render the given item """ if item is None: return '' elif isinstance(item, Renderer): return item.render(**fields) elif isinstance(item, list): return ''.join(render(e) for e in item) else: return str(item) class Renderer(object): template = '' _counter = itertools.count() def __init__(self, template=None): if template is not None: self.template = template def counter(self): return next(self._counter) def render_fields(self, fields): pass def render(self, template=None, **fields): if template is None: template = self.template fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')}) self.render_fields(fields) fields = {k:render(v) for k, v in fields.items()} try: return trim(template).format(**fields) except KeyError as e: raise KeyError(str(e), type(self))
# -*- coding: utf-8 -*- from __future__ import print_function, division, absolute_import, unicode_literals import itertools from .util import trim def render(item, **fields): """ Render the given item """ if item is None: return '' elif isinstance(item, Renderer): return item.render(**fields) elif isinstance(item, list): return ''.join(render(e) for e in item) else: return str(item) class Renderer(object): template = '' _counter = itertools.count() def __init__(self, template=None): if template is not None: self.template = template def counter(self): return next(self._counter) def render_fields(self, fields): pass def render(self, template=None, **fields): fields.update({k:v for k, v in vars(self).items() if not k.startswith('_')}) self.render_fields(fields) if template is None: template = self.template fields = {k:render(v) for k, v in fields.items()} try: return trim(template).format(**fields) except KeyError as e: raise KeyError(str(e), type(self))
Allow render_fields to override the default template.
Allow render_fields to override the default template.
Python
bsd-2-clause
swayf/grako,swayf/grako
2d3e52567d7d361428ce93d02cc42ecaddacab6c
tests/test_commands.py
tests/test_commands.py
# -*- coding: utf-8 -*- from couchapp import commands from couchapp.errors import AppError from mock import Mock, patch from nose.tools import raises @patch('couchapp.commands.document') def test_init_dest(mock_doc): commands.init(None, None, '/tmp/mk') mock_doc.assert_called_once_with('/tmp/mk', create=True) @patch('os.getcwd', return_value='/mock_dir') @patch('couchapp.commands.document') def test_init_dest_auto(mock_doc, mock_cwd): commands.init(None, None) mock_doc.assert_called_once_with('/mock_dir', create=True) @raises(AppError) @patch('os.getcwd', return_value=None) @patch('couchapp.commands.document') def test_init_dest_auto(mock_doc, mock_cwd): commands.init(None, None)
# -*- coding: utf-8 -*- from couchapp import commands from couchapp.errors import AppError from mock import Mock, patch from nose.tools import raises @patch('couchapp.commands.document') def test_init_dest(mock_doc): commands.init(None, None, '/tmp/mk') mock_doc.assert_called_once_with('/tmp/mk', create=True) @patch('os.getcwd', return_value='/mock_dir') @patch('couchapp.commands.document') def test_init_dest_auto(mock_doc, mock_cwd): commands.init(None, None) mock_doc.assert_called_once_with('/mock_dir', create=True) @raises(AppError) @patch('os.getcwd', return_value=None) @patch('couchapp.commands.document') def test_init_dest_none(mock_doc, mock_cwd): commands.init(None, None) def test_push_outside(): ''' $ couchapp push /path/to/app ''' pass @patch('couchapp.commands.document', return_value='{"status": "ok"}') def test_push_export_outside(mock_doc): ''' $ couchapp push --export /path/to/app ''' conf = Mock(name='conf') appdir = '/mock_dir' commands.push(conf, None, appdir, export=True) mock_doc.assert_called_once_with(appdir, create=False, docid=None) conf.update.assert_called_once_with(appdir) @patch('couchapp.commands.document', return_value='{"status": "ok"}') def test_push_export_inside(mock_doc): ''' In the app dir:: $ couchapp push --export ''' conf = Mock(name='conf') appdir = '/mock_dir' commands.push(conf, appdir, export=True) mock_doc.assert_called_once_with(appdir, create=False, docid=None) conf.update.assert_called_once_with(appdir)
Test cases for push with export flag
Test cases for push with export flag
Python
apache-2.0
couchapp/couchapp,h4ki/couchapp,couchapp/couchapp,couchapp/couchapp,h4ki/couchapp,h4ki/couchapp,couchapp/couchapp,h4ki/couchapp
5356aee78495dd7846cec6625b3980ba93be86e6
tests/test_settings.py
tests/test_settings.py
from __future__ import unicode_literals from os.path import dirname MIU_TEST_ROOT = dirname(__file__) INSTALLED_APPS = [ "django.contrib.auth", "django.contrib.contenttypes", "markitup", "tests", "tests.test_migration", ] DATABASES = { "default": { "ENGINE": "django.db.backends.sqlite3" } } STATIC_URL = "/static/" ROOT_URLCONF = "tests.urls" # Use str so this isn't unicode on python 2 MARKITUP_FILTER = (str("tests.filter.testfilter"), {"arg": "replacement"}) SECRET_KEY = 'test-secret' MIDDLEWARE_CLASSES = []
from __future__ import unicode_literals from os.path import dirname, abspath, join BASE_DIR = dirname(abspath(__file__)) INSTALLED_APPS = [ "django.contrib.auth", "django.contrib.contenttypes", "markitup", "tests", "tests.test_migration", ] DATABASES = { "default": { "ENGINE": "django.db.backends.sqlite3" } } TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [ join(BASE_DIR, 'templates'), ], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.contrib.auth.context_processors.auth', 'django.template.context_processors.debug', 'django.template.context_processors.i18n', 'django.template.context_processors.media', 'django.template.context_processors.static', 'django.template.context_processors.tz', 'django.contrib.messages.context_processors.messages', ], }, }, ] STATIC_URL = "/static/" ROOT_URLCONF = "tests.urls" # Use str so this isn't unicode on python 2 MARKITUP_FILTER = (str("tests.filter.testfilter"), {"arg": "replacement"}) SECRET_KEY = 'test-secret' MIDDLEWARE_CLASSES = []
Configure TEMPLATES in test settings.
Configure TEMPLATES in test settings.
Python
bsd-3-clause
zsiciarz/django-markitup,zsiciarz/django-markitup,carljm/django-markitup,zsiciarz/django-markitup,carljm/django-markitup,carljm/django-markitup
9dc253b79d885ca205b557f88fca6fa35bd8fe21
tests/test_selector.py
tests/test_selector.py
from contextlib import contextmanager from scell import Selector from pytest import raises, fixture def test_select(selector): res = list(selector.select()) assert res for event in res: assert event.ready def test_select_empty(): sel = Selector() assert list(sel.select()) == [] def test_unregister(selector): for fp in list(selector): selector.unregister(fp) assert not selector def test_info(selector): for fp in selector: assert selector.info(fp).wants_read assert selector.info(0) is None def test_callbacks(selector): res = selector.select() exp = len(selector) assert sum(m.callback() for m in res) == exp def test_ready(selector): ready = list(selector.ready()) assert ready for event in ready: assert event.ready class TestScoped(object): @fixture def sel(self): return Selector() def test_peaceful(self, sel, handles): with sel.scoped(handles) as monitors: r = list(sel.ready()) for ev in r: assert ev.monitored in monitors assert ev.fp in handles assert r assert not sel def test_exception(self, sel, handles): with raises(NameError): with sel.scoped(handles) as _: raise NameError assert not sel
from contextlib import contextmanager from scell import Selector from pytest import raises, fixture def test_select(selector): res = list(selector.select()) assert res for event in res: assert event.ready def test_select_empty(): sel = Selector() assert list(sel.select()) == [] def test_unregister(selector): for fp in list(selector): selector.unregister(fp) assert not selector def test_info(selector): for fp in selector: assert selector.info(fp).wants_read assert selector.info(0) is None def test_callbacks(selector): res = selector.select() exp = len(selector) assert sum(m.callback() for m in res) == exp def test_ready(selector): ready = list(selector.ready()) assert ready for event in ready: assert event.ready class TestScoped(object): @fixture def sel(self): return Selector() def test_peaceful(self, sel, handles): with sel.scoped(handles) as monitors: r = set(k.fp for k in sel.ready()) assert r == set(handles) assert not sel def test_exception(self, sel, handles): with raises(NameError): with sel.scoped(handles) as _: raise NameError assert not sel
Make Selector.scope test more rigorous
Make Selector.scope test more rigorous
Python
mit
eugene-eeo/scell
7520e1285af36292def45f892808841e78cc4a2b
bloop/index.py
bloop/index.py
missing = object() class GlobalSecondaryIndex(object): def __init__(self, hash_key=None, range_key=None, write_units=1, read_units=1, name=missing): self._model_name = None self._backing_name = name self.write_units = write_units self.read_units = read_units self.hash_key = hash_key self.range_key = range_key @property def model_name(self): ''' Name of the model's attr that references self ''' return self._model_name @property def dynamo_name(self): if self._backing_name is missing: return self.model_name return self._backing_name
class Index(object): def __init__(self, write_units=1, read_units=1, name=None, range_key=None): self._model_name = None self._dynamo_name = name self.write_units = write_units self.read_units = read_units self.range_key = range_key @property def model_name(self): ''' Name of the model's attr that references self ''' return self._model_name @model_name.setter def model_name(self, value): if self._model_name is not None: raise AttributeError("{} model_name already set to '{}'".format( self.__class__.__name__, self._model_name)) self._model_name = value @property def dynamo_name(self): if self._dynamo_name is None: return self.model_name return self._dynamo_name class GlobalSecondaryIndex(Index): def __init__(self, hash_key=None, **kwargs): super().__init__(**kwargs) self.hash_key = hash_key class LocalSecondaryIndex(Index): ''' when constructing a model, you MUST set this index's model attr. ''' @property def hash_key(self): hash_column = self.model.__meta__['dynamo.table.hash_key'] return hash_column.dynamo_name
Refactor GSI, LSI to use base Index class
Refactor GSI, LSI to use base Index class
Python
mit
numberoverzero/bloop,numberoverzero/bloop
db4ccce9e418a1227532bde8834ca682bc873609
system/t04_mirror/show.py
system/t04_mirror/show.py
from lib import BaseTest class ShowMirror1Test(BaseTest): """ show mirror: regular mirror """ fixtureCmds = ["aptly mirror create mirror1 http://mirror.yandex.ru/debian/ wheezy"] runCmd = "aptly mirror show mirror1" class ShowMirror2Test(BaseTest): """ show mirror: missing mirror """ runCmd = "aptly mirror show mirror-xx" expectedCode = 1 class ShowMirror3Test(BaseTest): """ show mirror: regular mirror with packages """ fixtureDB = True runCmd = "aptly mirror show --with-packages wheezy-contrib"
from lib import BaseTest import re class ShowMirror1Test(BaseTest): """ show mirror: regular mirror """ fixtureCmds = ["aptly mirror create mirror1 http://mirror.yandex.ru/debian/ wheezy"] runCmd = "aptly mirror show mirror1" class ShowMirror2Test(BaseTest): """ show mirror: missing mirror """ runCmd = "aptly mirror show mirror-xx" expectedCode = 1 class ShowMirror3Test(BaseTest): """ show mirror: regular mirror with packages """ fixtureDB = True runCmd = "aptly mirror show --with-packages wheezy-contrib" outputMatchPrepare = lambda _, s: re.sub(r"Last update: [0-9:A-Za-z -]+\n", "", s)
Remove updated at while comparing.
Remove updated at while comparing.
Python
mit
gearmover/aptly,bsundsrud/aptly,adfinis-forks/aptly,vincentbernat/aptly,gdbdzgd/aptly,ceocoder/aptly,adfinis-forks/aptly,seaninspace/aptly,neolynx/aptly,scalp42/aptly,gdbdzgd/aptly,sobczyk/aptly,neolynx/aptly,scalp42/aptly,aptly-dev/aptly,seaninspace/aptly,aptly-dev/aptly,bsundsrud/aptly,gdbdzgd/aptly,bankonme/aptly,adfinis-forks/aptly,sobczyk/aptly,seaninspace/aptly,vincentbernat/aptly,smira/aptly,jola5/aptly,scalp42/aptly,smira/aptly,ceocoder/aptly,gearmover/aptly,bankonme/aptly,bsundsrud/aptly,vincentbernat/aptly,ceocoder/aptly,jola5/aptly,jola5/aptly,aptly-dev/aptly,gearmover/aptly,sobczyk/aptly,neolynx/aptly,smira/aptly,bankonme/aptly
1e8c094c0f806b624a41447446676c1f2ac3590d
tools/debug_adapter.py
tools/debug_adapter.py
#!/usr/bin/python import sys if 'darwin' in sys.platform: sys.path.append('/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python') sys.path.append('.') import adapter adapter.main.run_tcp_server()
#!/usr/bin/python import sys import subprocess import string out = subprocess.check_output(['lldb', '-P']) sys.path.append(string.strip(out)) sys.path.append('.') import adapter adapter.main.run_tcp_server()
Fix adapter debugging on Linux.
Fix adapter debugging on Linux.
Python
mit
vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb
b757a5e24fa8018647827b8194c985881df872d5
scipy/signal/setup.py
scipy/signal/setup.py
#!/usr/bin/env python def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('signal', parent_package, top_path) config.add_data_dir('tests') config.add_extension('sigtools', sources=['sigtoolsmodule.c', 'firfilter.c','medianfilter.c'], depends = ['sigtools.h'] ) config.add_extension('spline', sources = ['splinemodule.c','S_bspline_util.c','D_bspline_util.c', 'C_bspline_util.c','Z_bspline_util.c','bspline_util.c'], ) return config if __name__ == '__main__': from numpy.distutils.core import setup setup(**configuration(top_path='').todict())
#!/usr/bin/env python def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('signal', parent_package, top_path) config.add_data_dir('tests') config.add_extension('sigtools', sources=['sigtoolsmodule.c', 'firfilter.c','medianfilter.c'], depends = ['sigtools.h', 'newsig.c'] ) config.add_extension('spline', sources = ['splinemodule.c','S_bspline_util.c','D_bspline_util.c', 'C_bspline_util.c','Z_bspline_util.c','bspline_util.c'], ) return config if __name__ == '__main__': from numpy.distutils.core import setup setup(**configuration(top_path='').todict())
Add newsig.c as a dependency to sigtools module.
Add newsig.c as a dependency to sigtools module.
Python
bsd-3-clause
andyfaff/scipy,sauliusl/scipy,newemailjdm/scipy,jor-/scipy,mikebenfield/scipy,jsilter/scipy,mortada/scipy,josephcslater/scipy,jjhelmus/scipy,trankmichael/scipy,larsmans/scipy,jamestwebber/scipy,jonycgn/scipy,haudren/scipy,petebachant/scipy,endolith/scipy,vigna/scipy,e-q/scipy,raoulbq/scipy,aeklant/scipy,fredrikw/scipy,josephcslater/scipy,efiring/scipy,scipy/scipy,nonhermitian/scipy,sriki18/scipy,dch312/scipy,nvoron23/scipy,gfyoung/scipy,ales-erjavec/scipy,woodscn/scipy,kalvdans/scipy,nvoron23/scipy,gef756/scipy,richardotis/scipy,pyramania/scipy,Newman101/scipy,gfyoung/scipy,person142/scipy,teoliphant/scipy,njwilson23/scipy,aman-iitj/scipy,Gillu13/scipy,pbrod/scipy,gef756/scipy,futurulus/scipy,sonnyhu/scipy,ChanderG/scipy,tylerjereddy/scipy,jjhelmus/scipy,woodscn/scipy,witcxc/scipy,bkendzior/scipy,haudren/scipy,aman-iitj/scipy,jjhelmus/scipy,WarrenWeckesser/scipy,vhaasteren/scipy,pizzathief/scipy,zerothi/scipy,apbard/scipy,nonhermitian/scipy,haudren/scipy,andyfaff/scipy,endolith/scipy,gdooper/scipy,pschella/scipy,vanpact/scipy,dch312/scipy,anntzer/scipy,dch312/scipy,anielsen001/scipy,zxsted/scipy,niknow/scipy,newemailjdm/scipy,Srisai85/scipy,anntzer/scipy,person142/scipy,raoulbq/scipy,nonhermitian/scipy,richardotis/scipy,Newman101/scipy,rgommers/scipy,aarchiba/scipy,hainm/scipy,jonycgn/scipy,jsilter/scipy,matthew-brett/scipy,kalvdans/scipy,juliantaylor/scipy,pschella/scipy,Srisai85/scipy,Srisai85/scipy,Shaswat27/scipy,apbard/scipy,sargas/scipy,sriki18/scipy,mortonjt/scipy,ales-erjavec/scipy,Newman101/scipy,jakevdp/scipy,Dapid/scipy,maciejkula/scipy,jonycgn/scipy,Kamp9/scipy,lukauskas/scipy,trankmichael/scipy,endolith/scipy,andim/scipy,befelix/scipy,raoulbq/scipy,dch312/scipy,andim/scipy,grlee77/scipy,nmayorov/scipy,pschella/scipy,fernand/scipy,mtrbean/scipy,mgaitan/scipy,zxsted/scipy,vberaudi/scipy,scipy/scipy,vberaudi/scipy,andim/scipy,rmcgibbo/scipy,newemailjdm/scipy,hainm/scipy,aarchiba/scipy,behzadnouri/scipy,niknow/scipy,bkendzior/scipy,Eric89GXL/scipy,apbard/scipy,mgaitan/scipy,Shaswat27/scipy,e-q/scipy,vberaudi/scipy,ilayn/scipy,woodscn/scipy,felipebetancur/scipy,kalvdans/scipy,witcxc/scipy,jseabold/scipy,anielsen001/scipy,mdhaber/scipy,rmcgibbo/scipy,Stefan-Endres/scipy,zerothi/scipy,vanpact/scipy,anntzer/scipy,Kamp9/scipy,Kamp9/scipy,ChanderG/scipy,tylerjereddy/scipy,minhlongdo/scipy,mikebenfield/scipy,piyush0609/scipy,WarrenWeckesser/scipy,richardotis/scipy,ogrisel/scipy,scipy/scipy,zaxliu/scipy,dominicelse/scipy,jjhelmus/scipy,lukauskas/scipy,andyfaff/scipy,fernand/scipy,gertingold/scipy,scipy/scipy,mhogg/scipy,mingwpy/scipy,arokem/scipy,aeklant/scipy,jakevdp/scipy,mhogg/scipy,WillieMaddox/scipy,zxsted/scipy,perimosocordiae/scipy,befelix/scipy,sauliusl/scipy,ales-erjavec/scipy,jseabold/scipy,rgommers/scipy,aarchiba/scipy,newemailjdm/scipy,Shaswat27/scipy,pnedunuri/scipy,pyramania/scipy,mingwpy/scipy,mortonjt/scipy,niknow/scipy,befelix/scipy,ales-erjavec/scipy,Newman101/scipy,ChanderG/scipy,jonycgn/scipy,trankmichael/scipy,lukauskas/scipy,Dapid/scipy,pnedunuri/scipy,Eric89GXL/scipy,andyfaff/scipy,cpaulik/scipy,Eric89GXL/scipy,gertingold/scipy,anntzer/scipy,giorgiop/scipy,vhaasteren/scipy,zerothi/scipy,behzadnouri/scipy,futurulus/scipy,surhudm/scipy,sargas/scipy,rgommers/scipy,lhilt/scipy,sargas/scipy,Newman101/scipy,Dapid/scipy,pizzathief/scipy,maniteja123/scipy,FRidh/scipy,matthewalbani/scipy,niknow/scipy,WarrenWeckesser/scipy,Srisai85/scipy,rmcgibbo/scipy,ndchorley/scipy,gfyoung/scipy,chatcannon/scipy,larsmans/scipy,zerothi/scipy,Shaswat27/scipy,sonnyhu/scipy,pizzathief/scipy,ila
yn/scipy,gdooper/scipy,aeklant/scipy,efiring/scipy,gfyoung/scipy,jor-/scipy,nmayorov/scipy,petebachant/scipy,sonnyhu/scipy,gdooper/scipy,maniteja123/scipy,jsilter/scipy,mortada/scipy,witcxc/scipy,mtrbean/scipy,andyfaff/scipy,njwilson23/scipy,mdhaber/scipy,hainm/scipy,Stefan-Endres/scipy,ogrisel/scipy,minhlongdo/scipy,gef756/scipy,jor-/scipy,vanpact/scipy,pbrod/scipy,mdhaber/scipy,gertingold/scipy,vberaudi/scipy,felipebetancur/scipy,pnedunuri/scipy,mdhaber/scipy,richardotis/scipy,dominicelse/scipy,cpaulik/scipy,mtrbean/scipy,mtrbean/scipy,giorgiop/scipy,nvoron23/scipy,gertingold/scipy,FRidh/scipy,ortylp/scipy,mikebenfield/scipy,Stefan-Endres/scipy,Stefan-Endres/scipy,felipebetancur/scipy,kleskjr/scipy,gef756/scipy,pizzathief/scipy,sauliusl/scipy,sauliusl/scipy,mingwpy/scipy,jor-/scipy,cpaulik/scipy,rmcgibbo/scipy,behzadnouri/scipy,kleskjr/scipy,hainm/scipy,pschella/scipy,larsmans/scipy,petebachant/scipy,mortada/scipy,vigna/scipy,Newman101/scipy,minhlongdo/scipy,perimosocordiae/scipy,bkendzior/scipy,zaxliu/scipy,vigna/scipy,Dapid/scipy,Shaswat27/scipy,maniteja123/scipy,njwilson23/scipy,pyramania/scipy,jonycgn/scipy,raoulbq/scipy,josephcslater/scipy,anntzer/scipy,befelix/scipy,ortylp/scipy,nmayorov/scipy,gdooper/scipy,anielsen001/scipy,fernand/scipy,haudren/scipy,ortylp/scipy,jsilter/scipy,niknow/scipy,ilayn/scipy,sonnyhu/scipy,jseabold/scipy,felipebetancur/scipy,surhudm/scipy,mortonjt/scipy,pnedunuri/scipy,mortada/scipy,zaxliu/scipy,josephcslater/scipy,grlee77/scipy,felipebetancur/scipy,matthew-brett/scipy,teoliphant/scipy,ortylp/scipy,vanpact/scipy,larsmans/scipy,argriffing/scipy,fernand/scipy,mortonjt/scipy,zxsted/scipy,zxsted/scipy,cpaulik/scipy,vigna/scipy,petebachant/scipy,maniteja123/scipy,teoliphant/scipy,Gillu13/scipy,larsmans/scipy,jamestwebber/scipy,njwilson23/scipy,mgaitan/scipy,vhaasteren/scipy,fernand/scipy,chatcannon/scipy,endolith/scipy,hainm/scipy,andim/scipy,grlee77/scipy,tylerjereddy/scipy,matthewalbani/scipy,minhlongdo/scipy,sargas/scipy,mhogg/scipy,Gillu13/scipy,vigna/scipy,jamestwebber/scipy,jsilter/scipy,richardotis/scipy,gef756/scipy,FRidh/scipy,mgaitan/scipy,zaxliu/scipy,mdhaber/scipy,piyush0609/scipy,matthewalbani/scipy,arokem/scipy,josephcslater/scipy,maciejkula/scipy,person142/scipy,ogrisel/scipy,mikebenfield/scipy,Srisai85/scipy,mingwpy/scipy,matthew-brett/scipy,piyush0609/scipy,Stefan-Endres/scipy,nonhermitian/scipy,arokem/scipy,maciejkula/scipy,argriffing/scipy,zerothi/scipy,lhilt/scipy,ogrisel/scipy,lukauskas/scipy,efiring/scipy,bkendzior/scipy,grlee77/scipy,arokem/scipy,matthew-brett/scipy,nonhermitian/scipy,rmcgibbo/scipy,fredrikw/scipy,sonnyhu/scipy,pnedunuri/scipy,sriki18/scipy,andim/scipy,niknow/scipy,zxsted/scipy,kalvdans/scipy,sauliusl/scipy,WillieMaddox/scipy,richardotis/scipy,tylerjereddy/scipy,vhaasteren/scipy,mtrbean/scipy,dch312/scipy,piyush0609/scipy,aarchiba/scipy,gdooper/scipy,andyfaff/scipy,raoulbq/scipy,aeklant/scipy,newemailjdm/scipy,Kamp9/scipy,Gillu13/scipy,newemailjdm/scipy,kleskjr/scipy,andim/scipy,Dapid/scipy,giorgiop/scipy,fredrikw/scipy,maciejkula/scipy,Eric89GXL/scipy,mgaitan/scipy,larsmans/scipy,apbard/scipy,behzadnouri/scipy,gertingold/scipy,rgommers/scipy,pbrod/scipy,haudren/scipy,petebachant/scipy,futurulus/scipy,ndchorley/scipy,endolith/scipy,ChanderG/scipy,aarchiba/scipy,vhaasteren/scipy,petebachant/scipy,WarrenWeckesser/scipy,jseabold/scipy,woodscn/scipy,surhudm/scipy,aman-iitj/scipy,apbard/scipy,mhogg/scipy,efiring/scipy,pbrod/scipy,surhudm/scipy,WarrenWeckesser/scipy,trankmichael/scipy,giorgiop/scipy,efiring/scipy,witcxc/scipy,
raoulbq/scipy,fernand/scipy,cpaulik/scipy,zaxliu/scipy,woodscn/scipy,teoliphant/scipy,maciejkula/scipy,lukauskas/scipy,matthewalbani/scipy,perimosocordiae/scipy,mortonjt/scipy,scipy/scipy,aman-iitj/scipy,fredrikw/scipy,person142/scipy,ndchorley/scipy,njwilson23/scipy,pschella/scipy,matthewalbani/scipy,Shaswat27/scipy,argriffing/scipy,Gillu13/scipy,Kamp9/scipy,nvoron23/scipy,Kamp9/scipy,jakevdp/scipy,nvoron23/scipy,woodscn/scipy,bkendzior/scipy,surhudm/scipy,e-q/scipy,matthew-brett/scipy,argriffing/scipy,argriffing/scipy,vhaasteren/scipy,njwilson23/scipy,WillieMaddox/scipy,cpaulik/scipy,FRidh/scipy,dominicelse/scipy,WillieMaddox/scipy,haudren/scipy,Gillu13/scipy,sonnyhu/scipy,jor-/scipy,ales-erjavec/scipy,FRidh/scipy,jseabold/scipy,ortylp/scipy,ChanderG/scipy,juliantaylor/scipy,hainm/scipy,Eric89GXL/scipy,sriki18/scipy,pyramania/scipy,behzadnouri/scipy,rgommers/scipy,juliantaylor/scipy,aman-iitj/scipy,pbrod/scipy,pbrod/scipy,jjhelmus/scipy,ilayn/scipy,ilayn/scipy,ortylp/scipy,argriffing/scipy,sriki18/scipy,mtrbean/scipy,trankmichael/scipy,e-q/scipy,futurulus/scipy,vberaudi/scipy,teoliphant/scipy,sargas/scipy,tylerjereddy/scipy,grlee77/scipy,juliantaylor/scipy,endolith/scipy,mgaitan/scipy,scipy/scipy,ogrisel/scipy,surhudm/scipy,ilayn/scipy,ales-erjavec/scipy,mdhaber/scipy,jamestwebber/scipy,ndchorley/scipy,ndchorley/scipy,mortonjt/scipy,witcxc/scipy,anntzer/scipy,futurulus/scipy,pnedunuri/scipy,Stefan-Endres/scipy,futurulus/scipy,anielsen001/scipy,perimosocordiae/scipy,Eric89GXL/scipy,nmayorov/scipy,nmayorov/scipy,FRidh/scipy,chatcannon/scipy,jamestwebber/scipy,person142/scipy,gef756/scipy,kalvdans/scipy,mingwpy/scipy,giorgiop/scipy,minhlongdo/scipy,kleskjr/scipy,piyush0609/scipy,rmcgibbo/scipy,arokem/scipy,Dapid/scipy,mortada/scipy,WarrenWeckesser/scipy,dominicelse/scipy,WillieMaddox/scipy,anielsen001/scipy,piyush0609/scipy,jonycgn/scipy,ChanderG/scipy,zerothi/scipy,kleskjr/scipy,fredrikw/scipy,mortada/scipy,gfyoung/scipy,aman-iitj/scipy,vberaudi/scipy,anielsen001/scipy,lhilt/scipy,minhlongdo/scipy,chatcannon/scipy,jseabold/scipy,dominicelse/scipy,felipebetancur/scipy,lukauskas/scipy,mingwpy/scipy,fredrikw/scipy,nvoron23/scipy,perimosocordiae/scipy,befelix/scipy,trankmichael/scipy,mhogg/scipy,ndchorley/scipy,sauliusl/scipy,vanpact/scipy,behzadnouri/scipy,giorgiop/scipy,efiring/scipy,sriki18/scipy,lhilt/scipy,jakevdp/scipy,pizzathief/scipy,jakevdp/scipy,chatcannon/scipy,kleskjr/scipy,pyramania/scipy,vanpact/scipy,perimosocordiae/scipy,zaxliu/scipy,chatcannon/scipy,maniteja123/scipy,WillieMaddox/scipy,juliantaylor/scipy,mikebenfield/scipy,e-q/scipy,maniteja123/scipy,aeklant/scipy,mhogg/scipy,Srisai85/scipy,lhilt/scipy
73e8864e745ca75c2ea327b53244c9f2f4183e1a
lambda_function.py
lambda_function.py
#!/usr/bin/env python2 from StringIO import StringIO import boto3 from dmr_marc_users_cs750 import ( get_users, get_groups, write_contacts_csv, write_contacts_xlsx ) def s3_contacts(contacts, bucket, key): s3 = boto3.client('s3') o = StringIO() if key.endswith('.csv'): t = 'text/csv' write_contacts_csv(contacts, o) elif key.endswith('.xlsx'): t = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' write_contacts_xlsx(contacts, o) s3.put_object( Bucket=bucket, Key=key, Body=o.getvalue(), ContentType=t, ACL='public-read') o.close() def lambda_handler(event=None, context=None): users = get_users() groups = get_groups() s3_contacts(contacts=users, bucket='dmr-contacts', key='DMR_contacts.csv') s3_contacts(contacts=groups+users, bucket='dmr-contacts', key='contacts-dci.xlsx') if __name__ == '__main__': lambda_handler()
#!/usr/bin/env python2 from StringIO import StringIO import boto3 from dmr_marc_users_cs750 import ( get_users, get_groups, write_contacts_csv, write_contacts_xlsx, ) from dmrx_most_heard_n0gsg import ( get_users as get_most_heard, write_n0gsg_csv, ) def s3_contacts(contacts, bucket, key): s3 = boto3.client('s3') o = StringIO() if key.endswith('.csv'): t = 'text/csv' if key.startswith('N0GSG/'): write_n0gsg_csv(contacts, o) else: write_contacts_csv(contacts, o) elif key.endswith('.xlsx'): t = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' write_contacts_xlsx(contacts, o) s3.put_object( Bucket=bucket, Key=key, Body=o.getvalue(), ContentType=t, ACL='public-read') o.close() def lambda_handler(event=None, context=None): marc = get_users() dmrx = get_most_heard() groups = get_groups() s3_contacts(contacts=marc, bucket='dmr-contacts', key='CS750/DMR_contacts.csv') s3_contacts(contacts=groups+marc, bucket='dmr-contacts', key='CS750/dci-bm-marc.xlsx') s3_contacts(contacts=dmrx, bucket='dmr-contacts', key='N0GSG/dmrx-most-heard.csv') if __name__ == '__main__': lambda_handler()
Add N0GSG DMRX MostHeard to AWS Lambda function
Add N0GSG DMRX MostHeard to AWS Lambda function
Python
apache-2.0
ajorg/DMR_contacts
6dfb0c1ea4fb3d12d14a07d0e831eb32f3b2f340
yaml_argparse.py
yaml_argparse.py
import argparse import yaml def parse_arguments_based_on_yaml(yaml_file): with open(yaml_file) as f: yaml_data = yaml.load(f) # to start with, support only a single parameter key = list(yaml_data.keys())[0] value = yaml_data[key] parser = argparse.ArgumentParser() parser.add_argument("-{}".format(key), default=value) args = parser.parse_args() return args
import argparse import yaml def parse_arguments_based_on_yaml(yaml_file): with open(yaml_file) as f: yaml_data = yaml.load(f) parser = argparse.ArgumentParser() for key, value in yaml_data.items(): parser.add_argument("-{}".format(key), default=value) args = parser.parse_args() return args
Implement creating arguments for multiple strings
Implement creating arguments for multiple strings
Python
mit
krasch/yaml_argparse,krasch/quickargs
806e3b4f92fdc72a83cac18d338d7293673f9650
yolk/__init__.py
yolk/__init__.py
"""yolk Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.6.1'
"""yolk Author: Rob Cakebread <cakebread at gmail> License : BSD """ __version__ = '0.6.2'
Increment patch version to 0.6.2
Increment patch version to 0.6.2
Python
bsd-3-clause
myint/yolk,myint/yolk
fbcdd58775be1b6a72e1d1415f62a7bfade3dbd1
pages/views.py
pages/views.py
from django.http import Http404 from django.shortcuts import get_object_or_404 from django.contrib.sites.models import SITE_CACHE from pages import settings from pages.models import Page, Content from pages.utils import auto_render, get_language_from_request def details(request, page_id=None, slug=None, template_name=settings.DEFAULT_PAGE_TEMPLATE): lang = get_language_from_request(request) site = request.site pages = Page.objects.navigation(site).order_by("tree_id") if pages: if page_id: current_page = get_object_or_404( Page.objects.published(site), pk=page_id) elif slug: slug_content = Content.objects.get_page_slug(slug, site) if slug_content and \ slug_content.page.calculated_status in ( Page.PUBLISHED, Page.HIDDEN): current_page = slug_content.page else: raise Http404 else: current_page = pages[0] template_name = current_page.get_template() else: raise Http404 return template_name, locals() details = auto_render(details)
from django.http import Http404 from django.shortcuts import get_object_or_404 from django.contrib.sites.models import SITE_CACHE from pages import settings from pages.models import Page, Content from pages.utils import auto_render, get_language_from_request def details(request, page_id=None, slug=None, template_name=settings.DEFAULT_PAGE_TEMPLATE): """ Example view that get the root pages for navigation, and the current page if there is any root page. All is rendered with the current page's template. """ lang = get_language_from_request(request) site = request.site pages = Page.objects.navigation(site).order_by("tree_id") if pages: if page_id: current_page = get_object_or_404( Page.objects.published(site), pk=page_id) elif slug: slug_content = Content.objects.get_page_slug(slug, site) if slug_content and \ slug_content.page.calculated_status in ( Page.PUBLISHED, Page.HIDDEN): current_page = slug_content.page else: raise Http404 else: current_page = pages[0] template_name = current_page.get_template() else: raise Http404 return template_name, locals() details = auto_render(details)
Add documentation to the default view
Add documentation to the default view git-svn-id: 54fea250f97f2a4e12c6f7a610b8f07cb4c107b4@292 439a9e5f-3f3e-0410-bc46-71226ad0111b
Python
bsd-3-clause
remik/django-page-cms,akaihola/django-page-cms,oliciv/django-page-cms,akaihola/django-page-cms,akaihola/django-page-cms,remik/django-page-cms,oliciv/django-page-cms,batiste/django-page-cms,pombredanne/django-page-cms-1,pombredanne/django-page-cms-1,remik/django-page-cms,pombredanne/django-page-cms-1,batiste/django-page-cms,oliciv/django-page-cms,batiste/django-page-cms,remik/django-page-cms
eae4b06bd798eab3a46bdd5b7452411bb7fb02e1
dashcam.py
dashcam.py
# dashcam.py # A Raspberry Pi powered, GPS enabled, 3D printed bicycle dashcam # By Matthew Timmons-Brown, The Raspberry Pi Guy import pygame import picamera import os os.putenv('SDL_VIDEODRIVER', 'fbcon') os.putenv('SDL_FBDEV' , '/dev/fb1') os.putenv('SDL_MOUSEDRV' , 'TSLIB') os.putenv('SDL_MOUSEDEV' , '/dev/input/touchscreen') pygame.init() pygame.mouse.set_visible(False) screen = pygame.display.set_mode((0,0), pygame.FULLSCREEN)
# dashcam.py # A Raspberry Pi powered, GPS enabled, 3D printed bicycle dashcam # By Matthew Timmons-Brown, The Raspberry Pi Guy import pygame import picamera import os import sys import io os.putenv('SDL_VIDEODRIVER', 'fbcon') os.putenv('SDL_FBDEV' , '/dev/fb1') os.putenv('SDL_MOUSEDRV' , 'TSLIB') os.putenv('SDL_MOUSEDEV' , '/dev/input/touchscreen') size = width, height = 320, 240 pygame.init() pygame.mouse.set_visible(False) screen = pygame.display.set_mode(size) go_button = pygame.image.load("/home/pi/bike_dashcam/media/go.bmp")
Update dascham with pygame GO button load
Update dascham with pygame GO button load
Python
mit
the-raspberry-pi-guy/bike_dashcam,the-raspberry-pi-guy/bike_dashcam
b35d4292e50e8a8dc56635bddeac5a1fc42a5d19
tveebot_tracker/source.py
tveebot_tracker/source.py
from abc import ABC, abstractmethod class TVShowNotFound(Exception): """ Raised when a reference does not match any TV Show available """ class EpisodeSource(ABC): """ Abstract base class to define the interface for and episode source. An episode source is used by the tracker to obtain episode files. A source is usually based on a feed that provides links to TV Show's episodes. Every source has its own protocol to obtain the information and it uses its own format to present that information. Implementations of this interface are responsible for implementing the details of how to obtain the episode files' information and present them to the tracker. """ # Called by the tracker when it wants to get the episodes available for # a specific TVShow @abstractmethod def get_episodes_for(self, tvshow_reference: str) -> list: """ Retrieve all available episode files corresponding to the specified TV show. Multiple files for the same episode may be retrieved. The TV show to obtain the episodes from is identified by some reference that uniquely identifies it within the episode source in question. :param tvshow_reference: reference that uniquely identifies the TV show to get the episodes for :return: a list containing all episode files available for the specified TV Show. An empty list if none is found. :raise TVShowNotFound: if the specified reference does not match to any TV show available """
from abc import ABC, abstractmethod class TVShowNotFound(Exception): """ Raised when a reference does not match any TV Show available """ class EpisodeSource(ABC): """ Abstract base class to define the interface for and episode source. An episode source is used by the tracker to obtain episode files. A source is usually based on a feed that provides links to TV Show's episodes. Every source has its own protocol to obtain the information and it uses its own format to present that information. Implementations of this interface are responsible for implementing the details of how to obtain the episode files' information and present them to the tracker. """ # Called by the tracker when it wants to get the episodes available for # a specific TVShow @abstractmethod def fetch(self, tvshow_reference: str) -> list: """ Fetches all available episode files, corresponding to the specified TV show. Multiple files for the same episode may be retrieved. The TV show to obtain the episodes from is identified by some reference that uniquely identifies it within the episode source in question. :param tvshow_reference: reference that uniquely identifies the TV show to get the episodes for :return: a list containing all episode files available for the specified TV Show. An empty list if none is found. :raise TVShowNotFound: if the specified reference does not match to any TV show available """
Rename Source's get_episodes_for() method to fetch()
Rename Source's get_episodes_for() method to fetch()
Python
mit
tveebot/tracker
c7601ed4144b12717f536f2fc2fc0ddb5745ec27
opentaxii/auth/sqldb/models.py
opentaxii/auth/sqldb/models.py
import hmac import bcrypt from sqlalchemy.schema import Column from sqlalchemy.types import Integer, String from sqlalchemy.ext.declarative import declarative_base __all__ = ['Base', 'Account'] Base = declarative_base() MAX_STR_LEN = 256 class Account(Base): __tablename__ = 'accounts' id = Column(Integer, primary_key=True) username = Column(String(MAX_STR_LEN), unique=True) password_hash = Column(String(MAX_STR_LEN)) def set_password(self, password): if isinstance(password, unicode): password = password.encode('utf-8') self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt()) def is_password_valid(self, password): if isinstance(password, unicode): password = password.encode('utf-8') hashed = self.password_hash.encode('utf-8') return hmac.compare_digest(bcrypt.hashpw(password, hashed), hashed)
import bcrypt from sqlalchemy.schema import Column from sqlalchemy.types import Integer, String from sqlalchemy.ext.declarative import declarative_base from werkzeug.security import safe_str_cmp __all__ = ['Base', 'Account'] Base = declarative_base() MAX_STR_LEN = 256 class Account(Base): __tablename__ = 'accounts' id = Column(Integer, primary_key=True) username = Column(String(MAX_STR_LEN), unique=True) password_hash = Column(String(MAX_STR_LEN)) def set_password(self, password): if isinstance(password, unicode): password = password.encode('utf-8') self.password_hash = bcrypt.hashpw(password, bcrypt.gensalt()) def is_password_valid(self, password): if isinstance(password, unicode): password = password.encode('utf-8') hashed = self.password_hash.encode('utf-8') return safe_str_cmp(bcrypt.hashpw(password, hashed), hashed)
Use Werkzeug's safe_str_cmp() instead of hmac.compare_digest()
Use Werkzeug's safe_str_cmp() instead of hmac.compare_digest() Werkzeug will use the latter on Python >2.7.7, and provides a fallback for older Python versions.
Python
bsd-3-clause
EclecticIQ/OpenTAXII,Intelworks/OpenTAXII,EclecticIQ/OpenTAXII,Intelworks/OpenTAXII
21bff1460ae71c1664ab3fbccd678bc5be0b8dd3
pirate_add_shift_recurrence.py
pirate_add_shift_recurrence.py
#!/usr/bin/python import sys import os from tasklib import TaskWarrior time_attributes = ('wait', 'scheduled') def is_new_local_recurrence_child_task(task): # Do not affect tasks not spun by recurrence if not task['parent']: return False # Newly created recurrence tasks actually have # modified field copied from the parent, thus # older than entry field (until their ID is generated) if (task['modified'] - task['entry']).total_seconds() < 0: return True tw = TaskWarrior(data_location=os.path.dirname(os.path.dirname(sys.argv[0]))) tw.overrides.update(dict(recurrence="no", hooks="no")) def hook_shift_recurrence(task): if is_new_local_recurrence_child_task(task): parent = tw.tasks.get(uuid=task['parent']['uuid']) parent_due_shift = task['due'] - parent['due'] for attr in time_attributes: if parent[attr]: task[attr] = parent[attr] + parent_due_shift
#!/usr/bin/python import sys import os from tasklib import TaskWarrior time_attributes = ('wait', 'scheduled', 'until') def is_new_local_recurrence_child_task(task): # Do not affect tasks not spun by recurrence if not task['parent']: return False # Newly created recurrence tasks actually have # modified field copied from the parent, thus # older than entry field (until their ID is generated) if (task['modified'] - task['entry']).total_seconds() < 0: return True tw = TaskWarrior(data_location=os.path.dirname(os.path.dirname(sys.argv[0]))) tw.overrides.update(dict(recurrence="no", hooks="no")) def hook_shift_recurrence(task): if is_new_local_recurrence_child_task(task): parent = tw.tasks.get(uuid=task['parent']['uuid']) parent_due_shift = task['due'] - parent['due'] for attr in time_attributes: if parent[attr]: task[attr] = parent[attr] + parent_due_shift
Add "until" attribute to list handled by hook
Add "until" attribute to list handled by hook TaskWarrior 2.5.1 (and possibly earlier versions) does not shift the "until" attribute appropriately during recurrence. This hook provides a workaround for that. Fixes [#6](https://github.com/tbabej/task.shift-recurrence/issues/6).
Python
mit
tbabej/task.shift-recurrence
0a51f23417034e6cdada4ac01e3d3dcf8026f822
xbob/blitz/__init__.py
xbob/blitz/__init__.py
#!/usr/bin/env python # vim: set fileencoding=utf-8 : # Andre Anjos <[email protected]> # Fri 20 Sep 14:45:01 2013 """Blitz++ Array bindings for Python""" from ._library import array, as_blitz, __version__, __api_version__, versions def get_include(): """Returns the directory containing the C/C++ API include directives""" return __import__('pkg_resources').resource_filename(__name__, 'include')
#!/usr/bin/env python # vim: set fileencoding=utf-8 : # Andre Anjos <[email protected]> # Fri 20 Sep 14:45:01 2013 """Blitz++ Array bindings for Python""" from ._library import array, as_blitz, __version__, __api_version__, versions def get_include(): """Returns the directory containing the C/C++ API include directives""" return __import__('pkg_resources').resource_filename(__name__, 'include') # gets sphinx autodoc done right - don't remove it __all__ = [_ for _ in dir() if not _.startswith('_')]
Fix python3 compatibility issues with doc fix
Fix python3 compatibility issues with doc fix
Python
bsd-3-clause
tiagofrepereira2012/bob.blitz,tiagofrepereira2012/bob.blitz,tiagofrepereira2012/bob.blitz
30c21806dcc347326d6ac51be2adac9ff637f241
day20/part1.py
day20/part1.py
ranges = [] for line in open('input.txt', 'r'): ranges.append(tuple(map(int, line.split('-')))) ranges.sort() lowest = 0 for l, r in ranges: if l <= lowest <= r: lowest = r + 1 print(lowest) input()
ranges = [] for line in open('input.txt', 'r'): ranges.append(tuple(map(int, line.split('-')))) ranges.sort() lowest = 0 for l, r in ranges: if l > lowest: break if lowest <= r: lowest = r + 1 print(lowest) input()
Break the loop at the first gap
Break the loop at the first gap
Python
unlicense
ultramega/adventofcode2016
4a75df6e253401cbed7b31e1882211946f02093a
src/ggrc/__init__.py
src/ggrc/__init__.py
# Copyright (C) 2016 Google Inc., authors, and contributors # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> from .bootstrap import db, logger
# Copyright (C) 2016 Google Inc., authors, and contributors # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> from ggrc.bootstrap import db __all__ = [ db ]
Remove logger from ggrc init
Remove logger from ggrc init The logger from ggrc init is never used and should be removed.
Python
apache-2.0
selahssea/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core
3885fcbb31393f936bc58842dc06bdc9ffe55151
fabfile.py
fabfile.py
#!/usr/bin/env python from fabric.api import env, run, sudo, task from fabric.context_managers import cd, prefix env.use_ssh_config = True home = '~/jarvis2' @task def pull_code(): with cd(home): run('git pull --rebase') @task def update_dependencies(): with prefix('workon jarvis2'): run('pip install --use-mirrors -r %s/requirements.txt' % (home,)) @task def restart_server(): sudo('/etc/init.d/uwsgi restart', pty=False) @task def restart_client(): run('pkill -x midori') @task(default=True) def deploy(update_deps=False): pull_code() if update_deps: update_dependencies() restart_server() restart_client() @task def full_deploy(): deploy(True)
#!/usr/bin/env python from fabric.api import env, run, sudo, task from fabric.context_managers import cd, prefix from fabric.contrib.project import rsync_project env.use_ssh_config = True home = '~/jarvis2' @task def pull_code(): with cd(home): run('git pull --rebase') @task def push_code(): rsync_project(local_dir='.', remote_dir=home, exclude=('.git', '.vagrant'), extra_opts='--filter=":- .gitignore"') @task def update_dependencies(): with prefix('workon jarvis2'): run(('pip install --quiet --use-mirrors --upgrade' ' -r {home}/requirements.txt').format(home=home)) @task def restart_server(): sudo('/etc/init.d/uwsgi restart', pty=False) @task def restart_client(): run('pkill -x midori') @task(default=True) def deploy(update_deps=False): push_code() if update_deps: update_dependencies() restart_server() restart_client() @task def full_deploy(): deploy(True)
Add task for pushing code with rsync
Add task for pushing code with rsync
Python
mit
Foxboron/Frank,mpolden/jarvis2,martinp/jarvis2,Foxboron/Frank,mpolden/jarvis2,mpolden/jarvis2,martinp/jarvis2,Foxboron/Frank,martinp/jarvis2
8af0f327b0f9b975f9fc05e41aff2f99bb26abce
people/serializers.py
people/serializers.py
from django.contrib.gis import serializers from rest_framework import serializers from people.models import Customer from people.models import InternalUser class CustomerSerializer(serializers.ModelSerializer): phone_number = serializers.IntegerField() def validate_phone_number(self, val): if len(str(val)) != 10: raise serializers.ValidationError('The phone number must be 10 digits long') class Meta: model = Customer fields = '__all__' class InternalUserSerializer(serializers.ModelSerializer): class Meta: model = InternalUser fields = '__all__'
from django.contrib.gis import serializers from rest_framework import serializers from people.models import Customer from people.models import InternalUser class CustomerSerializer(serializers.ModelSerializer): phone_number = serializers.IntegerField() def validate_phone_number(self, val): if len(str(val)) != 10: raise serializers.ValidationError('The phone number must be 10 digits long') return val class Meta: model = Customer fields = '__all__' class InternalUserSerializer(serializers.ModelSerializer): class Meta: model = InternalUser fields = '__all__'
Fix the phone number thing
Fix the phone number thing
Python
apache-2.0
rameshgopalakrishnan/v_excel_inventory,rameshgopalakrishnan/v_excel_inventory,rameshgopalakrishnan/v_excel_inventory
5e1daf36d604ee1898e8486458013e63010d6888
opps/api/models.py
opps/api/models.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import uuid import hmac from django.db import models from django.conf import settings from django.contrib.auth import get_user_model try: from hashlib import sha1 except ImportError: import sha sha1 = sha.sha User = get_user_model() class ApiKey(models.Model): user = models.ForeignKey(settings.AUTH_USER_MODEL) key = models.CharField(u"Key", max_length=255) date_insert = models.DateTimeField(u"Date insert", auto_now_add=True) def __unicode__(self): return u"{} for {}".format(self.key, self.user) def save(self, *args, **kwargs): if not self.key: self.key = self.generate_key() return super(ApiKey, self).save(*args, **kwargs) def generate_key(self): new_uuid = uuid.uuid4() return hmac.new(new_uuid.bytes, digestmod=sha1).hexdigest() def create_api_key(sender, **kwargs): if kwargs.get('created') is True: ApiKey.objects.create(user=kwargs.get('instance')) models.signals.post_save.connect(create_api_key, User)
#!/usr/bin/env python # -*- coding: utf-8 -*- import uuid import hmac from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ from django.contrib.auth import get_user_model try: from hashlib import sha1 except ImportError: import sha sha1 = sha.sha User = get_user_model() class ApiKey(models.Model): user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_(u"User")) key = models.CharField(_(u"Key"), max_length=255) date_insert = models.DateTimeField(_(u"Date insert"), auto_now_add=True) def __unicode__(self): return u"{} for {}".format(self.key, self.user) def save(self, *args, **kwargs): if not self.key: self.key = self.generate_key() return super(ApiKey, self).save(*args, **kwargs) def generate_key(self): new_uuid = uuid.uuid4() return hmac.new(new_uuid.bytes, digestmod=sha1).hexdigest() class Meta: verbose_name = _(u"API Key") verbose_name_plural = _(u"API Keys") def create_api_key(sender, **kwargs): if kwargs.get('created') is True: ApiKey.objects.create(user=kwargs.get('instance')) models.signals.post_save.connect(create_api_key, User)
Add missing translations on API model
Add missing translations on API model
Python
mit
YACOWS/opps,YACOWS/opps,williamroot/opps,opps/opps,jeanmask/opps,williamroot/opps,williamroot/opps,YACOWS/opps,opps/opps,jeanmask/opps,opps/opps,opps/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,williamroot/opps
e5deebe61fdf5e1a186673a252743ebdabe4c0e5
publishconf.py
publishconf.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'https://pappasam.github.io' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = False # Following items are often useful when publishing #DISQUS_SITENAME = "" #GOOGLE_ANALYTICS = ""
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'https://pappasam.github.io' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = False DISQUS_SITENAME = "pappasam-github-io" GOOGLE_ANALYTICS = "UA-117115805-1"
Add Disqus and Google Analytics to web site
Add Disqus and Google Analytics to web site
Python
mit
pappasam/pappasam.github.io,pappasam/pappasam.github.io
decb1699fe036c55d33c7d3b77a834cf8c3ee785
RPLCD/__init__.py
RPLCD/__init__.py
from .common import Alignment, CursorMode, ShiftMode, BacklightMode from .contextmanagers import cursor, cleared
import warnings from .common import Alignment, CursorMode, ShiftMode, BacklightMode from .contextmanagers import cursor, cleared from .gpio import CharLCD as GpioCharLCD class CharLCD(GpioCharLCD): def __init__(self, *args, **kwargs): warnings.warn("Using RPLCD.CharLCD directly is deprecated. " + "Use RPLCD.gpio.CharLCD instead!", DeprecationWarning) super(CharLCD, self).__init__(*args, **kwargs)
Add backwards compatible CharLCD wrapper
Add backwards compatible CharLCD wrapper
Python
mit
GoranLundberg/RPLCD,thijstriemstra/RPLCD,dbrgn/RPLCD,paulenuta/RPLCD
d3675b777dc95f296f26bdd9b8b05311ceac6ba5
cyder/core/system/migrations/0006_rename_table_from_system_key_value_to_system_kv.py
cyder/core/system/migrations/0006_rename_table_from_system_key_value_to_system_kv.py
# -*- coding: utf-8 -*-
from south.db import db
from south.v2 import SchemaMigration


class Migration(SchemaMigration):

    def forwards(self, orm):
        db.rename_table('system_key_value', 'system_kv')

    def backwards(self, orm):
        db.rename_table('system_kv', 'system_key_value')
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        db.rename_table('system_key_value', 'system_kv')

    def backwards(self, orm):
        db.rename_table('system_kv', 'system_key_value')

    models = {
        'system.system': {
            'Meta': {'object_name': 'System', 'db_table': "'system'"},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'system.systemkeyvalue': {
            'Meta': {'unique_together': "(('key', 'value', 'system'),)", 'object_name': 'SystemKeyValue', 'db_table': "'system_kv'"},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_quoted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'system': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['system.System']"}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }

    complete_apps = ['system']
Add ORM freeze thing to SystemKeyValue migration
Add ORM freeze thing to SystemKeyValue migration
Python
bsd-3-clause
akeym/cyder,murrown/cyder,zeeman/cyder,akeym/cyder,OSU-Net/cyder,murrown/cyder,OSU-Net/cyder,OSU-Net/cyder,zeeman/cyder,akeym/cyder,murrown/cyder,zeeman/cyder,drkitty/cyder,zeeman/cyder,drkitty/cyder,akeym/cyder,drkitty/cyder,murrown/cyder,drkitty/cyder,OSU-Net/cyder
5e2111a5ccc0bcbe7b9af4fec09b9b46eb03ebd3
GenNowPlayingMovieID.py
GenNowPlayingMovieID.py
#!/usr/bin/python
#coding: utf-8

import requests
import re

if __name__=="__main__":
    page = requests.get('https://movie.douban.com/nowplaying/beijing/')
    content=page.text.encode("utf-8")
    pattern=re.compile(r'(?<=id=")\d+(?="\n)')
    result=pattern.findall(content)
    for iterm in result:
        print iterm
#!/usr/bin/python
#coding: utf-8

import requests
import re
import time
from time import gmtime, strftime

class GenNowPlayingID(object):
    """docstring for ClassName"""
    def __init__(self):
        #super(ClassName, self).__init__()
        # self.arg = arg
        pass

    def GenNowPlayingIdList(self):
        page = requests.get('https://movie.douban.com/nowplaying/beijing/')
        content=page.text.encode("utf-8")
        pattern=re.compile(r'(?<=id=")\d+(?="\n)')
        result=pattern.findall(content)
        currentTime = strftime("%Y%m%d_%H:%M:%S", time.localtime(time.time()))
        print currentTime
        filename = './nowplaying_id/' + currentTime + '.id'
        fp = open(filename,'w')
        for iterm in result:
            fp.write(iterm+"\n")
        fp.close()

if __name__=="__main__":
    genNowPlayingID = GenNowPlayingID()
    genNowPlayingID.GenNowPlayingIdList()
Write the nowplaying movie id to file
Write the nowplaying movie id to file
Python
apache-2.0
ModernKings/MKMovieCenter,ModernKings/MKMovieCenter,ModernKings/MKMovieCenter
442f0df33b91fced038e2c497e6c03e0f82f55b2
qtpy/QtTest.py
qtpy/QtTest.py
# -*- coding: utf-8 -*-
#
# Copyright © 2014-2015 Colin Duquesnoy
# Copyright © 2009- The Spyder Developmet Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)

"""
Provides QtTest and functions

.. warning:: PySide is not supported here, that's why there is not unit tests
    running with PySide.
"""

from qtpy import PYQT5, PYQT4, PYSIDE, PythonQtError


if PYQT5:
    from PyQt5.QtTest import QTest
elif PYQT4:
    from PyQt4.QtTest import QTest as OldQTest

    class QTest(OldQTest):
        @staticmethod
        def qWaitForWindowActive(QWidget):
            OldQTest.qWaitForWindowShown(QWidget)
elif PYSIDE:
    raise ImportError('QtTest support is incomplete for PySide')
else:
    raise PythonQtError('No Qt bindings could be found')
# -*- coding: utf-8 -*-
#
# Copyright © 2014-2015 Colin Duquesnoy
# Copyright © 2009- The Spyder Developmet Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)

"""
Provides QtTest and functions
"""

from qtpy import PYQT5, PYQT4, PYSIDE, PythonQtError


if PYQT5:
    from PyQt5.QtTest import QTest
elif PYQT4:
    from PyQt4.QtTest import QTest as OldQTest

    class QTest(OldQTest):
        @staticmethod
        def qWaitForWindowActive(QWidget):
            OldQTest.qWaitForWindowShown(QWidget)
elif PYSIDE:
    from PySide.QtTest import QTest
else:
    raise PythonQtError('No Qt bindings could be found')
Add support for QTest with PySide
Add support for QTest with PySide
Python
mit
spyder-ide/qtpy,davvid/qtpy,goanpeca/qtpy,davvid/qtpy,goanpeca/qtpy
3b4ec8893fe40f811716a893f11858f7bbf2ec80
utils/get_message.py
utils/get_message.py
import amqp

from contextlib import closing


def __get_channel(connection):
    return connection.channel()


def __declare_queue(channel, queue):
    channel.queue_declare(queue=queue, durable=True, auto_delete=False)


def __get_message_from_queue(channel, queue):
    return channel.basic_get(queue=queue)


def get_message(queue):
    """ Get the first message from a queue.

    The first message from a queue is retrieved. If there is no such message,
    the function exits quietly.

    The queue is declared if one of the same name does not already exist.
    If one of the same name does already exist but has different parameters,
    an error is raised.

    The queue has durable=True and auto_delete=False set as default.

    :param queue: The name of the queue from which to get the message.

    Usage::

        >>> from utils import get_message
        >>> message = get_message('queue')
    """
    with closing(amqp.Connection()) as connection:
        channel = __get_channel(connection)
        __declare_queue(channel, queue)
        return __get_message_from_queue(channel, queue)
import amqp

from contextlib import closing


def __get_channel(connection):
    return connection.channel()


def __get_message_from_queue(channel, queue):
    return channel.basic_get(queue=queue)


def get_message(queue):
    """ Get the first message from a queue.

    The first message from a queue is retrieved. If there is no such message,
    the function exits quietly.

    :param queue: The name of the queue from which to get the message.

    Usage::

        >>> from utils import get_message
        >>> message = get_message('queue')
    """
    with closing(amqp.Connection()) as connection:
        channel = __get_channel(connection)
        return __get_message_from_queue(channel, queue)
Revert "Revert "Remove queue declaration (EAFP)""
Revert "Revert "Remove queue declaration (EAFP)"" This reverts commit 2dd5a5b422d8c1598672ea9470aee655eca3c49d.
Python
mit
jdgillespie91/trackerSpend,jdgillespie91/trackerSpend
3a0cf1f6114d6c80909f90fe122b026908200b0a
IPython/nbconvert/exporters/markdown.py
IPython/nbconvert/exporters/markdown.py
"""Markdown Exporter class"""

#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

from IPython.config import Config

from .templateexporter import TemplateExporter

#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------

class MarkdownExporter(TemplateExporter):
    """
    Exports to a markdown document (.md)
    """

    def _file_extension_default(self):
        return 'md'

    def _template_file_default(self):
        return 'markdown'

    output_mimetype = 'text/markdown'

    def _raw_mimetypes_default(self):
        return ['text/markdown', 'text/html', '']

    @property
    def default_config(self):
        c = Config({
            'NbConvertBase': {
                'display_data_priority': ['html', 'application/pdf', 'svg', 'latex', 'png', 'jpg', 'jpeg' , 'text']
            },
            'ExtractOutputPreprocessor': {
                'enabled':True}
        })
        c.merge(super(MarkdownExporter,self).default_config)
        return c
"""Markdown Exporter class"""

#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

from IPython.config import Config

from .templateexporter import TemplateExporter

#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------

class MarkdownExporter(TemplateExporter):
    """
    Exports to a markdown document (.md)
    """

    def _file_extension_default(self):
        return 'md'

    def _template_file_default(self):
        return 'markdown'

    output_mimetype = 'text/markdown'

    def _raw_mimetypes_default(self):
        return ['text/markdown', 'text/html', '']

    @property
    def default_config(self):
        c = Config({'ExtractOutputPreprocessor':{'enabled':True}})
        c.merge(super(MarkdownExporter,self).default_config)
        return c
Revert "Removed Javascript from Markdown by adding display priority to def config."
Revert "Removed Javascript from Markdown by adding display priority to def config." This reverts commit 58e05f9625c60f8deba9ddf1c74dba73e8ea7dd1.
Python
bsd-3-clause
ipython/ipython,ipython/ipython
b922273cb4786e72dbf018b33100814e2a462ebe
examples/list_stats.py
examples/list_stats.py
import sys
import os
import operator

sys.path.insert(1, os.path.abspath('..'))
from wsinfo import Info

cnt = 0
max_cnt = 100
servers = {}

with open("urls.txt", "r") as f:
    for url in f.readlines():
        url = url[:-1]
        try:
            w = Info(url)
            if w.server != "":
                if not w.server in servers:
                    servers[w.server] = 1
                else:
                    servers[w.server] += 1
            print("{:35} {:15} {:3} {:15}".format(
                w._url, w.ip, w.http_status_code, w.server))
        except Exception as e:
            print("{:30} {}".format(url, e))
        cnt += 1
        if cnt >= max_cnt:
            break

print("="*80)
print("Web server ranking:")
rank = sorted(servers.items(), key=operator.itemgetter(1), reverse=True)
for n in range(10):
    print("#{:2} {} ({})".format(n+1, rank[n][0], rank[n][1]))
# -*- coding: utf-8 -*-

import sys
import os
import operator

sys.path.insert(1, os.path.abspath('..'))
from wsinfo import Info

cnt = 0
max_cnt = 100
servers = {}

with open("urls.txt", "r") as f:
    for url in f.readlines():
        url = url[:-1]
        try:
            w = Info(url)
            if w.server != "":
                if not w.server in servers:
                    servers[w.server] = 1
                else:
                    servers[w.server] += 1
            print("{:35} {:15} {:3} {:15}".format(
                w._url, w.ip, w.http_status_code, w.server))
        except Exception as e:
            print("{:35} {}".format(url, e))
        cnt += 1
        if cnt >= max_cnt:
            break

print("="*80)
print("Web server ranking:")
rank = sorted(servers.items(), key=operator.itemgetter(1), reverse=True)
for n in range(len(rank)):
    print("#{:2} {} ({})".format(n+1, rank[n][0], rank[n][1]))
Add encoding line for Python 3
Fix: Add encoding line for Python 3
Python
mit
linusg/wsinfo
23c53752412e0aa8b2d253efbc7f983560748f3d
examples/test-index.py
examples/test-index.py
import unittest
import saliweb.test

# Import the modfoo frontend with mocks
modfoo = saliweb.test.import_mocked_frontend("modfoo", __file__,
                                             '../../frontend')


class Tests(saliweb.test.TestCase):
    def test_index(self):
        """Test index page"""
        c = modfoo.app.test_client()
        rv = c.get('/')
        self.assertIn('ModFoo: Modeling using Foo', rv.data)


if __name__ == '__main__':
    unittest.main()
import unittest
import saliweb.test

# Import the modfoo frontend with mocks
modfoo = saliweb.test.import_mocked_frontend("modfoo", __file__,
                                             '../../frontend')


class Tests(saliweb.test.TestCase):
    def test_index(self):
        """Test index page"""
        c = modfoo.app.test_client()
        rv = c.get('/')
        self.assertIn(b'ModFoo: Modeling using Foo', rv.data)


if __name__ == '__main__':
    unittest.main()
Use bytes to check Flask response
Use bytes to check Flask response The Flask response is a byte (not Unicode) string, so we should explicitly compare it to a bytes constant so the test works in both Python 2 and Python 3.
Python
lgpl-2.1
salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb
a8cefc5330909150b9ad2ee04d08b8fff8a51108
disposable_email_checker/forms.py
disposable_email_checker/forms.py
# -*- coding: utf-8 -*-
from django import forms
from django.core import validators

from .validators import validate_disposable_email


class DisposableEmailField(forms.CharField):
    default_validators = [validators.validate_email, validate_disposable_email]
# -*- coding: utf-8 -*-
from django import forms
from django.core import validators

from .validators import validate_disposable_email


class DisposableEmailField(forms.EmailField):
    default_validators = [validators.validate_email, validate_disposable_email]
Update form field to EmailField not CharField
Update form field to EmailField not CharField
Python
bsd-3-clause
aaronbassett/DisposableEmailChecker
9657f6f428134a47bfbb41f889305dff355551d8
fablab-businessplan.py
fablab-businessplan.py
# -*- encoding: utf-8 -*-
#
# Author: Massimo Menichinelli
# Homepage: http://www.openp2pdesign.org
# License: MIT
#

import xlsxwriter

workbook = xlsxwriter.Workbook('FabLab-BusinessPlan.xlsx')
worksheet = workbook.add_worksheet()

worksheet.write('A1', 'Hello world')

workbook.close()
# -*- encoding: utf-8 -*-
#
# Author: Massimo Menichinelli
# Homepage: http://www.openp2pdesign.org
# License: MIT
#

import xlsxwriter

# Create the file
workbook = xlsxwriter.Workbook('FabLab-BusinessPlan.xlsx')

# Create the worksheets
expenses = workbook.add_worksheet('Expenses')
activities = workbook.add_worksheet('Activities')
membership = workbook.add_worksheet('Membership')
total = workbook.add_worksheet('Total')

# Add content to the Expenses worksheet
expenses.write('A1', 'Hello world')

# Save and close the file
workbook.close()
Add first structure of the script
Add first structure of the script
Python
mit
openp2pdesign/FabLab-BusinessPlan
8c34cc43d23e0d97c531e1aa5d2339693db554e0
projects/projectdl.py
projects/projectdl.py
#!/usr/bin/python3

from bs4 import BeautifulSoup
import requests

r = requests.get("https://projects.archlinux.org/")
soup = BeautifulSoup(r.text)

repos = soup.select(".sublevel-repo a")

repo_names = []
for repo in repos:
    repo_name = repo.string
    if repo_name[-4:] == ".git":
        repo_name = repo_name[:-4]
    repo_names.append(repo_name)

with open("projects.txt", mode = "w", encoding = "utf-8") as projects_file:
    for repo_name in repo_names:
        projects_file.write(repo_name + "\n")
#!/usr/bin/python3

from bs4 import BeautifulSoup
import requests
import simplediff
from pprint import pprint

r = requests.get("https://projects.archlinux.org/")
soup = BeautifulSoup(r.text)

repos = soup.select(".sublevel-repo a")

with open("projects.txt", mode = "r", encoding = "utf-8") as projects_file:
    cur_repos = projects_file.readlines()

new_repos = []
for repo in repos:
    repo_name = repo.string
    if repo_name[-4:] == ".git":
        repo_name = repo_name[:-4]
    new_repos.append(repo_name + "\n")

repo_diff = simplediff.string_diff(''.join(cur_repos), ''.join(new_repos))
added = []
removed = []
for (diff_type, values) in repo_diff:
    if diff_type == "+":
        added.extend(values)
    elif diff_type == "-":
        removed.extend(values)

if added:
    print("Added:")
    pprint(added)
if removed:
    print("Removed:")
    pprint(removed)

if added or removed:
    with open("projects.txt", mode = "w", encoding = "utf-8") as projects_file:
        for repo_name in new_repos:
            projects_file.write(repo_name)
else:
    print("No projects were added or removed.")
Update project downloader to do diffs before overwriting
Update project downloader to do diffs before overwriting
Python
unlicense
djmattyg007/archlinux,djmattyg007/archlinux
14e98bc2038f50f38244550a1fa7ec3f836ed5f3
http/online_checker.py
http/online_checker.py
import http.client


def __is_online(domain, sub_path, response_status, response_reason):
    conn = http.client.HTTPSConnection(domain, timeout=1)
    conn.request("HEAD", sub_path)
    response = conn.getresponse()
    conn.close()

    return (response.status == response_status) and (response.reason == response_reason)


def is_rm_doc_online():
    return __is_online("docs.rainmeter.net", "/manual-beta/", 200, "OK")


def is_gh_online():
    return __is_online("github.com", "/", 200, "OK")


def is_gh_raw_online():
    """
    Check if the raw content delivery from Github is online.

    It is routed to 301 and Moved Permanently because per standard
    it is routed to github.com because it natively only accepts real content paths.

    We do not follow reroutes else it would be 200 OK on github.com
    but we already have another method to check for that
    and Github.com is on a different service than the content delivery.
    """
    return __is_online("raw.githubusercontent.com", "/", 301, "Moved Permanently")
"""
This module handles every related to online checking.

We need to request several information from various providers.
We could just try to request them, but instead you can ping them first
and check if they are even reachable.

This does not mean, that do not need to handle a failure on their part
(e.g. if the server is responding, but can't deliver the information).
"""

import http.client


def __is_online(domain, sub_path, response_status, response_reason):
    conn = http.client.HTTPSConnection(domain, timeout=1)
    conn.request("HEAD", sub_path)
    response = conn.getresponse()
    conn.close()

    return (response.status == response_status) and (response.reason == response_reason)


def is_rm_doc_online():
    """
    Check if the Rainmeter documentation page is online.

    The Rainmeter online documentation is required to synchronize the local model
    with the latest online version. These information are stored and parsed
    to display them as a tooltip on special constructs.
    """
    return __is_online("docs.rainmeter.net", "/manual-beta/", 200, "OK")


def is_gh_online():
    """
    Check if GitHub is online.

    The different services of GitHub are running in seperat services
    and thus just being GitHub online does not mean,
    that required parts are online.
    """
    return __is_online("github.com", "/", 200, "OK")


def is_gh_raw_online():
    """
    Check if the raw content delivery from Github is online.

    It is routed to 301 and Moved Permanently because per standard
    it is routed to github.com because it natively only accepts real content paths.

    We do not follow reroutes else it would be 200 OK on github.com
    but we already have another method to check for that
    and Github.com is on a different service than the content delivery.
    """
    return __is_online("raw.githubusercontent.com", "/", 301, "Moved Permanently")
Add docstring to online checker
Add docstring to online checker
Python
mit
thatsIch/sublime-rainmeter
72e3e1177dc23b4f3d358294d68b58c01d7c5931
stevedore/__init__.py
stevedore/__init__.py
# flake8: noqa

__all__ = [
    'ExtensionManager',
    'EnabledExtensionManager',
    'NamedExtensionManager',
    'HookManager',
    'DriverManager',
]

from .extension import ExtensionManager
from .enabled import EnabledExtensionManager
from .named import NamedExtensionManager
from .hook import HookManager
from .driver import DriverManager

import logging

# Configure a NullHandler for our log messages in case
# the app we're used from does not set up logging.
LOG = logging.getLogger('stevedore')

if hasattr(logging, 'NullHandler'):
    LOG.addHandler(logging.NullHandler())
else:
    class NullHandler(logging.Handler):
        def handle(self, record):
            pass

        def emit(self, record):
            pass

        def createLock(self):
            self.lock = None

    LOG.addHandler(NullHandler())
# flake8: noqa

__all__ = [
    'ExtensionManager',
    'EnabledExtensionManager',
    'NamedExtensionManager',
    'HookManager',
    'DriverManager',
]

from .extension import ExtensionManager
from .enabled import EnabledExtensionManager
from .named import NamedExtensionManager
from .hook import HookManager
from .driver import DriverManager

import logging

# Configure a NullHandler for our log messages in case
# the app we're used from does not set up logging.
LOG = logging.getLogger('stevedore')
LOG.addHandler(logging.NullHandler())
Remove work around for NullHandler
Remove work around for NullHandler logging module added NullHandler in Python 2.7, we have dropped Python 2.6 support now, so don't need the work around any more. Change-Id: Ib6fdbc2f92cd66f4846243221e696f1b1fa712df
Python
apache-2.0
openstack/stevedore
bda756847e31e97eb8363f48bed67035a3f46d67
settings/travis.py
settings/travis.py
from defaults import *

DATABASES = {
    'default': {
        #'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        # 'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'atlas_test',
        'USER': 'postgres',
        'PASSWORD': '',
        'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '', # Set to empty string for default. Not used with sqlite3.
    }
}

HAYSTACK_CONNECTIONS = {
    'default': {
        'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'
    }
}
from defaults import *

DATABASES = {
    'default': {
        #'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        # 'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'atlas_test',
        'USER': 'postgres',
        'PASSWORD': '',
        'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '', # Set to empty string for default. Not used with sqlite3.
    }
}

HAYSTACK_CONNECTIONS = {
    'default': {
        'ENGINE': 'storybase_geo.search.backends.Solr2155Engine',
        'URL': 'http://localhost:8080/solr3',
    },
}
Use Solr for testing with Travis CI
Use Solr for testing with Travis CI
Python
mit
denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase
ca4d1454bd68715098001f9815af5325d157e786
geodj/youtube.py
geodj/youtube.py
from gdata.youtube.service import YouTubeService, YouTubeVideoQuery
from django.utils.encoding import smart_str
import re

class YoutubeMusic:
    def __init__(self):
        self.service = YouTubeService()

    def search(self, artist):
        query = YouTubeVideoQuery()
        query.vq = artist
        query.orderby = 'relevance'
        query.racy = 'exclude'
        query.format = '5'
        query.max_results = 50
        query.categories.append("/Music")
        feed = self.service.YouTubeQuery(query)
        results = []
        for entry in feed.entry:
            if not self.is_valid_entry(artist, entry):
                continue
            results.append({
                'url': entry.media.player.url,
                'title': smart_str(entry.media.title.text),
                'duration': int(entry.media.duration.seconds),
            })
        return {'artist': artist, 'results': results}

    def is_valid_entry(self, artist, entry):
        duration = int(entry.media.duration.seconds)
        title = smart_str(entry.media.title.text).lower()
        if entry.rating is not None and float(entry.rating.average) < 3.5:
            return False
        if entry.statistics is None or entry.statistics.view_count < 1000:
            return False
        if duration < (2 * 60) or duration > (9 * 60):
            return False
        if artist.lower() not in title:
            return False
        if re.search(r"\b(perform|performance|concert|cover)\b", title):
            return False
        return True
from gdata.youtube.service import YouTubeService, YouTubeVideoQuery
from django.utils.encoding import smart_str
import re

class YoutubeMusic:
    def __init__(self):
        self.service = YouTubeService()

    def search(self, artist):
        query = YouTubeVideoQuery()
        query.vq = artist
        query.orderby = 'relevance'
        query.racy = 'exclude'
        query.format = '5'
        query.max_results = 50
        query.categories.append("/Music")
        feed = self.service.YouTubeQuery(query)
        results = []
        for entry in feed.entry:
            if not self.is_valid_entry(artist, entry):
                continue
            results.append({
                'url': entry.media.player.url,
                'title': smart_str(entry.media.title.text),
                'duration': int(entry.media.duration.seconds),
            })
        return {'artist': artist, 'results': results}

    def is_valid_entry(self, artist, entry):
        duration = int(entry.media.duration.seconds)
        title = smart_str(entry.media.title.text).lower()
        if entry.rating is not None and float(entry.rating.average) < 3.5:
            return False
        if entry.statistics is None or int(entry.statistics.view_count) < 1000:
            return False
        if duration < (2 * 60) or duration > (9 * 60):
            return False
        if artist.lower() not in title:
            return False
        if re.search(r"\b(perform|performance|concert|cover)\b", title):
            return False
        return True
Fix view count cutoff for YT
Fix view count cutoff for YT
Python
mit
6/GeoDJ,6/GeoDJ
080e4336675ea29b28b63698e5a0e77e91d54a2b
exercises/acronym/acronym_test.py
exercises/acronym/acronym_test.py
import unittest

from acronym import abbreviate


# test cases adapted from `x-common//canonical-data.json` @ version: 1.0.0

class AcronymTest(unittest.TestCase):
    def test_basic(self):
        self.assertEqual(abbreviate('Portable Network Graphics'), 'PNG')

    def test_lowercase_words(self):
        self.assertEqual(abbreviate('Ruby on Rails'), 'ROR')

    def test_camelcase(self):
        self.assertEqual(abbreviate('HyperText Markup Language'), 'HTML')

    def test_punctuation(self):
        self.assertEqual(abbreviate('First In, First Out'), 'FIFO')

    def test_all_caps_words(self):
        self.assertEqual(abbreviate('PHP: Hypertext Preprocessor'), 'PHP')

    def test_non_acronym_all_caps_word(self):
        self.assertEqual(abbreviate('GNU Image Manipulation Program'), 'GIMP')

    def test_hyphenated(self):
        self.assertEqual(
            abbreviate('Complementary metal-oxide semiconductor'), 'CMOS')


if __name__ == '__main__':
    unittest.main()
import unittest

from acronym import abbreviate


# test cases adapted from `x-common//canonical-data.json` @ version: 1.1.0

class AcronymTest(unittest.TestCase):
    def test_basic(self):
        self.assertEqual(abbreviate('Portable Network Graphics'), 'PNG')

    def test_lowercase_words(self):
        self.assertEqual(abbreviate('Ruby on Rails'), 'ROR')

    def test_punctuation(self):
        self.assertEqual(abbreviate('First In, First Out'), 'FIFO')

    def test_all_caps_words(self):
        self.assertEqual(abbreviate('PHP: Hypertext Preprocessor'), 'PHP')

    def test_non_acronym_all_caps_word(self):
        self.assertEqual(abbreviate('GNU Image Manipulation Program'), 'GIMP')

    def test_hyphenated(self):
        self.assertEqual(
            abbreviate('Complementary metal-oxide semiconductor'), 'CMOS')


if __name__ == '__main__':
    unittest.main()
Remove test with mixed-case word
acronym: Remove test with mixed-case word see: https://github.com/exercism/x-common/pull/788
Python
mit
jmluy/xpython,smalley/python,exercism/xpython,exercism/python,smalley/python,jmluy/xpython,pheanex/xpython,pheanex/xpython,exercism/xpython,behrtam/xpython,exercism/python,N-Parsons/exercism-python,N-Parsons/exercism-python,mweb/python,mweb/python,behrtam/xpython
21783b68abeb8085b96f6b49fceaac079d23a341
heufybot/modules/commands/join.py
heufybot/modules/commands/join.py
from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements


class JoinCommand(BotCommand):
    implements(IPlugin, IBotModule)

    name = "Join"

    def triggers(self):
        return ["join"]

    def execute(self, server, source, command, params, data):
        if len(params) < 1:
            self.bot.servers[server].outputHandler.cmdPRIVMSG(source, "Join what?")
            return
        if len(params) > 2:
            self.bot.servers[server].outputHandler.cmdJOIN(params[0], params[1])
        else:
            self.bot.servers[server].outputHandler.cmdJOIN(params[0])


joinCommand = JoinCommand()
from twisted.plugin import IPlugin
from heufybot.moduleinterface import IBotModule
from heufybot.modules.commandinterface import BotCommand
from zope.interface import implements


class JoinCommand(BotCommand):
    implements(IPlugin, IBotModule)

    name = "Join"

    def triggers(self):
        return ["join"]

    def execute(self, server, source, command, params, data):
        if len(params) < 1:
            self.bot.servers[server].outputHandler.cmdPRIVMSG(source, "Join what?")
            return
        if len(params) > 1:
            self.bot.servers[server].outputHandler.cmdJOIN(params[0], params[1])
        else:
            self.bot.servers[server].outputHandler.cmdJOIN(params[0])


joinCommand = JoinCommand()
Check the parameters for the Join command correctly
Check the parameters for the Join command correctly
Python
mit
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
932748b11bce94076f2e2d5637d4e8db8d4d1dbf
tcelery/__init__.py
tcelery/__init__.py
from __future__ import absolute_import

import celery

from tornado import ioloop

from .connection import ConnectionPool
from .producer import NonBlockingTaskProducer
from .result import AsyncResult

VERSION = (0, 3, 3)
__version__ = '.'.join(map(str, VERSION))


def setup_nonblocking_producer(celery_app=None, io_loop=None,
                               on_ready=None, result_cls=AsyncResult,
                               limit=1):
    celery_app = celery_app or celery.current_app
    io_loop = io_loop or ioloop.IOLoop.instance()

    NonBlockingTaskProducer.app = celery_app
    NonBlockingTaskProducer.conn_pool = ConnectionPool(limit, io_loop)
    NonBlockingTaskProducer.result_cls = result_cls

    if celery_app.conf['BROKER_URL'] and celery_app.conf['BROKER_URL'].startswith('amqp'):
        celery.app.amqp.AMQP.producer_cls = NonBlockingTaskProducer

    def connect():
        broker_url = celery_app.connection().as_uri(include_password=True)
        options = celery_app.conf.get('CELERYT_PIKA_OPTIONS', {})
        NonBlockingTaskProducer.conn_pool.connect(broker_url,
                                                  options=options,
                                                  callback=on_ready)

    io_loop.add_callback(connect)
from __future__ import absolute_import

import celery

from tornado import ioloop

from .connection import ConnectionPool
from .producer import NonBlockingTaskProducer
from .result import AsyncResult

VERSION = (0, 3, 4)
__version__ = '.'.join(map(str, VERSION)) + '-dev'


def setup_nonblocking_producer(celery_app=None, io_loop=None,
                               on_ready=None, result_cls=AsyncResult,
                               limit=1):
    celery_app = celery_app or celery.current_app
    io_loop = io_loop or ioloop.IOLoop.instance()

    NonBlockingTaskProducer.app = celery_app
    NonBlockingTaskProducer.conn_pool = ConnectionPool(limit, io_loop)
    NonBlockingTaskProducer.result_cls = result_cls

    if celery_app.conf['BROKER_URL'] and celery_app.conf['BROKER_URL'].startswith('amqp'):
        celery.app.amqp.AMQP.producer_cls = NonBlockingTaskProducer

    def connect():
        broker_url = celery_app.connection().as_uri(include_password=True)
        options = celery_app.conf.get('CELERYT_PIKA_OPTIONS', {})
        NonBlockingTaskProducer.conn_pool.connect(broker_url,
                                                  options=options,
                                                  callback=on_ready)

    io_loop.add_callback(connect)
Mark master as development version
Mark master as development version
Python
bsd-3-clause
qudos-com/tornado-celery,sangwonl/tornado-celery,mher/tornado-celery,shnjp/tornado-celery
786f40b8bd93fa975819de5ae633361e09e1ff72
emission_events/emission_events/settings/production.py
emission_events/emission_events/settings/production.py
#######################
# PRODUCTION SETTINGS #
#######################

from os import environ

from django.core.exceptions import ImproperlyConfigured

from .base import *

DEBUG = False


def get_env_setting(setting):
    """ Get the environment setting or return exception """
    try:
        return environ[setting]
    except KeyError:
        error_msg = "Set the %s env variable" % setting
        raise ImproperlyConfigured(error_msg)

######################
# HOST CONFIGURATION #
######################

# https://docs.djangoproject.com/en/1.7/ref/settings/#allowed-hosts
# https://docs.djangoproject.com/en/1.5/releases/1.5/#allowed-hosts-required-in-production
ALLOWED_HOSTS = ['*.texastribune.org']

############################
# SECRET KEY CONFIGURATION #
############################

# https://docs.djangoproject.com/en/1.7/ref/settings/#secret-key
SECRET_KEY = get_env_setting('SECRET_KEY')
#######################
# PRODUCTION SETTINGS #
#######################

from os import environ

from django.core.exceptions import ImproperlyConfigured

from .base import *

DEBUG = False


def get_env_setting(setting):
    """ Get the environment setting or return exception """
    try:
        return environ[setting]
    except KeyError:
        error_msg = "Set the %s env variable" % setting
        raise ImproperlyConfigured(error_msg)

######################
# HOST CONFIGURATION #
######################

# https://docs.djangoproject.com/en/1.7/ref/settings/#allowed-hosts
# https://docs.djangoproject.com/en/1.5/releases/1.5/#allowed-hosts-required-in-production
ALLOWED_HOSTS = ['.texastribune.org']

############################
# SECRET KEY CONFIGURATION #
############################

# https://docs.djangoproject.com/en/1.7/ref/settings/#secret-key
SECRET_KEY = get_env_setting('SECRET_KEY')
Fix the things that are broken
Fix the things that are broken
Python
mit
texastribune/emissions-database,texastribune/emissions-database,texastribune/emissions-database,texastribune/emissions-database
b3e0d640ff4b7dbc5c30dc9ebb69578acfe02f07
jose/__init__.py
jose/__init__.py
__version__ = "0.3.0"
__author__ = 'Michael Davis'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Michael Davis'


from .exceptions import JOSEError
from .exceptions import JWSError
from .exceptions import ExpiredSignatureError
from .exceptions import JWTError
__version__ = "0.4.0"
__author__ = 'Michael Davis'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Michael Davis'


from .exceptions import JOSEError
from .exceptions import JWSError
from .exceptions import ExpiredSignatureError
from .exceptions import JWTError
Validate sub and jti claims
Validate sub and jti claims
Python
mit
mpdavis/python-jose
f429707e0a5a97d741ac9c118646c9d171a5830d
kiwi/ui/pixbufutils.py
kiwi/ui/pixbufutils.py
#
# Kiwi: a Framework and Enhanced Widgets for Python
#
# Copyright (C) 2012 Async Open Source
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
# Author(s): Johan Dahlin <[email protected]>
#

from gtk import gdk


def pixbuf_from_string(pixbuf_data, format='png'):
    loader = gdk.PixbufLoader(format)
    loader.write(pixbuf_data)
    loader.close()
    return loader.get_pixbuf()
#
# Kiwi: a Framework and Enhanced Widgets for Python
#
# Copyright (C) 2012 Async Open Source
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
# Author(s): Johan Dahlin <[email protected]>
#

from gtk import gdk


def pixbuf_from_string(pixbuf_data, format='png', width=None, height=None):
    loader = gdk.PixbufLoader(format)
    loader.write(pixbuf_data)
    loader.close()
    pixbuf = loader.get_pixbuf()

    if width is not None or height is not None:
        scaled_pixbuf = pixbuf.scale_simple(width, height, gdk.INTERP_BILINEAR)
        if scaled_pixbuf is None:
            print 'Warning: could not scale image'
        else:
            pixbuf = scaled_pixbuf
    return pixbuf
Add width and height to pixbuf_from_string() and scale the image if any of them are set
Add width and height to pixbuf_from_string() and scale the image if any of them are set
Python
lgpl-2.1
stoq/kiwi
84b907ad78f03d614e8af14578c21e1228ab723d
top.py
top.py
"""
Hacker News Top:
-Get top stories from Hacker News' official API
-Record all users who comment on those stories

Author: Rylan Santinon
"""
from api_connector import *
from csv_io import *

def main():
    conn = ApiConnector()
    csvio = CsvIo()
    article_list = conn.get_top()
    stories = []
    for i in article_list:
        try:
            story = conn.get_item(i)
            if story.get("deleted"):
                continue
            print csvio.story_to_csv(story)
            stories.append(story)
        except NetworkError as e:
            print e

    csvio.write_stories_csv(stories)

    for story in stories:
        try:
            conn.get_kids(story)
        except NetworkError as e:
            print e

    users = []
    for u in sorted(conn.user_dict.keys()):
        try:
            userjson = conn.get_user(u)
            users.append(userjson)
            print u
        except NetworkError as e:
            print e

    csvio.write_users_csv(users)

if __name__ == '__main__':
    main()
    CsvIo().concat_users()
    CsvIo().concat_stories()
"""
Hacker News Top:
-Get top stories from Hacker News' official API
-Record all users who comment on those stories

Author: Rylan Santinon
"""
from api_connector import *
from csv_io import *

def main():
    conn = ApiConnector()
    csvio = CsvIo()
    article_list = conn.get_top()
    stories = []
    for i in article_list:
        try:
            story = conn.get_item(i)
            if story.get("deleted"):
                continue
            print csvio.story_to_csv(story)
            stories.append(story)
        except NetworkError as e:
            print e

    csvio.write_stories_csv(stories)

    for story in stories:
        try:
            conn.get_kids(story)
        except NetworkError as e:
            print e

    users = []
    for u in sorted(conn.user_dict.keys()):
        try:
            userjson = conn.get_user(u)
            users.append(userjson)
            print u
        except NetworkError as e:
            print e

    csvio.write_users_csv(users)

if __name__ == '__main__':
    csvio = CsvIo()
    main()
    csvio.concat_users()
    csvio.concat_stories()
Use common object for csvio calls
Use common object for csvio calls
Python
apache-2.0
rylans/hackernews-top,davande/hackernews-top
193781255bca1a8748945a467e6d7736f4d460cb
jp2_online/settings/production.py
jp2_online/settings/production.py
# -*- coding: utf-8 -*-
from .base import *

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False

ALLOWED_HOSTS = ['YOUR_DOMAIN(S)_GO_HERE']

CORS_ORIGIN_WHITELIST = ('ALLOWED_DOMAINS')

STATIC_ROOT = os.path.join(BASE_DIR, "../static/")
# -*- coding: utf-8 -*-
from .base import *

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False

ALLOWED_HOSTS = ['138.197.197.47']

CORS_ORIGIN_WHITELIST = ('ALLOWED_DOMAINS')

STATIC_ROOT = os.path.join(BASE_DIR, "../static/")
Change allowed hosts to include server ip
Change allowed hosts to include server ip
Python
mit
erikiado/jp2_online,erikiado/jp2_online,erikiado/jp2_online
124489e979ed9d913b97ff688ce65d678579e638
morse_modem.py
morse_modem.py
import cProfile
from demodulate.cfg import *
from demodulate.detect_tone import *
from demodulate.element_resolve import *
from gen_test import *

if __name__ == "__main__":
    #gen_test_data()
    data = gen_test_data()
    #print len(data)/SAMPLE_FREQ
    #cProfile.run('detect_tone(data)')
    #print detect_tone(data)
    element_resolve(*detect_tone(data))
import cProfile
from demodulate.cfg import *
from demodulate.detect_tone import *
from demodulate.element_resolve import *
from gen_tone import *
import random

if __name__ == "__main__":
    WPM = random.uniform(2,20)
    pattern = [1,0,1,1,1,0,0,0,0,0,0,0] # morse code 'A'
    #gen_test_data()
    data = gen_tone(pattern)
    #print len(data)/SAMPLE_FREQ
    #cProfile.run('detect_tone(data)')
    #print detect_tone(data)
    element_resolve(*detect_tone(data))
Add tone generation arguments to gen_tone
Add tone generation arguments to gen_tone
Python
mit
nickodell/morse-code
f39f7d64ba8ca8051b24407811239f960cc6f561
lib/collect/backend.py
lib/collect/backend.py
import lib.collect.config as config

if config.BACKEND == 'dynamodb':
    import lib.collect.backends.dymamodb as api
else:
    import lib.collect.backends.localfs as api
import lib.collect.config as config

try:
    if config.BACKEND == 'dynamodb':
        import lib.collect.backends.dymamodb as api
    else:
        import lib.collect.backends.localfs as api
except AttributeError:
    import lib.collect.backends.localfs as api
Fix bug in module selection.
Fix bug in module selection.
Python
mit
ic/mark0
47cedcd514f7c569c1637acc331890bb49aedd90
thumbor/app.py
thumbor/app.py
#!/usr/bin/python
# -*- coding: utf-8 -*-

# thumbor imaging service
# https://github.com/globocom/thumbor/wiki

# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com [email protected]

import tornado.web
import tornado.ioloop

from thumbor.handlers.healthcheck import HealthcheckHandler
from thumbor.handlers.upload import UploadHandler
from thumbor.handlers.images import ImagesHandler
from thumbor.handlers.image import ImageHandler
from thumbor.url import Url
from thumbor.handlers.imaging import ImagingHandler


class ThumborServiceApp(tornado.web.Application):

    def __init__(self, context):
        self.context = context

        handlers = [
            (r'/healthcheck', HealthcheckHandler),
        ]

        # TODO Old handler to upload images
        if context.config.UPLOAD_ENABLED:
            handlers.append(
                (r'/upload', UploadHandler, { 'context': context })
            )

        # Handler to upload images (POST).
        handlers.append(
            (r'/image', ImagesHandler, { 'context': context })
        )

        # Handler to retrieve or modify existing images (GET, PUT, DELETE)
        handlers.append(
            (r'/image/(.*)', ImageHandler, { 'context': context })
        )

        # Imaging handler (GET)
        handlers.append(
            (Url.regex(), ImagingHandler, { 'context': context })
        )

        super(ThumborServiceApp, self).__init__(handlers)
#!/usr/bin/python
# -*- coding: utf-8 -*-

# thumbor imaging service
# https://github.com/globocom/thumbor/wiki

# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com [email protected]

import tornado.web
import tornado.ioloop

from thumbor.handlers.healthcheck import HealthcheckHandler
from thumbor.handlers.upload import UploadHandler
from thumbor.handlers.images import ImagesHandler
from thumbor.handlers.image import ImageHandler
from thumbor.url import Url
from thumbor.handlers.imaging import ImagingHandler


class ThumborServiceApp(tornado.web.Application):

    def __init__(self, context):
        self.context = context

        handlers = [
            (r'/healthcheck', HealthcheckHandler),
        ]

        if context.config.UPLOAD_ENABLED:
            # TODO Old handler to upload images
            handlers.append(
                (r'/upload', UploadHandler, { 'context': context })
            )

        # Handler to upload images (POST).
        handlers.append(
            (r'/image', ImagesHandler, { 'context': context })
        )

        # Handler to retrieve or modify existing images (GET, PUT, DELETE)
        handlers.append(
            (r'/image/(.*)', ImageHandler, { 'context': context })
        )

        # Imaging handler (GET)
        handlers.append(
            (Url.regex(), ImagingHandler, { 'context': context })
        )

        super(ThumborServiceApp, self).__init__(handlers)
Disable REST Upload by default
Disable REST Upload by default
Python
mit
fanhero/thumbor,fanhero/thumbor,fanhero/thumbor,fanhero/thumbor
0be201e5f00d0a0327c2878d9d944952157ccaef
units/games.py
units/games.py
import random

def eightball():
    return random.choice(("It is certain", "It is decidedly so", "Without a doubt", "Yes, definitely", "You may rely on it", "As I see it, yes", "Most likely", "Outlook good", "Yes", "Signs point to yes", "Reply hazy try again", "Ask again later", "Better not tell you now", "Cannot predict now", "Concentrate and ask again", "Don't count on it", "My reply is no", "My sources say no", "Outlook not so good", "Very doubtful"))
import random

def eightball():
    return random.choice(("It is certain", "It is decidedly so", "Without a doubt", "Yes, definitely", "You may rely on it", "As I see it, yes", "Most likely", "Outlook good", "Yes", "Signs point to yes", "Reply hazy; try again", "Ask again later", "Better not tell you now", "Cannot predict now", "Concentrate and ask again", "Don't count on it", "My reply is no", "My sources say no", "Outlook not so good", "Very doubtful"))
Fix eightball method response grammar
Fix eightball method response grammar
Python
mit
Harmon758/Harmonbot,Harmon758/Harmonbot
f6bff4e5360ba2c0379c129a111d333ee718c1d3
datafeeds/usfirst_event_teams_parser.py
datafeeds/usfirst_event_teams_parser.py
import re

from BeautifulSoup import BeautifulSoup

from datafeeds.parser_base import ParserBase


class UsfirstEventTeamsParser(ParserBase):
    @classmethod
    def parse(self, html):
        """
        Find what Teams are attending an Event, and return their team_numbers.
        """
        teamRe = re.compile(r'whats-going-on/team/FRC/[A-Za-z0-9=&;\-:]*?">\d+')
        teamNumberRe = re.compile(r'\d+$')
        tpidRe = re.compile(r'\d+')

        teams = list()
        for teamResult in teamRe.findall(html):
            team = dict()
            team["team_number"] = int(teamNumberRe.findall(teamResult)[0])
            team["first_tpid"] = int(tpidRe.findall(teamResult)[0])
            teams.append(team)

        soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES)
        more_pages = soup.find('a', {'title': 'Go to next page'}) is not None

        return teams, more_pages
import re

from BeautifulSoup import BeautifulSoup

from datafeeds.parser_base import ParserBase


class UsfirstEventTeamsParser(ParserBase):
    @classmethod
    def parse(self, html):
        """
        Find what Teams are attending an Event, and return their team_numbers.
        """
        teamRe = re.compile(r'whats-going-on\/team\/(\d*)\?ProgramCode=FRC">(\d*)')

        teams = list()
        for first_tpid, team_number in teamRe.findall(html):
            team = dict()
            team["first_tpid"] = int(first_tpid)
            team["team_number"] = int(team_number)
            teams.append(team)

        soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES)
        more_pages = soup.find('a', {'title': 'Go to next page'}) is not None

        return teams, more_pages
Fix event teams parser for new format
Fix event teams parser for new format
Python
mit
the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,1fish2/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,tsteward/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance,1fish2/the-blue-alliance,synth3tk/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,1fish2/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,1fish2/the-blue-alliance,synth3tk/the-blue-alliance,josephbisch/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,1fish2/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance
b80726a5a36480b4146fc4df89ad96a738aa2091
waitress/settings/__init__.py
waitress/settings/__init__.py
import os

if os.getenv('OPENSHIFT_REPO_DIR'):
    from .staging import *
elif os.getenv('TRAVIS_CI'):
    from .testing import *
else:
    from .development import *
import os

if os.getenv('OPENSHIFT_REPO_DIR'):
    from .staging import *
elif os.getenv('TRAVIS_CI'):
    from .testing import *
elif os.getenv('HEROKU'):
    from .production import *
else:
    from .development import *
Use production settings in Heroku
[fix] Use production settings in Heroku
Python
mit
waitress-andela/waitress,andela-osule/waitress,andela-osule/waitress,andela-osule/waitress,waitress-andela/waitress,waitress-andela/waitress
814684225140231de25dc7ee616c6bfa73b312ee
addons/hr/__terp__.py
addons/hr/__terp__.py
{
    "name" : "Human Resources",
    "version" : "1.0",
    "author" : "Tiny",
    "category" : "Generic Modules/Human Resources",
    "website" : "http://tinyerp.com/module_hr.html",
    "description": """
    Module for human resource management. You can manage:
    * Employees and hierarchies
    * Work hours sheets
    * Attendances and sign in/out system
    * Holidays

    Different reports are also provided, mainly for attendance statistics.
    """,
    "depends" : ["base"],
    "init_xml" : [],
    "demo_xml" : [
        "hr_demo.xml",
        "hr_bel_holidays_2005.xml",
        "hr_department_demo.xml"
    ],
    "update_xml" : [
        "hr_view.xml",
        "hr_report.xml",
        "hr_wizard.xml",
        "hr_department_view.xml"
    ],
    "active": False,
    "installable": True
}
{
    "name" : "Human Resources",
    "version" : "1.0",
    "author" : "Tiny",
    "category" : "Generic Modules/Human Resources",
    "website" : "http://tinyerp.com/module_hr.html",
    "description": """
    Module for human resource management. You can manage:
    * Employees and hierarchies
    * Work hours sheets
    * Attendances and sign in/out system
    * Holidays

    Different reports are also provided, mainly for attendance statistics.
    """,
    "depends" : ["base"],
    "init_xml" : [],
    "demo_xml" : [
        "hr_demo.xml",
        "hr_bel_holidays_2005.xml",
        "hr_department_demo.xml"
    ],
    "update_xml" : [
        "hr_view.xml",
        "hr_report.xml",
        "hr_wizard.xml",
        "hr_department_view.xml",
        "hr_security.xml"
    ],
    "active": False,
    "installable": True
}
Add hr_security.xml file entry in update_xml section
Add hr_security.xml file entry in update_xml section bzr revid: [email protected]
Python
agpl-3.0
VielSoft/odoo,naousse/odoo,tarzan0820/odoo,BT-ojossen/odoo,leoliujie/odoo,Danisan/odoo-1,ehirt/odoo,odooindia/odoo,ThinkOpen-Solutions/odoo,bakhtout/odoo-educ,ingadhoc/odoo,naousse/odoo,datenbetrieb/odoo,charbeljc/OCB,sysadminmatmoz/OCB,cysnake4713/odoo,arthru/OpenUpgrade,bwrsandman/OpenUpgrade,ovnicraft/odoo,sysadminmatmoz/OCB,Adel-Magebinary/odoo,realsaiko/odoo,OpusVL/odoo,inspyration/odoo,tvibliani/odoo,ClearCorp-dev/odoo,rschnapka/odoo,jusdng/odoo,florentx/OpenUpgrade,blaggacao/OpenUpgrade,CopeX/odoo,ChanduERP/odoo,JCA-Developpement/Odoo,ThinkOpen-Solutions/odoo,mustafat/odoo-1,synconics/odoo,ovnicraft/odoo,Ichag/odoo,dalegregory/odoo,guerrerocarlos/odoo,hbrunn/OpenUpgrade,hoatle/odoo,ojengwa/odoo,juanalfonsopr/odoo,demon-ru/iml-crm,nexiles/odoo,rschnapka/odoo,ujjwalwahi/odoo,sv-dev1/odoo,fuselock/odoo,hifly/OpenUpgrade,arthru/OpenUpgrade,joshuajan/odoo,Grirrane/odoo,Codefans-fan/odoo,srsman/odoo,steedos/odoo,mkieszek/odoo,bakhtout/odoo-educ,leoliujie/odoo,poljeff/odoo,fuselock/odoo,fjbatresv/odoo,tvibliani/odoo,abstract-open-solutions/OCB,pedrobaeza/OpenUpgrade,VitalPet/odoo,osvalr/odoo,nitinitprof/odoo,rdeheele/odoo,joshuajan/odoo,hanicker/odoo,GauravSahu/odoo,dsfsdgsbngfggb/odoo,draugiskisprendimai/odoo,apocalypsebg/odoo,joariasl/odoo,MarcosCommunity/odoo,mszewczy/odoo,diagramsoftware/odoo,abstract-open-solutions/OCB,JonathanStein/odoo,OpusVL/odoo,Adel-Magebinary/odoo,brijeshkesariya/odoo,fuhongliang/odoo,OpenPymeMx/OCB,dkubiak789/odoo,gsmartway/odoo,SerpentCS/odoo,rubencabrera/odoo,ApuliaSoftware/odoo,Gitlab11/odoo,patmcb/odoo,gsmartway/odoo,rgeleta/odoo,janocat/odoo,frouty/odoogoeen,SAM-IT-SA/odoo,apanju/odoo,ClearCorp-dev/odoo,frouty/odoogoeen,guewen/OpenUpgrade,tvtsoft/odoo8,nagyistoce/odoo-dev-odoo,makinacorpus/odoo,x111ong/odoo,dkubiak789/odoo,ygol/odoo,bguillot/OpenUpgrade,ehirt/odoo,laslabs/odoo,hopeall/odoo,florentx/OpenUpgrade,jolevq/odoopub,vnsofthe/odoo,OpenUpgrade/OpenUpgrade,luiseduardohdbackup/odoo,mvaled/OpenUpgrade,oliverhr/odoo,dezynetechnologies/odoo,naousse/odoo,dllsf/odootest,ccomb/OpenUpgrade,guerrerocarlos/odoo,markeTIC/OCB,lsinfo/odoo,fgesora/odoo,jiangzhixiao/odoo,shaufi10/odoo,fevxie/odoo,alqfahad/odoo,gdgellatly/OCB1,spadae22/odoo,nagyistoce/odoo-dev-odoo,Endika/odoo,andreparames/odoo,ovnicraft/odoo,omprakasha/odoo,Nowheresly/odoo,MarcosCommunity/odoo,QianBIG/odoo,mkieszek/odoo,simongoffin/website_version,florentx/OpenUpgrade,sebalix/OpenUpgrade,leorochael/odoo,apanju/GMIO_Odoo,realsaiko/odoo,gvb/odoo,florian-dacosta/OpenUpgrade,slevenhagen/odoo,naousse/odoo,abstract-open-solutions/OCB,hubsaysnuaa/odoo,nuncjo/odoo,prospwro/odoo,colinnewell/odoo,ShineFan/odoo,QianBIG/odoo,jesramirez/odoo,nhomar/odoo-mirror,makinacorpus/odoo,OSSESAC/odoopubarquiluz,draugiskisprendimai/odoo,cloud9UG/odoo,odoousers2014/odoo,gvb/odoo,Drooids/odoo,BT-astauder/odoo,vrenaville/ngo-addons-backport,bplancher/odoo,blaggacao/OpenUpgrade,optima-ict/odoo,MarcosCommunity/odoo,realsaiko/odoo,ThinkOpen-Solutions/odoo,collex100/odoo,stephen144/odoo,nexiles/odoo,jaxkodex/odoo,fjbatresv/odoo,patmcb/odoo,markeTIC/OCB,dariemp/odoo,FlorianLudwig/odoo,ThinkOpen-Solutions/odoo,salaria/odoo,stephen144/odoo,laslabs/odoo,laslabs/odoo,lombritz/odoo,demon-ru/iml-crm,gvb/odoo,pedrobaeza/odoo,rahuldhote/odoo,osvalr/odoo,javierTerry/odoo,glovebx/odoo,jiangzhixiao/odoo,NL66278/OCB,markeTIC/OCB,collex100/odoo,rowemoore/odoo,jusdng/odoo,GauravSahu/odoo,BT-rmartin/odoo,Drooids/odoo,mustafat/odoo-1,rahuldhote/odoo,FlorianLudwig/odoo,markeTIC/OCB,nagyistoce/odoo-dev-odoo,ShineFan/odoo,dariemp/odoo,CubicERP/odoo,
mvaled/OpenUpgrade,andreparames/odoo,kybriainfotech/iSocioCRM,grap/OpenUpgrade,jaxkodex/odoo,Bachaco-ve/odoo,slevenhagen/odoo-npg,slevenhagen/odoo-npg,factorlibre/OCB,gdgellatly/OCB1,Noviat/odoo,hmen89/odoo,CatsAndDogsbvba/odoo,GauravSahu/odoo,oihane/odoo,podemos-info/odoo,luiseduardohdbackup/odoo,kybriainfotech/iSocioCRM,havt/odoo,BT-rmartin/odoo,ihsanudin/odoo,provaleks/o8,apanju/GMIO_Odoo,VielSoft/odoo,vnsofthe/odoo,shaufi/odoo,slevenhagen/odoo,colinnewell/odoo,juanalfonsopr/odoo,wangjun/odoo,mvaled/OpenUpgrade,funkring/fdoo,Nowheresly/odoo,havt/odoo,TRESCLOUD/odoopub,mlaitinen/odoo,papouso/odoo,acshan/odoo,fevxie/odoo,kittiu/odoo,sebalix/OpenUpgrade,nexiles/odoo,Elico-Corp/odoo_OCB,wangjun/odoo,bakhtout/odoo-educ,fjbatresv/odoo,grap/OpenUpgrade,vnsofthe/odoo,ccomb/OpenUpgrade,codekaki/odoo,microcom/odoo,slevenhagen/odoo-npg,sebalix/OpenUpgrade,minhtuancn/odoo,ygol/odoo,tangyiyong/odoo,nuncjo/odoo,acshan/odoo,tangyiyong/odoo,jesramirez/odoo,sergio-incaser/odoo,Noviat/odoo,nuuuboo/odoo,hip-odoo/odoo,shingonoide/odoo,savoirfairelinux/OpenUpgrade,rgeleta/odoo,arthru/OpenUpgrade,Adel-Magebinary/odoo,Ernesto99/odoo,leoliujie/odoo,synconics/odoo,lombritz/odoo,lgscofield/odoo,jiangzhixiao/odoo,bwrsandman/OpenUpgrade,tangyiyong/odoo,fgesora/odoo,luistorresm/odoo,tinkhaven-organization/odoo,tvtsoft/odoo8,jpshort/odoo,Danisan/odoo-1,rschnapka/odoo,kybriainfotech/iSocioCRM,dfang/odoo,Elico-Corp/odoo_OCB,tinkerthaler/odoo,bkirui/odoo,ccomb/OpenUpgrade,jeasoft/odoo,cloud9UG/odoo,SAM-IT-SA/odoo,feroda/odoo,abenzbiria/clients_odoo,JCA-Developpement/Odoo,guerrerocarlos/odoo,OpenPymeMx/OCB,Daniel-CA/odoo,apanju/odoo,ramitalat/odoo,frouty/odoogoeen,zchking/odoo,prospwro/odoo,slevenhagen/odoo,ramadhane/odoo,bplancher/odoo,Kilhog/odoo,hifly/OpenUpgrade,gavin-feng/odoo,NeovaHealth/odoo,cloud9UG/odoo,markeTIC/OCB,prospwro/odoo,dfang/odoo,patmcb/odoo,ojengwa/odoo,jpshort/odoo,fossoult/odoo,Ernesto99/odoo,ojengwa/odoo,joariasl/odoo,grap/OCB,dgzurita/odoo,kybriainfotech/iSocioCRM,ovnicraft/odoo,Grirrane/odoo,Noviat/odoo,dariemp/odoo,Gitlab11/odoo,nhomar/odoo,apanju/GMIO_Odoo,dfang/odoo,Elico-Corp/odoo_OCB,dfang/odoo,gdgellatly/OCB1,ygol/odoo,jeasoft/odoo,makinacorpus/odoo,alqfahad/odoo,NeovaHealth/odoo,sysadminmatmoz/OCB,Antiun/odoo,ovnicraft/odoo,x111ong/odoo,luistorresm/odoo,idncom/odoo,dezynetechnologies/odoo,shivam1111/odoo,gdgellatly/OCB1,ehirt/odoo,ingadhoc/odoo,blaggacao/OpenUpgrade,dkubiak789/odoo,jiachenning/odoo,gsmartway/odoo,cedk/odoo,codekaki/odoo,nhomar/odoo,KontorConsulting/odoo,diagramsoftware/odoo,ramadhane/odoo,storm-computers/odoo,kirca/OpenUpgrade,stonegithubs/odoo,goliveirab/odoo,nuuuboo/odoo,n0m4dz/odoo,microcom/odoo,OpenUpgrade-dev/OpenUpgrade,blaggacao/OpenUpgrade,frouty/odoo_oph,rubencabrera/odoo,simongoffin/website_version,leorochael/odoo,sinbazhou/odoo,klunwebale/odoo,nhomar/odoo-mirror,rubencabrera/odoo,abdellatifkarroum/odoo,alhashash/odoo,oasiswork/odoo,ChanduERP/odoo,lgscofield/odoo,sadleader/odoo,oliverhr/odoo,odootr/odoo,windedge/odoo,matrixise/odoo,vrenaville/ngo-addons-backport,incaser/odoo-odoo,odoo-turkiye/odoo,dkubiak789/odoo,CatsAndDogsbvba/odoo,tvtsoft/odoo8,jiachenning/odoo,rschnapka/odoo,Daniel-CA/odoo,savoirfairelinux/OpenUpgrade,sve-odoo/odoo,fuselock/odoo,matrixise/odoo,hopeall/odoo,JonathanStein/odoo,Eric-Zhong/odoo,draugiskisprendimai/odoo,cpyou/odoo,Noviat/odoo,bkirui/odoo,Grirrane/odoo,guewen/OpenUpgrade,andreparames/odoo,osvalr/odoo,jesramirez/odoo,funkring/fdoo,avoinsystems/odoo,ecosoft-odoo/odoo,OSSESAC/odoopubarquiluz,brijeshkesariya/odoo,csrocha/OpenUpgrade,B
T-rmartin/odoo,damdam-s/OpenUpgrade,alexcuellar/odoo,JGarcia-Panach/odoo,hifly/OpenUpgrade,papouso/odoo,abenzbiria/clients_odoo,oasiswork/odoo,bealdav/OpenUpgrade,highco-groupe/odoo,aviciimaxwell/odoo,KontorConsulting/odoo,hassoon3/odoo,shaufi/odoo,AuyaJackie/odoo,credativUK/OCB,VitalPet/odoo,n0m4dz/odoo,cpyou/odoo,Endika/odoo,alhashash/odoo,provaleks/o8,Danisan/odoo-1,OpenUpgrade-dev/OpenUpgrade,pedrobaeza/odoo,srimai/odoo,dfang/odoo,grap/OCB,savoirfairelinux/odoo,Elico-Corp/odoo_OCB,rowemoore/odoo,QianBIG/odoo,incaser/odoo-odoo,joariasl/odoo,VitalPet/odoo,jeasoft/odoo,colinnewell/odoo,CopeX/odoo,alexcuellar/odoo,PongPi/isl-odoo,makinacorpus/odoo,mszewczy/odoo,lightcn/odoo,goliveirab/odoo,tarzan0820/odoo,christophlsa/odoo,avoinsystems/odoo,blaggacao/OpenUpgrade,deKupini/erp,fdvarela/odoo8,Adel-Magebinary/odoo,FlorianLudwig/odoo,bealdav/OpenUpgrade,stonegithubs/odoo,ramadhane/odoo,Codefans-fan/odoo,charbeljc/OCB,JonathanStein/odoo,christophlsa/odoo,x111ong/odoo,Drooids/odoo,takis/odoo,rschnapka/odoo,avoinsystems/odoo,cpyou/odoo,ThinkOpen-Solutions/odoo,chiragjogi/odoo,JonathanStein/odoo,doomsterinc/odoo,tinkerthaler/odoo,podemos-info/odoo,storm-computers/odoo,kybriainfotech/iSocioCRM,guewen/OpenUpgrade,Kilhog/odoo,pedrobaeza/OpenUpgrade,funkring/fdoo,OpenUpgrade/OpenUpgrade,grap/OCB,thanhacun/odoo,gorjuce/odoo,ClearCorp-dev/odoo,apocalypsebg/odoo,gvb/odoo,shaufi/odoo,omprakasha/odoo,mlaitinen/odoo,hanicker/odoo,pedrobaeza/OpenUpgrade,jesramirez/odoo,lombritz/odoo,jaxkodex/odoo,PongPi/isl-odoo,apocalypsebg/odoo,OpenUpgrade-dev/OpenUpgrade,SerpentCS/odoo,gavin-feng/odoo,OpenUpgrade-dev/OpenUpgrade,sv-dev1/odoo,factorlibre/OCB,idncom/odoo,alhashash/odoo,odootr/odoo,lsinfo/odoo,OpenUpgrade/OpenUpgrade,mustafat/odoo-1,BT-fgarbely/odoo,jeasoft/odoo,javierTerry/odoo,doomsterinc/odoo,MarcosCommunity/odoo,charbeljc/OCB,pplatek/odoo,papouso/odoo,guewen/OpenUpgrade,idncom/odoo,steedos/odoo,Grirrane/odoo,NeovaHealth/odoo,fevxie/odoo,sergio-incaser/odoo,ehirt/odoo,bobisme/odoo,RafaelTorrealba/odoo,OpenPymeMx/OCB,datenbetrieb/odoo,eino-makitalo/odoo,lsinfo/odoo,leorochael/odoo,dalegregory/odoo,patmcb/odoo,virgree/odoo,ygol/odoo,odooindia/odoo,jolevq/odoopub,mkieszek/odoo,JonathanStein/odoo,VitalPet/odoo,brijeshkesariya/odoo,VitalPet/odoo,shaufi10/odoo,srimai/odoo,matrixise/odoo,bakhtout/odoo-educ,mmbtba/odoo,alhashash/odoo,dariemp/odoo,tvtsoft/odoo8,luiseduardohdbackup/odoo,sysadminmatmoz/OCB,JonathanStein/odoo,ApuliaSoftware/odoo,gavin-feng/odoo,cpyou/odoo,odoo-turkiye/odoo,steedos/odoo,factorlibre/OCB,jusdng/odoo,Endika/odoo,Nick-OpusVL/odoo,pedrobaeza/OpenUpgrade,Maspear/odoo,dgzurita/odoo,waytai/odoo,fossoult/odoo,frouty/odoogoeen,KontorConsulting/odoo,acshan/odoo,frouty/odoogoeen,florentx/OpenUpgrade,leorochael/odoo,Kilhog/odoo,jusdng/odoo,draugiskisprendimai/odoo,takis/odoo,idncom/odoo,srimai/odoo,chiragjogi/odoo,windedge/odoo,fdvarela/odoo8,lgscofield/odoo,Kilhog/odoo,rschnapka/odoo,thanhacun/odoo,shingonoide/odoo,luiseduardohdbackup/odoo,ujjwalwahi/odoo,mustafat/odoo-1,cpyou/odoo,microcom/odoo,NL66278/OCB,bwrsandman/OpenUpgrade,simongoffin/website_version,alexcuellar/odoo,Danisan/odoo-1,jusdng/odoo,mlaitinen/odoo,funkring/fdoo,feroda/odoo,BT-ojossen/odoo,minhtuancn/odoo,aviciimaxwell/odoo,codekaki/odoo,VitalPet/odoo,OpenPymeMx/OCB,savoirfairelinux/OpenUpgrade,dezynetechnologies/odoo,ecosoft-odoo/odoo,pplatek/odoo,numerigraphe/odoo,NL66278/OCB,joariasl/odoo,hoatle/odoo,grap/OCB,fossoult/odoo,xzYue/odoo,jiachenning/odoo,fuselock/odoo,pedrobaeza/OpenUpgrade,apanju/odoo,agrista/odoo-saas,kirca/OpenUp
grade,synconics/odoo,gavin-feng/odoo,fdvarela/odoo8,synconics/odoo,grap/OCB,acshan/odoo,optima-ict/odoo,odootr/odoo,vnsofthe/odoo,lightcn/odoo,andreparames/odoo,gorjuce/odoo,wangjun/odoo,markeTIC/OCB,dezynetechnologies/odoo,Antiun/odoo,eino-makitalo/odoo,gsmartway/odoo,oasiswork/odoo,ShineFan/odoo,frouty/odoo_oph,matrixise/odoo,oliverhr/odoo,naousse/odoo,charbeljc/OCB,shivam1111/odoo,mvaled/OpenUpgrade,minhtuancn/odoo,sadleader/odoo,inspyration/odoo,bguillot/OpenUpgrade,hip-odoo/odoo,kittiu/odoo,steedos/odoo,tvtsoft/odoo8,Danisan/odoo-1,incaser/odoo-odoo,javierTerry/odoo,hoatle/odoo,stonegithubs/odoo,pedrobaeza/odoo,christophlsa/odoo,nitinitprof/odoo,sinbazhou/odoo,shivam1111/odoo,vnsofthe/odoo,srsman/odoo,apanju/GMIO_Odoo,BT-ojossen/odoo,jeasoft/odoo,tangyiyong/odoo,JCA-Developpement/Odoo,lsinfo/odoo,lgscofield/odoo,nhomar/odoo,dkubiak789/odoo,SerpentCS/odoo,rdeheele/odoo,factorlibre/OCB,rahuldhote/odoo,storm-computers/odoo,srsman/odoo,lightcn/odoo,CatsAndDogsbvba/odoo,JGarcia-Panach/odoo,gsmartway/odoo,odoousers2014/odoo,apocalypsebg/odoo,fossoult/odoo,OpenPymeMx/OCB,RafaelTorrealba/odoo,aviciimaxwell/odoo,credativUK/OCB,sinbazhou/odoo,hubsaysnuaa/odoo,ihsanudin/odoo,luiseduardohdbackup/odoo,damdam-s/OpenUpgrade,RafaelTorrealba/odoo,jaxkodex/odoo,Grirrane/odoo,collex100/odoo,Ernesto99/odoo,synconics/odoo,tangyiyong/odoo,tarzan0820/odoo,ramadhane/odoo,glovebx/odoo,synconics/odoo,lightcn/odoo,camptocamp/ngo-addons-backport,thanhacun/odoo,goliveirab/odoo,savoirfairelinux/odoo,florian-dacosta/OpenUpgrade,fuhongliang/odoo,bakhtout/odoo-educ,Endika/odoo,bwrsandman/OpenUpgrade,tinkerthaler/odoo,diagramsoftware/odoo,ubic135/odoo-design,thanhacun/odoo,ecosoft-odoo/odoo,mmbtba/odoo,camptocamp/ngo-addons-backport,shaufi/odoo,KontorConsulting/odoo,lgscofield/odoo,xujb/odoo,vrenaville/ngo-addons-backport,cdrooom/odoo,dgzurita/odoo,optima-ict/odoo,rahuldhote/odoo,shivam1111/odoo,leoliujie/odoo,Daniel-CA/odoo,ApuliaSoftware/odoo,OpenPymeMx/OCB,sadleader/odoo,JCA-Developpement/Odoo,microcom/odoo,sergio-incaser/odoo,factorlibre/OCB,RafaelTorrealba/odoo,srimai/odoo,ojengwa/odoo,christophlsa/odoo,lsinfo/odoo,dezynetechnologies/odoo,odoo-turkiye/odoo,fevxie/odoo,mustafat/odoo-1,Bachaco-ve/odoo,TRESCLOUD/odoopub,laslabs/odoo,sebalix/OpenUpgrade,n0m4dz/odoo,makinacorpus/odoo,waytai/odoo,ApuliaSoftware/odoo,aviciimaxwell/odoo,eino-makitalo/odoo,oasiswork/odoo,OpenUpgrade/OpenUpgrade,SerpentCS/odoo,florian-dacosta/OpenUpgrade,rgeleta/odoo,srsman/odoo,tarzan0820/odoo,odootr/odoo,tarzan0820/odoo,xzYue/odoo,dalegregory/odoo,OSSESAC/odoopubarquiluz,provaleks/o8,feroda/odoo,hifly/OpenUpgrade,Grirrane/odoo,frouty/odoogoeen,zchking/odoo,xzYue/odoo,cysnake4713/odoo,TRESCLOUD/odoopub,slevenhagen/odoo,javierTerry/odoo,ccomb/OpenUpgrade,shaufi/odoo,CopeX/odoo,nhomar/odoo,hoatle/odoo,nuuuboo/odoo,elmerdpadilla/iv,bplancher/odoo,ecosoft-odoo/odoo,waytai/odoo,diagramsoftware/odoo,csrocha/OpenUpgrade,pedrobaeza/OpenUpgrade,numerigraphe/odoo,poljeff/odoo,charbeljc/OCB,syci/OCB,cloud9UG/odoo,steedos/odoo,hopeall/odoo,havt/odoo,mlaitinen/odoo,rubencabrera/odoo,klunwebale/odoo,poljeff/odoo,cdrooom/odoo,nuuuboo/odoo,bkirui/odoo,osvalr/odoo,Nick-OpusVL/odoo,csrocha/OpenUpgrade,pedrobaeza/odoo,massot/odoo,dgzurita/odoo,gorjuce/odoo,bobisme/odoo,shivam1111/odoo,AuyaJackie/odoo,rubencabrera/odoo,draugiskisprendimai/odoo,dalegregory/odoo,windedge/odoo,christophlsa/odoo,bwrsandman/OpenUpgrade,hassoon3/odoo,rdeheele/odoo,ShineFan/odoo,bwrsandman/OpenUpgrade,jiangzhixiao/odoo,hmen89/odoo,vrenaville/ngo-addons-backport,savoirfairelinux/odoo
,slevenhagen/odoo,nitinitprof/odoo,CopeX/odoo,oihane/odoo,shaufi10/odoo,ihsanudin/odoo,Nowheresly/odoo,Drooids/odoo,credativUK/OCB,glovebx/odoo,frouty/odoo_oph,gdgellatly/OCB1,Eric-Zhong/odoo,papouso/odoo,Antiun/odoo,oliverhr/odoo,odoousers2014/odoo,CopeX/odoo,spadae22/odoo,oliverhr/odoo,provaleks/o8,acshan/odoo,tinkhaven-organization/odoo,cedk/odoo,Endika/OpenUpgrade,ChanduERP/odoo,MarcosCommunity/odoo,idncom/odoo,camptocamp/ngo-addons-backport,BT-astauder/odoo,abstract-open-solutions/OCB,hopeall/odoo,SerpentCS/odoo,ujjwalwahi/odoo,guerrerocarlos/odoo,sve-odoo/odoo,camptocamp/ngo-addons-backport,Antiun/odoo,joariasl/odoo,sergio-incaser/odoo,juanalfonsopr/odoo,NL66278/OCB,Noviat/odoo,waytai/odoo,lightcn/odoo,kittiu/odoo,wangjun/odoo,abenzbiria/clients_odoo,Nick-OpusVL/odoo,demon-ru/iml-crm,mszewczy/odoo,massot/odoo,osvalr/odoo,odoo-turkiye/odoo,JGarcia-Panach/odoo,dkubiak789/odoo,brijeshkesariya/odoo,bealdav/OpenUpgrade,hmen89/odoo,janocat/odoo,savoirfairelinux/OpenUpgrade,tangyiyong/odoo,mustafat/odoo-1,kittiu/odoo,fjbatresv/odoo,fuhongliang/odoo,oihane/odoo,odoo-turkiye/odoo,csrocha/OpenUpgrade,bguillot/OpenUpgrade,TRESCLOUD/odoopub,nagyistoce/odoo-dev-odoo,apocalypsebg/odoo,arthru/OpenUpgrade,abenzbiria/clients_odoo,nagyistoce/odoo-dev-odoo,tinkhaven-organization/odoo,jaxkodex/odoo,mszewczy/odoo,ojengwa/odoo,Ernesto99/odoo,Gitlab11/odoo,nhomar/odoo-mirror,patmcb/odoo,Nick-OpusVL/odoo,chiragjogi/odoo,kittiu/odoo,gorjuce/odoo,mmbtba/odoo,dariemp/odoo,podemos-info/odoo,takis/odoo,kifcaliph/odoo,datenbetrieb/odoo,OpenUpgrade-dev/OpenUpgrade,tarzan0820/odoo,BT-rmartin/odoo,Codefans-fan/odoo,podemos-info/odoo,Endika/OpenUpgrade,ramitalat/odoo,odooindia/odoo,eino-makitalo/odoo,juanalfonsopr/odoo,alexteodor/odoo,cedk/odoo,gdgellatly/OCB1,papouso/odoo,hbrunn/OpenUpgrade,joariasl/odoo,sebalix/OpenUpgrade,leorochael/odoo,n0m4dz/odoo,leoliujie/odoo,provaleks/o8,0k/odoo,alexteodor/odoo,OpenPymeMx/OCB,Elico-Corp/odoo_OCB,ccomb/OpenUpgrade,bealdav/OpenUpgrade,PongPi/isl-odoo,nuncjo/odoo,hassoon3/odoo,Gitlab11/odoo,rgeleta/odoo,Codefans-fan/odoo,optima-ict/odoo,hoatle/odoo,SerpentCS/odoo,rahuldhote/odoo,tvtsoft/odoo8,salaria/odoo,bealdav/OpenUpgrade,windedge/odoo,avoinsystems/odoo,cedk/odoo,tinkhaven-organization/odoo,abdellatifkarroum/odoo,microcom/odoo,provaleks/o8,funkring/fdoo,klunwebale/odoo,grap/OpenUpgrade,dalegregory/odoo,fuselock/odoo,virgree/odoo,blaggacao/OpenUpgrade,OpenUpgrade/OpenUpgrade,mkieszek/odoo,ecosoft-odoo/odoo,sadleader/odoo,sinbazhou/odoo,grap/OCB,camptocamp/ngo-addons-backport,waytai/odoo,juanalfonsopr/odoo,QianBIG/odoo,abdellatifkarroum/odoo,GauravSahu/odoo,bobisme/odoo,PongPi/isl-odoo,agrista/odoo-saas,JGarcia-Panach/odoo,bwrsandman/OpenUpgrade,fgesora/odoo,highco-groupe/odoo,credativUK/OCB,codekaki/odoo,bguillot/OpenUpgrade,draugiskisprendimai/odoo,simongoffin/website_version,Ichag/odoo,pedrobaeza/OpenUpgrade,ramitalat/odoo,hip-odoo/odoo,luiseduardohdbackup/odoo,ygol/odoo,Nick-OpusVL/odoo,incaser/odoo-odoo,janocat/odoo,klunwebale/odoo,feroda/odoo,shaufi10/odoo,eino-makitalo/odoo,camptocamp/ngo-addons-backport,tinkerthaler/odoo,jeasoft/odoo,Codefans-fan/odoo,Eric-Zhong/odoo,avoinsystems/odoo,apanju/GMIO_Odoo,cloud9UG/odoo,nagyistoce/odoo-dev-odoo,savoirfairelinux/OpenUpgrade,shaufi/odoo,pplatek/odoo,goliveirab/odoo,hubsaysnuaa/odoo,prospwro/odoo,erkrishna9/odoo,sebalix/OpenUpgrade,MarcosCommunity/odoo,BT-fgarbely/odoo,papouso/odoo,BT-fgarbely/odoo,ShineFan/odoo,shingonoide/odoo,odootr/odoo,datenbetrieb/odoo,jfpla/odoo,gvb/odoo,sergio-incaser/odoo,juanalfonsopr/odoo,Elico-Corp/
odoo_OCB,florian-dacosta/OpenUpgrade,nuuuboo/odoo,gsmartway/odoo,savoirfairelinux/odoo,elmerdpadilla/iv,ingadhoc/odoo,hassoon3/odoo,janocat/odoo,ihsanudin/odoo,erkrishna9/odoo,massot/odoo,Maspear/odoo,bplancher/odoo,dezynetechnologies/odoo,tinkerthaler/odoo,Endika/odoo,savoirfairelinux/odoo,Gitlab11/odoo,kifcaliph/odoo,shingonoide/odoo,colinnewell/odoo,apanju/odoo,Ichag/odoo,fgesora/odoo,dsfsdgsbngfggb/odoo,waytai/odoo,srimai/odoo,naousse/odoo,Antiun/odoo,abstract-open-solutions/OCB,alhashash/odoo,fgesora/odoo,SAM-IT-SA/odoo,sinbazhou/odoo,incaser/odoo-odoo,OSSESAC/odoopubarquiluz,srsman/odoo,xujb/odoo,guewen/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,VielSoft/odoo,omprakasha/odoo,omprakasha/odoo,srimai/odoo,alqfahad/odoo,gdgellatly/OCB1,mszewczy/odoo,sysadminmatmoz/OCB,FlorianLudwig/odoo,odoo-turkiye/odoo,Kilhog/odoo,frouty/odoogoeen,datenbetrieb/odoo,leoliujie/odoo,GauravSahu/odoo,mmbtba/odoo,agrista/odoo-saas,cysnake4713/odoo,stephen144/odoo,bobisme/odoo,x111ong/odoo,poljeff/odoo,bobisme/odoo,factorlibre/OCB,vrenaville/ngo-addons-backport,colinnewell/odoo,AuyaJackie/odoo,xujb/odoo,rahuldhote/odoo,oihane/odoo,stephen144/odoo,salaria/odoo,BT-rmartin/odoo,JCA-Developpement/Odoo,Bachaco-ve/odoo,nitinitprof/odoo,florian-dacosta/OpenUpgrade,camptocamp/ngo-addons-backport,sve-odoo/odoo,addition-it-solutions/project-all,lombritz/odoo,ramadhane/odoo,jiangzhixiao/odoo,fuhongliang/odoo,fevxie/odoo,FlorianLudwig/odoo,alexteodor/odoo,tinkhaven-organization/odoo,hip-odoo/odoo,CopeX/odoo,hopeall/odoo,dsfsdgsbngfggb/odoo,frouty/odoo_oph,alexteodor/odoo,hassoon3/odoo,dsfsdgsbngfggb/odoo,JGarcia-Panach/odoo,oihane/odoo,juanalfonsopr/odoo,apocalypsebg/odoo,GauravSahu/odoo,BT-ojossen/odoo,xzYue/odoo,thanhacun/odoo,syci/OCB,alhashash/odoo,SAM-IT-SA/odoo,hubsaysnuaa/odoo,SAM-IT-SA/odoo,aviciimaxwell/odoo,idncom/odoo,addition-it-solutions/project-all,Gitlab11/odoo,oasiswork/odoo,christophlsa/odoo,ramadhane/odoo,frouty/odoo_oph,gorjuce/odoo,Nowheresly/odoo,bakhtout/odoo-educ,joshuajan/odoo,jolevq/odoopub,nuuuboo/odoo,SAM-IT-SA/odoo,nhomar/odoo-mirror,OpenUpgrade/OpenUpgrade,vrenaville/ngo-addons-backport,jaxkodex/odoo,javierTerry/odoo,draugiskisprendimai/odoo,goliveirab/odoo,tinkhaven-organization/odoo,omprakasha/odoo,BT-astauder/odoo,rowemoore/odoo,Bachaco-ve/odoo,acshan/odoo,salaria/odoo,dkubiak789/odoo,credativUK/OCB,colinnewell/odoo,gavin-feng/odoo,collex100/odoo,0k/OpenUpgrade,xujb/odoo,minhtuancn/odoo,hanicker/odoo,ubic135/odoo-design,BT-rmartin/odoo,dllsf/odootest,codekaki/odoo,QianBIG/odoo,bkirui/odoo,nhomar/odoo-mirror,inspyration/odoo,ihsanudin/odoo,KontorConsulting/odoo,fjbatresv/odoo,stephen144/odoo,abstract-open-solutions/OCB,n0m4dz/odoo,savoirfairelinux/OpenUpgrade,sinbazhou/odoo,grap/OpenUpgrade,BT-ojossen/odoo,shivam1111/odoo,fuhongliang/odoo,tarzan0820/odoo,credativUK/OCB,Maspear/odoo,funkring/fdoo,fjbatresv/odoo,Maspear/odoo,NeovaHealth/odoo,minhtuancn/odoo,apanju/odoo,shaufi10/odoo,dezynetechnologies/odoo,Endika/odoo,kybriainfotech/iSocioCRM,janocat/odoo,ChanduERP/odoo,elmerdpadilla/iv,hbrunn/OpenUpgrade,highco-groupe/odoo,numerigraphe/odoo,sve-odoo/odoo,poljeff/odoo,ApuliaSoftware/odoo,jfpla/odoo,abdellatifkarroum/odoo,srimai/odoo,dllsf/odootest,alqfahad/odoo,Drooids/odoo,alexcuellar/odoo,kybriainfotech/iSocioCRM,addition-it-solutions/project-all,hip-odoo/odoo,syci/OCB,sv-dev1/odoo,Noviat/odoo,ChanduERP/odoo,gavin-feng/odoo,VielSoft/odoo,odoousers2014/odoo,ujjwalwahi/odoo,n0m4dz/odoo,janocat/odoo,shingonoide/odoo,jiachenning/odoo,sysadminmatmoz/OCB,mszewczy/odoo,doomsterinc/odoo,hoatle/odoo,dsf
sdgsbngfggb/odoo,JGarcia-Panach/odoo,matrixise/odoo,nitinitprof/odoo,havt/odoo,jpshort/odoo,cedk/odoo,nhomar/odoo,lgscofield/odoo,jpshort/odoo,joariasl/odoo,oihane/odoo,rowemoore/odoo,luistorresm/odoo,chiragjogi/odoo,virgree/odoo,rgeleta/odoo,factorlibre/OCB,ChanduERP/odoo,oasiswork/odoo,ingadhoc/odoo,Eric-Zhong/odoo,0k/OpenUpgrade,hopeall/odoo,bakhtout/odoo-educ,salaria/odoo,gorjuce/odoo,ujjwalwahi/odoo,odoo-turkiye/odoo,bplancher/odoo,ccomb/OpenUpgrade,patmcb/odoo,JGarcia-Panach/odoo,vnsofthe/odoo,spadae22/odoo,mvaled/OpenUpgrade,thanhacun/odoo,dsfsdgsbngfggb/odoo,naousse/odoo,Adel-Magebinary/odoo,PongPi/isl-odoo,MarcosCommunity/odoo,Danisan/odoo-1,cedk/odoo,numerigraphe/odoo,patmcb/odoo,steedos/odoo,x111ong/odoo,avoinsystems/odoo,provaleks/o8,Drooids/odoo,jiachenning/odoo,0k/odoo,pplatek/odoo,hifly/OpenUpgrade,cdrooom/odoo,AuyaJackie/odoo,hifly/OpenUpgrade,takis/odoo,Ichag/odoo,nexiles/odoo,rdeheele/odoo,arthru/OpenUpgrade,x111ong/odoo,nexiles/odoo,Daniel-CA/odoo,poljeff/odoo,kirca/OpenUpgrade,savoirfairelinux/odoo,jfpla/odoo,Adel-Magebinary/odoo,luiseduardohdbackup/odoo,jeasoft/odoo,Ernesto99/odoo,RafaelTorrealba/odoo,lgscofield/odoo,jesramirez/odoo,tvibliani/odoo,salaria/odoo,slevenhagen/odoo-npg,steedos/odoo,fevxie/odoo,srsman/odoo,florian-dacosta/OpenUpgrade,odoousers2014/odoo,BT-fgarbely/odoo,mmbtba/odoo,kirca/OpenUpgrade,nitinitprof/odoo,jpshort/odoo,mkieszek/odoo,ihsanudin/odoo,hanicker/odoo,ThinkOpen-Solutions/odoo,incaser/odoo-odoo,florentx/OpenUpgrade,poljeff/odoo,ovnicraft/odoo,janocat/odoo,Endika/OpenUpgrade,mmbtba/odoo,VielSoft/odoo,dalegregory/odoo,Codefans-fan/odoo,cedk/odoo,Gitlab11/odoo,feroda/odoo,Eric-Zhong/odoo,Danisan/odoo-1,CubicERP/odoo,datenbetrieb/odoo,BT-astauder/odoo,pplatek/odoo,tangyiyong/odoo,storm-computers/odoo,numerigraphe/odoo,csrocha/OpenUpgrade,stephen144/odoo,pedrobaeza/odoo,stonegithubs/odoo,brijeshkesariya/odoo,feroda/odoo,Bachaco-ve/odoo,osvalr/odoo,hassoon3/odoo,shingonoide/odoo,grap/OCB,lightcn/odoo,grap/OCB,chiragjogi/odoo,dalegregory/odoo,GauravSahu/odoo,camptocamp/ngo-addons-backport,addition-it-solutions/project-all,bobisme/odoo,leorochael/odoo,fuhongliang/odoo,odootr/odoo,windedge/odoo,hanicker/odoo,frouty/odoogoeen,abenzbiria/clients_odoo,abstract-open-solutions/OCB,dgzurita/odoo,storm-computers/odoo,bplancher/odoo,ojengwa/odoo,wangjun/odoo,RafaelTorrealba/odoo,spadae22/odoo,hubsaysnuaa/odoo,demon-ru/iml-crm,xzYue/odoo,kifcaliph/odoo,stonegithubs/odoo,ygol/odoo,avoinsystems/odoo,xzYue/odoo,ojengwa/odoo,lightcn/odoo,oasiswork/odoo,alqfahad/odoo,grap/OpenUpgrade,dsfsdgsbngfggb/odoo,ClearCorp-dev/odoo,ovnicraft/odoo,andreparames/odoo,csrocha/OpenUpgrade,kifcaliph/odoo,andreparames/odoo,javierTerry/odoo,Endika/OpenUpgrade,RafaelTorrealba/odoo,ApuliaSoftware/odoo,luistorresm/odoo,hubsaysnuaa/odoo,alqfahad/odoo,odooindia/odoo,Daniel-CA/odoo,ApuliaSoftware/odoo,PongPi/isl-odoo,nuncjo/odoo,syci/OCB,jfpla/odoo,alexteodor/odoo,kifcaliph/odoo,tvibliani/odoo,numerigraphe/odoo,klunwebale/odoo,hanicker/odoo,rschnapka/odoo,fuhongliang/odoo,doomsterinc/odoo,fevxie/odoo,tvibliani/odoo,eino-makitalo/odoo,hopeall/odoo,jusdng/odoo,damdam-s/OpenUpgrade,deKupini/erp,0k/OpenUpgrade,MarcosCommunity/odoo,ramitalat/odoo,blaggacao/OpenUpgrade,abdellatifkarroum/odoo,idncom/odoo,slevenhagen/odoo-npg,xujb/odoo,gorjuce/odoo,takis/odoo,havt/odoo,fgesora/odoo,nexiles/odoo,zchking/odoo,stonegithubs/odoo,addition-it-solutions/project-all,realsaiko/odoo,Adel-Magebinary/odoo,microcom/odoo,hbrunn/OpenUpgrade,jusdng/odoo,CubicERP/odoo,csrocha/OpenUpgrade,goliveirab/odoo,kirca/Op
enUpgrade,chiragjogi/odoo,Eric-Zhong/odoo,guewen/OpenUpgrade,jiangzhixiao/odoo,wangjun/odoo,omprakasha/odoo,diagramsoftware/odoo,sv-dev1/odoo,ubic135/odoo-design,massot/odoo,hifly/OpenUpgrade,bguillot/OpenUpgrade,slevenhagen/odoo-npg,dgzurita/odoo,glovebx/odoo,brijeshkesariya/odoo,syci/OCB,abdellatifkarroum/odoo,takis/odoo,hmen89/odoo,JonathanStein/odoo,fossoult/odoo,nuuuboo/odoo,datenbetrieb/odoo,havt/odoo,dfang/odoo,virgree/odoo,simongoffin/website_version,ehirt/odoo,mvaled/OpenUpgrade,alqfahad/odoo,Antiun/odoo,BT-fgarbely/odoo,BT-fgarbely/odoo,ShineFan/odoo,oliverhr/odoo,fjbatresv/odoo,ccomb/OpenUpgrade,rowemoore/odoo,rdeheele/odoo,fuselock/odoo,sv-dev1/odoo,Maspear/odoo,tinkhaven-organization/odoo,agrista/odoo-saas,Endika/OpenUpgrade,damdam-s/OpenUpgrade,Endika/odoo,luistorresm/odoo,glovebx/odoo,nuncjo/odoo,doomsterinc/odoo,Bachaco-ve/odoo,0k/OpenUpgrade,shaufi10/odoo,codekaki/odoo,numerigraphe/odoo,sebalix/OpenUpgrade,mlaitinen/odoo,gdgellatly/OCB1,odootr/odoo,tinkerthaler/odoo,salaria/odoo,ramitalat/odoo,mmbtba/odoo,apanju/GMIO_Odoo,eino-makitalo/odoo,dgzurita/odoo,nuncjo/odoo,jpshort/odoo,gvb/odoo,podemos-info/odoo,lombritz/odoo,VitalPet/odoo,guerrerocarlos/odoo,fdvarela/odoo8,sinbazhou/odoo,lombritz/odoo,hanicker/odoo,ingadhoc/odoo,NeovaHealth/odoo,TRESCLOUD/odoopub,mvaled/OpenUpgrade,damdam-s/OpenUpgrade,OpusVL/odoo,ingadhoc/odoo,spadae22/odoo,ecosoft-odoo/odoo,fgesora/odoo,ChanduERP/odoo,CubicERP/odoo,x111ong/odoo,0k/odoo,podemos-info/odoo,kittiu/odoo,deKupini/erp,rahuldhote/odoo,ramitalat/odoo,prospwro/odoo,stonegithubs/odoo,zchking/odoo,incaser/odoo-odoo,alexcuellar/odoo,omprakasha/odoo,cloud9UG/odoo,SAM-IT-SA/odoo,mlaitinen/odoo,vrenaville/ngo-addons-backport,rowemoore/odoo,Kilhog/odoo,cysnake4713/odoo,joshuajan/odoo,CubicERP/odoo,windedge/odoo,hmen89/odoo,collex100/odoo,0k/odoo,n0m4dz/odoo,slevenhagen/odoo,jeasoft/odoo,leorochael/odoo,AuyaJackie/odoo,pedrobaeza/odoo,inspyration/odoo,nuncjo/odoo,PongPi/isl-odoo,kirca/OpenUpgrade,Nowheresly/odoo,fdvarela/odoo8,ehirt/odoo,chiragjogi/odoo,VielSoft/odoo,syci/OCB,sve-odoo/odoo,ecosoft-odoo/odoo,Eric-Zhong/odoo,Daniel-CA/odoo,minhtuancn/odoo,tvibliani/odoo,erkrishna9/odoo,aviciimaxwell/odoo,ujjwalwahi/odoo,charbeljc/OCB,shaufi/odoo,fossoult/odoo,gvb/odoo,vrenaville/ngo-addons-backport,acshan/odoo,FlorianLudwig/odoo,bguillot/OpenUpgrade,takis/odoo,ehirt/odoo,slevenhagen/odoo,guerrerocarlos/odoo,bkirui/odoo,addition-it-solutions/project-all,klunwebale/odoo,OpenPymeMx/OCB,rgeleta/odoo,Drooids/odoo,SerpentCS/odoo,jfpla/odoo,massot/odoo,CubicERP/odoo,BT-rmartin/odoo,alexcuellar/odoo,colinnewell/odoo,fossoult/odoo,minhtuancn/odoo,FlorianLudwig/odoo,ClearCorp-dev/odoo,christophlsa/odoo,lsinfo/odoo,zchking/odoo,apocalypsebg/odoo,jaxkodex/odoo,Daniel-CA/odoo,kirca/OpenUpgrade,aviciimaxwell/odoo,0k/OpenUpgrade,waytai/odoo,cdrooom/odoo,dariemp/odoo,Codefans-fan/odoo,frouty/odoo_oph,codekaki/odoo,optima-ict/odoo,prospwro/odoo,prospwro/odoo,doomsterinc/odoo,Kilhog/odoo,OpenUpgrade/OpenUpgrade,BT-fgarbely/odoo,tinkerthaler/odoo,feroda/odoo,Endika/OpenUpgrade,fuselock/odoo,bealdav/OpenUpgrade,Nowheresly/odoo,Ernesto99/odoo,mkieszek/odoo,CubicERP/odoo,pplatek/odoo,nagyistoce/odoo-dev-odoo,deKupini/erp,apanju/odoo,BT-ojossen/odoo,florentx/OpenUpgrade,tvibliani/odoo,funkring/fdoo,guewen/OpenUpgrade,highco-groupe/odoo,Nick-OpusVL/odoo,apanju/GMIO_Odoo,elmerdpadilla/iv,highco-groupe/odoo,synconics/odoo,apanju/odoo,papouso/odoo,diagramsoftware/odoo,cysnake4713/odoo,CatsAndDogsbvba/odoo,realsaiko/odoo,NeovaHealth/odoo,NeovaHealth/odoo,leoliujie/odoo,diagr
amsoftware/odoo,bguillot/OpenUpgrade,ramadhane/odoo,thanhacun/odoo,Ichag/odoo,Ichag/odoo,abdellatifkarroum/odoo,guerrerocarlos/odoo,agrista/odoo-saas,xujb/odoo,damdam-s/OpenUpgrade,sergio-incaser/odoo,Endika/OpenUpgrade,joshuajan/odoo,hbrunn/OpenUpgrade,VielSoft/odoo,bkirui/odoo,deKupini/erp,ubic135/odoo-design,erkrishna9/odoo,demon-ru/iml-crm,Nick-OpusVL/odoo,nhomar/odoo,lsinfo/odoo,srsman/odoo,rowemoore/odoo,collex100/odoo,ujjwalwahi/odoo,kittiu/odoo,nitinitprof/odoo,slevenhagen/odoo-npg,zchking/odoo,virgree/odoo,ihsanudin/odoo,elmerdpadilla/iv,ingadhoc/odoo,shaufi10/odoo,spadae22/odoo,klunwebale/odoo,OSSESAC/odoopubarquiluz,virgree/odoo,NL66278/OCB,oihane/odoo,0k/odoo,VitalPet/odoo,oliverhr/odoo,zchking/odoo,hip-odoo/odoo,sv-dev1/odoo,glovebx/odoo,mlaitinen/odoo,jolevq/odoopub,hubsaysnuaa/odoo,hbrunn/OpenUpgrade,Antiun/odoo,Ernesto99/odoo,AuyaJackie/odoo,javierTerry/odoo,joshuajan/odoo,grap/OpenUpgrade,odoousers2014/odoo,OSSESAC/odoopubarquiluz,credativUK/OCB,shivam1111/odoo,dllsf/odootest,goliveirab/odoo,gavin-feng/odoo,CopeX/odoo,AuyaJackie/odoo,luistorresm/odoo,glovebx/odoo,ubic135/odoo-design,Bachaco-ve/odoo,laslabs/odoo,CatsAndDogsbvba/odoo,rschnapka/odoo,ygol/odoo,collex100/odoo,lombritz/odoo,ThinkOpen-Solutions/odoo,QianBIG/odoo,storm-computers/odoo,0k/OpenUpgrade,sv-dev1/odoo,shingonoide/odoo,markeTIC/OCB,spadae22/odoo,KontorConsulting/odoo,sadleader/odoo,gsmartway/odoo,osvalr/odoo,BT-astauder/odoo,xzYue/odoo,Nowheresly/odoo,virgree/odoo,arthru/OpenUpgrade,Maspear/odoo,erkrishna9/odoo,mszewczy/odoo,makinacorpus/odoo,wangjun/odoo,jiangzhixiao/odoo,codekaki/odoo,cloud9UG/odoo,sysadminmatmoz/OCB,rubencabrera/odoo,Maspear/odoo,optima-ict/odoo,bkirui/odoo,doomsterinc/odoo,vnsofthe/odoo,dllsf/odootest,laslabs/odoo,grap/OpenUpgrade,Ichag/odoo,mustafat/odoo-1,damdam-s/OpenUpgrade,credativUK/OCB,BT-ojossen/odoo,xujb/odoo,alexcuellar/odoo,charbeljc/OCB,rgeleta/odoo,rubencabrera/odoo,nexiles/odoo,havt/odoo,CatsAndDogsbvba/odoo,makinacorpus/odoo,KontorConsulting/odoo,CatsAndDogsbvba/odoo,ShineFan/odoo,OpusVL/odoo,windedge/odoo,bobisme/odoo,dariemp/odoo,Noviat/odoo,jfpla/odoo,jfpla/odoo,jpshort/odoo,jolevq/odoopub,andreparames/odoo,odooindia/odoo,hoatle/odoo,pplatek/odoo,jiachenning/odoo,podemos-info/odoo,luistorresm/odoo,brijeshkesariya/odoo
164a80ce3bcffad0e233426830c712cddd2f750b
thefederation/apps.py
thefederation/apps.py
import datetime
import sys

import django_rq
from django.apps import AppConfig


class TheFederationConfig(AppConfig):
    name = "thefederation"
    verbose_name = "The Federation"

    def ready(self):
        # Only register tasks if RQ Scheduler process
        if "rqscheduler" not in sys.argv:
            return

        from thefederation.social import make_daily_post
        from thefederation.tasks import aggregate_daily_stats
        from thefederation.tasks import clean_duplicate_nodes
        from thefederation.tasks import poll_nodes

        scheduler = django_rq.get_scheduler()

        # Delete any existing jobs in the scheduler when the app starts up
        for job in scheduler.get_jobs():
            job.delete()

        scheduler.schedule(
            scheduled_time=datetime.datetime.utcnow(),
            func=aggregate_daily_stats,
            interval=5500,
            queue_name='high',
        )

        scheduler.cron(
            '0 10 * * *',
            func=make_daily_post,
            queue_name='high',
        )

        scheduler.cron(
            '18 4 * * *',
            func=clean_duplicate_nodes,
            queue_name='medium',
        )

        scheduler.schedule(
            scheduled_time=datetime.datetime.utcnow(),
            func=poll_nodes,
            interval=10800,
            queue_name='medium',
        )
import datetime
import sys

import django_rq
from django.apps import AppConfig


class TheFederationConfig(AppConfig):
    name = "thefederation"
    verbose_name = "The Federation"

    def ready(self):
        # Only register tasks if RQ Scheduler process
        if "rqscheduler" not in sys.argv:
            return

        from thefederation.social import make_daily_post
        from thefederation.tasks import aggregate_daily_stats
        from thefederation.tasks import clean_duplicate_nodes
        from thefederation.tasks import poll_nodes

        scheduler = django_rq.get_scheduler()

        # Delete any existing jobs in the scheduler when the app starts up
        for job in scheduler.get_jobs():
            job.delete()

        scheduler.schedule(
            scheduled_time=datetime.datetime.utcnow(),
            func=aggregate_daily_stats,
            interval=5500,
            queue_name='high',
        )

        scheduler.cron(
            '0 10 * * *',
            func=make_daily_post,
            queue_name='high',
        )

        scheduler.cron(
            '18 4 * * *',
            func=clean_duplicate_nodes,
            queue_name='medium',
            timeout=3600,
        )

        scheduler.schedule(
            scheduled_time=datetime.datetime.utcnow(),
            func=poll_nodes,
            interval=10800,
            queue_name='medium',
        )
Increase timeout of clean_duplicate_nodes job
Increase timeout of clean_duplicate_nodes job
Python
agpl-3.0
jaywink/diaspora-hub,jaywink/diaspora-hub,jaywink/the-federation.info,jaywink/diaspora-hub,jaywink/the-federation.info,jaywink/the-federation.info
fd5ebe9ae938cdf0d586bf3177730619b8b2025a
django_auto_filter/filter_for_models.py
django_auto_filter/filter_for_models.py
from django.conf.urls import patterns, url
from django.contrib.auth.decorators import login_required
from django_auto_filter.views_django_auto_filter_new import DjangoAutoFilterNew
from djangoautoconf.model_utils.model_attr_utils import model_enumerator
from ufs_tools.string_tools import class_name_to_low_case


def add_filter_to_url_for(urlpatterns, models):
    for model in model_enumerator(models):
        urlpatterns += patterns('',
                                url(r'^models/%s/' % class_name_to_low_case(model.__name__),
                                    DjangoAutoFilterNew.as_view(model_class=model)))


def get_filter_urls(models, template_name=None):
    url_list = []
    for model in model_enumerator(models):
        param_dict = {"model": model}
        if template_name is not None:
            param_dict["template_name"] = template_name
        url_list.append(url(r'^model/%s/' % class_name_to_low_case(model.__name__),
                            login_required(DjangoAutoFilterNew.as_view(**param_dict))))
    p = patterns('', *url_list)
    return p
from django.conf.urls import patterns, url
from django.contrib.auth.decorators import login_required
from django_auto_filter.views_django_auto_filter_new import DjangoAutoFilterNew
from djangoautoconf.model_utils.model_attr_utils import model_enumerator
from ufs_tools.string_tools import class_name_to_low_case


def add_filter_to_url_for(urlpatterns, models):
    for model in model_enumerator(models):
        urlpatterns += patterns('',
                                url(r'^models/%s/' % class_name_to_low_case(model.__name__),
                                    DjangoAutoFilterNew.as_view(model=model)))


def get_filter_urls(models, template_name=None):
    url_list = []
    for model in model_enumerator(models):
        param_dict = {"model": model}
        if template_name is not None:
            param_dict["template_name"] = template_name
        url_list.append(url(r'^model/%s/' % class_name_to_low_case(model.__name__),
                            login_required(DjangoAutoFilterNew.as_view(**param_dict))))
    p = patterns('', *url_list)
    return p
Fix attribute from model_class to model issue.
Fix attribute from model_class to model issue.
Python
bsd-3-clause
weijia/django-auto-filter,weijia/django-auto-filter,weijia/django-auto-filter
0cdfabf24c01920617535205dfcdba7a187b4d32
doc/_ext/saltdocs.py
doc/_ext/saltdocs.py
def setup(app):
    """Additions and customizations to Sphinx that are useful for documenting
    the Salt project.
    """
    app.add_crossref_type(directivename="conf_master", rolename="conf_master",
            indextemplate="pair: %s; conf/master")
    app.add_crossref_type(directivename="conf_minion", rolename="conf_minion",
            indextemplate="pair: %s; conf/minion")
def setup(app):
    """Additions and customizations to Sphinx that are useful for documenting
    the Salt project.
    """
    app.add_crossref_type(directivename="conf_master", rolename="conf_master",
            indextemplate="pair: %s; conf/master")
    app.add_crossref_type(directivename="conf_minion", rolename="conf_minion",
            indextemplate="pair: %s; conf/minion")
    app.add_crossref_type(directivename="conf-log", rolename="conf-log",
            indextemplate="pair: %s; conf/logging")
Allow the `conf-log` role to link to the logging documentation.
Allow the `conf-log` role to link to the logging documentation.
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
db2d0b2f7277f21ce2f500dc0cc4837258fdd200
traceview/__init__.py
traceview/__init__.py
# -*- coding: utf-8 -*-

"""
TraceView API library

:copyright: (c) 2014 by Daniel Riti.
:license: MIT, see LICENSE for more details.
"""

__title__ = 'traceview'
__version__ = '0.1.0'
__author__ = 'Daniel Riti'
__license__ = 'MIT'

from .request import Request
import resources


class TraceView(object):
    """ Provides access to TraceView API resources.

    :param api_key: The TraceView API access key.
    """

    def __init__(self, api_key):
        self.api_key = api_key

        self.organization = resources.Organization(self.api_key)
        self.apps = resources.App(self.api_key)
# -*- coding: utf-8 -*-

"""
TraceView API library

:copyright: (c) 2014 by Daniel Riti.
:license: MIT, see LICENSE for more details.
"""

__title__ = 'traceview'
__version__ = '0.1.0'
__author__ = 'Daniel Riti'
__license__ = 'MIT'

from .request import Request
import resources


class TraceView(object):
    """ Provides access to TraceView API resources.

    :param api_key: The TraceView API access key.
    """

    def __init__(self, api_key):
        self.api_key = api_key

        self.organization = resources.Organization(self.api_key)
        self.apps = resources.App(self.api_key)
        self.layers = resources.Layer(self.api_key)
Add layers object attribute to TraceView.
Add layers object attribute to TraceView.
Python
mit
danriti/python-traceview
45cebb2ecd7b30bf33064701967e8690632b0c77
plugins/media.py
plugins/media.py
import plugintypes
import tgl


class MediaPlugin(plugintypes.TelegramPlugin):
    """
    Upload media files to chat when linked.
    """
    patterns = [
        "(https?://[\w\-\_\.\?\:\/\+\=\&]+\.(gif|mp4|pdf|pdf|ogg|zip|mp3|rar|wmv|doc|avi))",
    ]

    usage = [
        "Automatically detects urls.",
    ]

    def run(self, msg, matches):
        filename = self.bot.download_to_file(matches.group(1), matches.group(2))

        if filename:
            peer = self.bot.get_peer_to_send(msg)
            tgl.send_document(peer, filename)
import plugintypes
import tgl


class MediaPlugin(plugintypes.TelegramPlugin):
    """
    Upload media files to chat when linked.
    """
    patterns = [
        "(https?://[\w\-\_\.\?\:\/\+\=\&]+\.(gif|mp4|pdf|pdf|ogg|zip|mp3|rar|wmv|doc|avi))v?",
    ]

    usage = [
        "Automatically detects urls.",
    ]

    def run(self, msg, matches):
        filename = self.bot.download_to_file(matches.group(1), matches.group(2))

        if filename:
            peer = self.bot.get_peer_to_send(msg)
            tgl.send_document(peer, filename)
Stop grabbing gif versions of gifv's posted.
Stop grabbing gif versions of gifv's posted.
Python
mit
datamachine/telex,Thom7/telex,Thom7/telex,datamachine/telex
bf8d9fa8d309a1e1252acdcb8a6cfe785a27c859
automata/automaton.py
automata/automaton.py
#!/usr/bin/env python3

import abc


class Automaton(metaclass=abc.ABCMeta):

    def __init__(self, states, symbols, transitions, initial_state,
                 final_states):
        """initialize a complete finite automaton"""
        self.states = states
        self.symbols = symbols
        self.transitions = transitions
        self.initial_state = initial_state
        self.final_states = final_states
        self.validate_automaton()

    @abc.abstractmethod
    def validate_input(self):
        pass

    @abc.abstractmethod
    def validate_automaton(self):
        pass


class AutomatonError(Exception):
    """the base class for all automaton-related errors"""
    pass


class InvalidStateError(AutomatonError):
    """a state is not a valid state for this automaton"""
    pass


class InvalidSymbolError(AutomatonError):
    """a symbol is not a valid symbol for this automaton"""
    pass


class MissingStateError(AutomatonError):
    """a state is missing from the transition function"""
    pass


class MissingSymbolError(AutomatonError):
    """a symbol is missing from the transition function"""
    pass


class FinalStateError(AutomatonError):
    """the automaton stopped at a non-final state"""
    pass
#!/usr/bin/env python3

import abc


class Automaton(metaclass=abc.ABCMeta):
    """an abstract base class for finite automata"""

    def __init__(self, states, symbols, transitions, initial_state,
                 final_states):
        """initialize a complete finite automaton"""
        self.states = states
        self.symbols = symbols
        self.transitions = transitions
        self.initial_state = initial_state
        self.final_states = final_states
        self.validate_automaton()

    @abc.abstractmethod
    def validate_input(self):
        """returns True if the given string is accepted by this automaton;
        raises the appropriate exception if the string is not accepted"""
        pass

    @abc.abstractmethod
    def validate_automaton(self):
        """returns True if this automaton is internally consistent;
        raises the appropriate exception if this automaton is invalid"""
        pass


class AutomatonError(Exception):
    """the base class for all automaton-related errors"""
    pass


class InvalidStateError(AutomatonError):
    """a state is not a valid state for this automaton"""
    pass


class InvalidSymbolError(AutomatonError):
    """a symbol is not a valid symbol for this automaton"""
    pass


class MissingStateError(AutomatonError):
    """a state is missing from the transition function"""
    pass


class MissingSymbolError(AutomatonError):
    """a symbol is missing from the transition function"""
    pass


class FinalStateError(AutomatonError):
    """the automaton stopped at a non-final state"""
    pass
Add docstrings to Automaton base class
Add docstrings to Automaton base class
Python
mit
caleb531/automata
f4bf48ef24a6d3fcb15c0c86da0cfb48f1533f68
ctypeslib/test/stdio.py
ctypeslib/test/stdio.py
import os
from ctypeslib.dynamic_module import include
from ctypes import *

if os.name == "nt":
    _libc = CDLL("msvcrt")
else:
    _libc = CDLL(None)

include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""", persist=False)
import os
from ctypeslib.dynamic_module import include
from ctypes import *

if os.name == "nt":
    _libc = CDLL("msvcrt")
else:
    _libc = CDLL(None)

_gen_basename = include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
/* Silly comment */
""", persist=False)
Store the basename of the generated files, to allow the unittests to clean up in the tearDown method.
Store the basename of the generated files, to allow the unittests to clean up in the tearDown method. git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@52710 6015fed2-1504-0410-9fe1-9d1591cc4771
Python
mit
luzfcb/ctypeslib,luzfcb/ctypeslib,luzfcb/ctypeslib,trolldbois/ctypeslib,trolldbois/ctypeslib,trolldbois/ctypeslib
c0bebbfe9247e367382008d35e98de894038d7f4
grazer/run.py
grazer/run.py
import click
import logging

from dotenv import load_dotenv, find_dotenv

from grazer.config import Config
from grazer.core import crawler

logger = logging.getLogger("Verata")


@click.command()
@click.option("--env", default=find_dotenv())
@click.option("--config")
@click.option("--log_level", default="INFO")
@click.option("--debug/--info", default=False)
@click.option("--output")
def main(env, config, log_level, debug, output):
    if debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=getattr(logging, log_level))
    load_dotenv(env)
    cfg = Config(config)

    with open(output, "w") as f:
        for record, link in crawler.create(cfg):
            logging.debug("Record: {0} Link: {1}".format(record, link))
            f.write("({0}, {1})\n".format(record, link))


if __name__ == "__main__":
    main()
import click
import logging

from dotenv import load_dotenv, find_dotenv

from grazer.config import Config
from grazer.core import crawler

logger = logging.getLogger("Verata")


@click.command()
@click.option("--env", default=find_dotenv(), help="Environment file")
@click.option("--config", help="Configuration file")
@click.option("--log_level", default="INFO", help="Defines a log level",
              type=click.Choice(["DEBUG", "INFO", "TRACE"]))
@click.option("--debug", default=False, is_flag=True,
              help="Shortcut for DEBUG log level")
@click.option("--output", help="All results goes here", is_eager=True)
def main(env, config, log_level, debug, output):
    if debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=getattr(logging, log_level))
    load_dotenv(env)
    cfg = Config(config)

    with open(output, "w") as f:
        for record, link in crawler.create(cfg):
            logging.debug("Record: {0} Link: {1}".format(record, link))
            f.write("({0}, {1})\n".format(record, link))


if __name__ == "__main__":
    main()
Make CLI params a bit clearer
Make CLI params a bit clearer
Python
mit
CodersOfTheNight/verata
0e522a5cb9e2967de2a6f03badbe62e1c9c201a9
motobot/core_plugins/network_handlers.py
motobot/core_plugins/network_handlers.py
from motobot import hook
from time import sleep


@hook('PING')
def handle_ping(bot, message):
    """ Handle the server's pings. """
    bot.send('PONG :' + message.params[-1])


@hook('NOTICE')
def handle_notice(bot, message):
    """ Use the notice message to identify and register to the server. """
    if not bot.identified:
        bot.send('USER MotoBot localhost localhost MotoBot')
        bot.send('NICK ' + bot.nick)
        sleep(2)

        if bot.nickserv_password is not None:
            bot.send('PRIVMSG nickserv :identify ' + bot.nickserv_password)
            sleep(2)

        for channel in bot.channels:
            bot.send('JOIN ' + channel)

        bot.identified = True


@hook('INVITE')
def handle_invite(bot, message):
    """ Join a channel when invited. """
    bot.join(message.params[-1])


@hook('ERROR')
def handle_error(bot, message):
    """ Handle an error message from the server. """
    bot.connected = bot.identified = False
from motobot import hook
from time import sleep


@hook('PING')
def handle_ping(bot, message):
    """ Handle the server's pings. """
    bot.send('PONG :' + message.params[-1])


@hook('439')
def handle_notice(bot, message):
    """ Use the notice message to identify and register to the server. """
    if not bot.identified:
        bot.send('USER MotoBot localhost localhost MotoBot')
        bot.send('NICK ' + bot.nick)
        sleep(2)

        if bot.nickserv_password is not None:
            bot.send('PRIVMSG nickserv :identify ' + bot.nickserv_password)
            sleep(2)

        for channel in bot.channels:
            bot.send('JOIN ' + channel)

        bot.identified = True


@hook('INVITE')
def handle_invite(bot, message):
    """ Join a channel when invited. """
    bot.join(message.params[-1])


@hook('ERROR')
def handle_error(bot, message):
    """ Handle an error message from the server. """
    bot.connected = bot.identified = False
Change connect command to 439
Change connect command to 439
Python
mit
Motoko11/MotoBot
31544a73171c484ee57452d937f0ce040c2963be
wmtsproxy/setup.py
wmtsproxy/setup.py
#!/usr/bin/env python

from setuptools import setup, find_packages

setup(name='wmtsproxy',
      version='0.1.2',
      description='WMTSProxy makes WMS/WMTS layers available as WMTS',
      author='Omniscale GmbH & Co. KG',
      author_email='[email protected]',
      packages=find_packages(),
      license='Apache 2',
      install_requires=[
          "PyYAML",
          "requests",
          "mapproxy>=1.7.0",
      ]
)
#!/usr/bin/env python

from setuptools import setup, find_packages

setup(name='wmtsproxy',
      version='0.2.0',
      description='WMTSProxy makes WMS/WMTS layers available as WMTS',
      author='Omniscale GmbH & Co. KG',
      author_email='[email protected]',
      packages=find_packages(),
      license='Apache 2',
      install_requires=[
          "PyYAML",
          "requests",
          "mapproxy>=1.7.0",
      ]
)
Update new version following semver
Update new version following semver
Python
apache-2.0
CartoDB/wmtsproxy,CartoDB/wmtsproxy
7f62587e099b9ef59731b6387030431b09f663f9
bot_chucky/helpers.py
bot_chucky/helpers.py
""" Helper classes """ import facebook import requests as r class FacebookData: def __init__(self, token): """ :param token: Facebook Page token :param _api: Instance of the GraphAPI object """ self.token = token self._api = facebook.GraphAPI(self.token) def get_user_name(self, _id): """ :param _id: find user object by _id :return: first name of user, type -> str """ if not isinstance(_id, str): raise ValueError('id must be a str') user = self._api.get_object(_id) return user['first_name'] if user else None class WeatherData: """ Class which collect weather data """ def __init__(self, api_token): """ :param api_token: Open Weather TOKEN """ self.token = api_token def get_current_weather(self, city_name): """ :param city_name: Open weather API, find by city name :return dictionary object with information for example: {'weather': [{'id': 800, 'main': 'Clear', 'description': 'clear sky'}]} """ api_url = f'http://api.openweathermap.org' \ f'/data/2.5/weather?q={city_name}&APPID={self.token}' info = r.get(api_url).json() return info
""" Helper classes """ import facebook import requests as r class FacebookData: def __init__(self, token): """ :param token: Facebook Page token :param _api: Instance of the GraphAPI object """ self.token = token self._api = facebook.GraphAPI(self.token) def get_user_name(self, _id): """ :param _id: find user object by _id :return: first name of user, type -> str """ if not isinstance(_id, str): raise ValueError('id must be a str') user = self._api.get_object(_id) return user['first_name'] if user else None class WeatherData: """ Class which collect weather data """ def __init__(self, api_token): """ :param api_token: Open Weather TOKEN """ self.token = api_token def get_current_weather(self, city_name): """ :param city_name: Open weather API, find by city name :return dictionary object with information for example: {'weather': [{'id': 800, 'main': 'Clear', 'description': 'clear sky'}]} """ api_url = f'http://api.openweathermap.org' \ f'/data/2.5/weather?q={city_name}&APPID={self.token}' info = r.get(api_url).json() return info class StackOverFlowData: params = {} def get_answer_by_title(self, title): pass
Add StackOverFlowData, not completed yet
Add StackOverFlowData, not completed yet
Python
mit
MichaelYusko/Bot-Chucky
a45942894ace282883da3afa10f6739d30943764
dewbrick/majesticapi.py
dewbrick/majesticapi.py
import argparse
import json
import os

import requests

BASE_URL = "https://api.majestic.com/api/json"
BASE_PARAMS = {'app_api_key': os.environ.get('THEAPIKEY')}


def get(cmd, params):
    querydict = {'cmd': cmd}
    querydict.update(BASE_PARAMS)
    querydict.update(params)

    response = requests.get(BASE_URL, params=querydict)
    return json.loads(response.text)


def getIndexItemInfo(site):
    cmd = 'GetIndexItemInfo'
    params = {'items': '2',
              'item0': site,
              'item1': 'chrishannam.co.uk',
              'datasource': 'fresh'}

    responsedata = get(cmd, params)

    if responsedata['Code'] == 'OK':
        data = responsedata['DataTables']['Results']['Data'][0]
        for data in responsedata['DataTables']['Results']['Data']:
            yield {
                'speed': data['OutDomainsExternal'] + 1,
                'power': data['OutLinksExternal'] + 1,
                'agility': data['OutLinksInternal'] + 1,
                'strength': data['RefDomainsEDU'] + 1,
                'smell': data['CitationFlow'] + 1,
            }
    else:
        yield {}


def run():
    parser = argparse.ArgumentParser(description="a test thing")
    parser.add_argument('url')
    args = parser.parse_args()

    results = getIndexItemInfo(args.url)

    for result in results:
        print(result)


if __name__ == '__main__':
    run()
import argparse
import json
import os

import requests

BASE_URL = "https://api.majestic.com/api/json"
BASE_PARAMS = {'app_api_key': os.environ.get('THEAPIKEY')}


def get(cmd, params):
    querydict = {'cmd': cmd}
    querydict.update(BASE_PARAMS)
    querydict.update(params)

    response = requests.get(BASE_URL, params=querydict)
    return json.loads(response.text)


def getIndexItemInfo(sitelist):
    cmd = 'GetIndexItemInfo'
    params = {'items': len(sitelist),
              'datasource': 'fresh'}
    items = {'item{0}'.format(i): site for i, site in enumerate(sitelist)}
    params.update(items)

    responsedata = get(cmd, params)

    if responsedata['Code'] == 'OK':
        for data in responsedata['DataTables']['Results']['Data']:
            yield {
                'speed': data['OutDomainsExternal'] + 1,
                'power': data['OutLinksExternal'] + 1,
                'agility': data['OutLinksInternal'] + 1,
                'strength': data['RefDomainsEDU'] + 1,
                'smell': data['CitationFlow'] + 1,
            }
    else:
        yield {}


def run():
    parser = argparse.ArgumentParser(description="a test thing")
    parser.add_argument('urls', nargs='+')
    args = parser.parse_args()

    results = getIndexItemInfo(args.urls)

    for result in results:
        print(result)


if __name__ == '__main__':
    run()
Handle multiple sites in single request.
Handle multiple sites in single request.
Python
apache-2.0
ohmygourd/dewbrick,ohmygourd/dewbrick,ohmygourd/dewbrick